Compare commits
1487 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| f2ac198831 | |||
|
|
abd263f127 | ||
|
|
9c2d4ac6a7 | ||
|
|
4d950ac17e | ||
|
|
5e688c104c | ||
|
|
4ce0819c68 | ||
|
|
f7d9d0555c | ||
|
|
b754182e0d | ||
|
|
7b3a607b5e | ||
|
|
0e47e9c07f | ||
|
|
c140e5a163 | ||
|
|
0bc379bf42 | ||
|
|
9f0a3a0d9f | ||
|
|
355cf9a36c | ||
|
|
d9792e99c1 | ||
|
|
6cb7de7fb3 | ||
|
|
cb0860cddf | ||
|
|
6edeb59b16 | ||
|
|
a245aa73d4 | ||
|
|
08be9bdb4e | ||
|
|
2ee2181211 | ||
|
|
59e1c7d6f4 | ||
|
|
c2aca6e3a0 | ||
| 5f39da9273 | |||
| e16d69737e | |||
|
|
e6e2af38d3 | ||
|
|
975697728e | ||
|
|
a2789c9dba | ||
|
|
096197cf81 | ||
|
|
f9d577f596 | ||
|
|
429356a217 | ||
|
|
636e2aefde | ||
|
|
e830589e38 | ||
|
|
933ba62f39 | ||
|
|
93f7aa4457 | ||
|
|
3ef45c42fb | ||
|
|
071e5978aa | ||
|
|
fba5102705 | ||
|
|
138e308c32 | ||
|
|
fa8f1a0c8e | ||
|
|
7325c3b4d3 | ||
|
|
3e5ddb2aff | ||
|
|
d1ebbde778 | ||
|
|
8a042c5714 | ||
|
|
7a2c252bbc | ||
|
|
7d212174a6 | ||
|
|
685b35cb23 | ||
|
|
1175a54266 | ||
|
|
d38064e1c0 | ||
|
|
59a57a740f | ||
|
|
f319851753 | ||
|
|
ce424977c0 | ||
|
|
62c6b0d4cb | ||
|
|
7685570da9 | ||
|
|
ff0c488968 | ||
|
|
ebc6c8df4d | ||
|
|
06a2c3ab12 | ||
|
|
e877da1825 | ||
|
|
0b744b1179 | ||
|
|
1041f21724 | ||
|
|
73012f7976 | ||
|
|
a1b4cece7d | ||
|
|
ac4cd50929 | ||
|
|
bb5b3846a6 | ||
|
|
3a11fbcf18 | ||
|
|
a8da740ada | ||
|
|
082ec0f592 | ||
|
|
d49cad0bc0 | ||
|
|
75d8676de5 | ||
|
|
8228bc8996 | ||
|
|
8081990895 | ||
|
|
eb2d5419f9 | ||
|
|
a9ba3fcbab | ||
|
|
bcd610e2e3 | ||
|
|
217362fe14 | ||
|
|
0e0d860cd2 | ||
|
|
a8246a96cd | ||
|
|
33d28d1e7b | ||
|
|
377d7bdf47 | ||
|
|
f6eb3dd90e | ||
|
|
f632a66d60 | ||
|
|
ea0bb607b6 | ||
|
|
85e15b77d9 | ||
|
|
95055dea1d | ||
|
|
630804c5b5 | ||
|
|
6cce4183e9 | ||
|
|
35bd0d625c | ||
|
|
073d00b45b | ||
|
|
982cb13e7e | ||
|
|
6d3d4c8339 | ||
|
|
911e420c24 | ||
|
|
e69ff0325e | ||
|
|
7c1353a8aa | ||
|
|
4ce9a172b8 | ||
|
|
61a7b24084 | ||
|
|
8b2ca19f1a | ||
|
|
deafa8f4d3 | ||
|
|
c739935335 | ||
|
|
34e4a4f483 | ||
|
|
85c485509c | ||
|
|
026145e0c7 | ||
|
|
bb0cf5b547 | ||
|
|
d8acffa844 | ||
|
|
a1f1cddcbe | ||
|
|
d19612bb3c | ||
|
|
30e81961e8 | ||
|
|
a9873da802 | ||
|
|
6046e62f11 | ||
|
|
eb032619a0 | ||
|
|
d98f0a20a8 | ||
|
|
945b40249c | ||
|
|
1192642811 | ||
|
|
70772a61a9 | ||
|
|
66b3f031a5 | ||
|
|
56a9c115f8 | ||
|
|
bd10ed93af | ||
|
|
c3351f01e4 | ||
|
|
1d13384f6c | ||
|
|
598e225b00 | ||
|
|
d8090a8172 | ||
|
|
f47413686c | ||
|
|
c8a194cf32 | ||
|
|
f07ac1f322 | ||
|
|
02561b614f | ||
|
|
6ca2d02c69 | ||
|
|
300e103fed | ||
|
|
a77c0e5f32 | ||
|
|
cf644d508d | ||
|
|
452be8a22f | ||
|
|
1e816f65a5 | ||
|
|
d8496fe4c4 | ||
|
|
ca10956014 | ||
|
|
3984b84b6c | ||
|
|
493114e825 | ||
|
|
7856b6fd27 | ||
|
|
957b47680d | ||
|
|
86f6a944ff | ||
|
|
2631173a0d | ||
|
|
0dc1f810d2 | ||
|
|
0b8d19fbbe | ||
|
|
51bbe4193b | ||
|
|
d5945f6ac6 | ||
|
|
539f8824d1 | ||
|
|
539efc3f7b | ||
|
|
c0861ba796 | ||
|
|
52d308e5ae | ||
|
|
57f3afae97 | ||
|
|
80ff75debc | ||
|
|
d3a3bd2784 | ||
|
|
2dcccb804e | ||
|
|
ed7ac04d64 | ||
|
|
baf4d2bde6 | ||
|
|
ff047c5823 | ||
|
|
6c701e0c41 | ||
|
|
28ea14f2e9 | ||
|
|
f00f9b104c | ||
|
|
78d91826b1 | ||
|
|
a5be789745 | ||
|
|
028e318bd3 | ||
|
|
b2b0b0f2c8 | ||
|
|
42e73cb7e9 | ||
|
|
89c3733ce8 | ||
|
|
4be473dcea | ||
|
|
97bdc32cce | ||
|
|
d2da46854e | ||
|
|
150d3622e4 | ||
|
|
990ab29200 | ||
|
|
80d3bc84b8 | ||
|
|
b31c79431e | ||
|
|
73587ff47b | ||
|
|
53e9db2f42 | ||
|
|
b290441956 | ||
|
|
9d0d20afbd | ||
|
|
4a7d3d5fc2 | ||
|
|
32ecb05902 | ||
|
|
644ca7d0d0 | ||
|
|
7dc1b25ee7 | ||
|
|
3f28b9abc4 | ||
|
|
cf36d91da4 | ||
|
|
8c1465d3a2 | ||
|
|
bed7a4d6be | ||
|
|
23a91cca16 | ||
|
|
82c99e2cfa | ||
|
|
7d745179d4 | ||
|
|
78283e51f6 | ||
|
|
5a9ff8e331 | ||
|
|
814ddaa532 | ||
|
|
b927b5fac2 | ||
|
|
e9cf140177 | ||
|
|
9ee9de7911 | ||
|
|
53844aa0ff | ||
|
|
f527f743fc | ||
|
|
5801736955 | ||
|
|
11afc4c37d | ||
|
|
5f1427cabd | ||
|
|
27008e0f18 | ||
|
|
8898fd6e2f | ||
|
|
96d4cd50d4 | ||
|
|
7a77f31aa3 | ||
|
|
78f29e726e | ||
|
|
92e2006782 | ||
|
|
5b2edc73ec | ||
|
|
395e31ff85 | ||
|
|
dd0d828794 | ||
|
|
15bb1a2335 | ||
|
|
0a7b7880da | ||
|
|
277f324cf0 | ||
|
|
e496d5bf36 | ||
|
|
0ed6c8979e | ||
|
|
075dd1adeb | ||
|
|
027faf1949 | ||
|
|
54e049874c | ||
|
|
c4c6ba9ea4 | ||
|
|
a77bf0b8fb | ||
|
|
b9bad14df6 | ||
|
|
b1058933b8 | ||
|
|
32c7f6b29e | ||
|
|
137705450b | ||
|
|
4dc6034de8 | ||
|
|
7fa18fe38f | ||
|
|
7e34347ed7 | ||
|
|
68fa547b06 | ||
|
|
a113ed049d | ||
|
|
2f8a6a91f8 | ||
|
|
f75ed257f2 | ||
|
|
06091d1af4 | ||
|
|
fb4e550122 | ||
|
|
a74301f479 | ||
|
|
c49f417d00 | ||
|
|
a2bcd8aa30 | ||
|
|
6c34686d8d | ||
|
|
86ec6f43cb | ||
|
|
e9c7a3b99e | ||
|
|
f0ac63cd96 | ||
|
|
b1e8663ba9 | ||
|
|
2cb6bf2b98 | ||
|
|
b35c63d1c8 | ||
|
|
96b929b313 | ||
|
|
d8cddfafa0 | ||
|
|
de7d12d2c8 | ||
|
|
bc8d521853 | ||
|
|
a6c2666c82 | ||
|
|
32424e7938 | ||
|
|
ca4a3d64eb | ||
|
|
2257688c65 | ||
|
|
ea9ddc58c0 | ||
|
|
c47d375a58 | ||
|
|
36f97a5300 | ||
|
|
4adc3fadaa | ||
|
|
d9c64e7f87 | ||
|
|
9f8ae75691 | ||
|
|
7e0b88ddbd | ||
|
|
ed62f2b1f2 | ||
|
|
4e89cdaca1 | ||
|
|
b72ba8c66a | ||
|
|
fc9de568bd | ||
|
|
fe5a542e08 | ||
|
|
5d86693d98 | ||
|
|
4caefdb3fd | ||
|
|
2f843a4d40 | ||
|
|
0175609f02 | ||
|
|
f9f0dad203 | ||
|
|
fdb064ffc8 | ||
|
|
e98231c327 | ||
|
|
0888da2197 | ||
|
|
2d231ad989 | ||
|
|
19e0b7e565 | ||
|
|
fd4128c364 | ||
|
|
98f105fda0 | ||
|
|
201f2e23c7 | ||
|
|
32b257ec30 | ||
|
|
53bceb89cd | ||
|
|
04770a1e8f | ||
|
|
fbda4c76f0 | ||
|
|
85387a0a9b | ||
|
|
5ac4f6dcf9 | ||
|
|
6671f8c6fe | ||
|
|
5b77bb4649 | ||
|
|
8f6329d71d | ||
|
|
5d71ac584c | ||
|
|
534c627300 | ||
|
|
1a1e5ecd3f | ||
|
|
3e555c64d9 | ||
|
|
8d6b5f7105 | ||
|
|
4692be663e | ||
|
|
5ae0fb1b0e | ||
|
|
990d830f24 | ||
|
|
c6aa90db3e | ||
|
|
c1eed45194 | ||
|
|
31ee668311 | ||
|
|
9f60121609 | ||
|
|
a61d04f445 | ||
|
|
8ba8ada253 | ||
|
|
885b796f85 | ||
|
|
fa47806c93 | ||
|
|
5997666bad | ||
|
|
40186ce155 | ||
|
|
9cfdb35224 | ||
|
|
64860c4624 | ||
|
|
32bd2a89a3 | ||
|
|
1e6eca307b | ||
|
|
fc23fcd7c1 | ||
|
|
ab52825c71 | ||
|
|
28def30510 | ||
|
|
8f4d017180 | ||
|
|
df302ad517 | ||
|
|
357d99ac7c | ||
|
|
7e3fecc44a | ||
|
|
e0b3dd9795 | ||
|
|
6a52a2bc46 | ||
|
|
cd2b087006 | ||
|
|
ef2940142c | ||
|
|
e13b8b8827 | ||
|
|
43fce2e89a | ||
|
|
7856f0d602 | ||
|
|
bcc6ab50e2 | ||
|
|
23aa8c05ab | ||
|
|
986d898217 | ||
|
|
654c0e5e56 | ||
|
|
fed0edbf16 | ||
|
|
ede91da6f8 | ||
|
|
1f18d4291f | ||
|
|
d066e8939d | ||
|
|
e4c3435d34 | ||
|
|
fc80b180bc | ||
|
|
0d9dca99e9 | ||
|
|
2ff921cc1a | ||
|
|
e0e66fa6af | ||
|
|
110e29ec5a | ||
|
|
a2b6d622de | ||
|
|
0887789f5e | ||
|
|
3c4574f2ec | ||
|
|
62029f6164 | ||
|
|
ba986847d6 | ||
|
|
fded78ce6f | ||
|
|
8d44649a1e | ||
|
|
f4d0c51dc2 | ||
|
|
ba60db9977 | ||
|
|
0a5a7a684d | ||
|
|
7181c46ed5 | ||
|
|
afa7e69347 | ||
|
|
5050dda4d2 | ||
|
|
9ea1e16ac8 | ||
|
|
922f44f605 | ||
|
|
6ea3fc918b | ||
|
|
0c201533f0 | ||
|
|
b1e99b96c4 | ||
|
|
de1c8485ae | ||
|
|
565d1d0388 | ||
|
|
a15fea1d65 | ||
|
|
da3cf6ca27 | ||
|
|
d8dbbc6832 | ||
|
|
3563a964ef | ||
|
|
208f0c248d | ||
|
|
c0e034726a | ||
|
|
d676a7071a | ||
|
|
a681b2c944 | ||
|
|
c53701d56b | ||
|
|
a8c1f2eccc | ||
|
|
cf7ee44efc | ||
|
|
2e4a9eaaeb | ||
|
|
569a2b5694 | ||
|
|
baa84773a5 | ||
|
|
a39e642b5a | ||
|
|
e42cb49cbe | ||
|
|
b40c12efbd | ||
|
|
3c5ddad133 | ||
|
|
1ca4cb40dd | ||
|
|
678b0b9db1 | ||
|
|
e633fa41ac | ||
|
|
dac303e33b | ||
|
|
fd77d672c5 | ||
|
|
a991d9f512 | ||
|
|
674b3b54dc | ||
|
|
09948845ca | ||
|
|
0b8a0695f7 | ||
|
|
a7c95d5718 | ||
|
|
8a05597e52 | ||
|
|
145d45f800 | ||
|
|
0476b67b2f | ||
|
|
2751a828e0 | ||
|
|
a3f5dbfe07 | ||
|
|
60e09c0dd7 | ||
|
|
a23297e56a | ||
|
|
b05163632b | ||
|
|
f59be31ded | ||
|
|
2e527250de | ||
|
|
91dd378f5d | ||
|
|
f5b3d033a3 | ||
|
|
db98e3b722 | ||
|
|
586cf16fdc | ||
|
|
064d877205 | ||
|
|
fbf8cc87c9 | ||
|
|
bebabf84a0 | ||
|
|
36f169635a | ||
|
|
3f509f44a1 | ||
|
|
9bc8a8ac2b | ||
|
|
abfe98283b | ||
|
|
ded0224f51 | ||
|
|
0060a634d2 | ||
|
|
d34005e04f | ||
|
|
c734bd48b5 | ||
|
|
174c814541 | ||
|
|
993b6e9d0a | ||
|
|
273c9f7b99 | ||
|
|
822deb2867 | ||
|
|
132e2afce0 | ||
|
|
2a888f0bce | ||
|
|
f8202a7add | ||
|
|
87e8d0b775 | ||
|
|
569c00ff5f | ||
|
|
15457e1db4 | ||
|
|
0ae1eb9578 | ||
|
|
aa1d0161d3 | ||
|
|
a2d8b88337 | ||
|
|
a1b4306954 | ||
|
|
88e07031a6 | ||
|
|
cb4daa1ba1 | ||
|
|
46cfa79e89 | ||
|
|
a480f47cea | ||
|
|
4ef789e7b9 | ||
|
|
05d6d8399d | ||
|
|
8bc81d6b5e | ||
|
|
6c5cb8f07d | ||
|
|
70e79e50d6 | ||
|
|
d2c6c6cc4d | ||
|
|
ba5e22bd21 | ||
|
|
9d0bfdea44 | ||
|
|
92ae681517 | ||
|
|
3afa8c4441 | ||
|
|
547fdef9c4 | ||
|
|
152c250300 | ||
|
|
04161284a7 | ||
|
|
14d4818867 | ||
|
|
9f42e91088 | ||
|
|
79ac3b9700 | ||
|
|
7a3178896b | ||
|
|
ee95512321 | ||
|
|
55519b1855 | ||
|
|
89b99614a0 | ||
|
|
7bbd9a1a4f | ||
|
|
0dfb48593e | ||
|
|
3abe0803d6 | ||
|
|
1e0a919dc7 | ||
|
|
489e8aa4b3 | ||
|
|
5058f2f8d8 | ||
|
|
49aff9f22e | ||
|
|
775d4accf7 | ||
|
|
703a1baf8b | ||
|
|
51fdad2ae2 | ||
|
|
86545e5f99 | ||
|
|
5a0413b3e4 | ||
|
|
a0ff2b8ae4 | ||
|
|
6b6c390cb8 | ||
|
|
cd937c4844 | ||
|
|
cc1a5783fd | ||
|
|
5631caad36 | ||
|
|
d0144fb0d0 | ||
|
|
5d1870c1cf | ||
|
|
fb87460bc7 | ||
|
|
cb122cbc61 | ||
|
|
efc4e299ac | ||
|
|
848caa275b | ||
|
|
03754c56c6 | ||
|
|
471f6ff93b | ||
|
|
c53ad56a6d | ||
|
|
646439d86a | ||
|
|
ae7f65e938 | ||
|
|
b2d71a037c | ||
|
|
019ec8972f | ||
|
|
e25e411ebe | ||
|
|
911dcbac9f | ||
|
|
25195c972c | ||
|
|
304c92f733 | ||
|
|
336b9a126e | ||
|
|
01efbf5c79 | ||
|
|
866fa6a758 | ||
|
|
b8bb5e6d82 | ||
|
|
cc4b3cce55 | ||
|
|
5699562133 | ||
|
|
dae064d2bb | ||
|
|
d54e15cd15 | ||
|
|
dbdf56d494 | ||
|
|
b07cd5515a | ||
|
|
73929f2d25 | ||
|
|
acaa06749e | ||
|
|
1bd6efc0c1 | ||
|
|
936960269d | ||
|
|
5780df20ad | ||
|
|
14e5f33f2a | ||
|
|
7aa86bcec4 | ||
|
|
c77a1e85a4 | ||
|
|
093b8cb6df | ||
|
|
6460198e1d | ||
|
|
662dc3fa85 | ||
|
|
5a4bce2816 | ||
|
|
d5ea0d5a17 | ||
|
|
9e525727fd | ||
|
|
95b6995649 | ||
|
|
745064c5ce | ||
|
|
223a3b46bf | ||
|
|
fee3e320ab | ||
|
|
e068cff37b | ||
|
|
28f08ed196 | ||
|
|
056d971000 | ||
|
|
aa5195e5a9 | ||
|
|
85aaaf80ac | ||
|
|
5d467a22d0 | ||
|
|
bcc1cfb67b | ||
|
|
70a24fba69 | ||
|
|
c3121bef84 | ||
|
|
8c25be7d05 | ||
|
|
7463e9d42c | ||
|
|
e4dd9f5bb3 | ||
|
|
4e59d648ef | ||
|
|
7538e76537 | ||
|
|
20ddc54edc | ||
|
|
ac75205933 | ||
|
|
17b58b159a | ||
|
|
191cf445ef | ||
|
|
65a0c08cb3 | ||
|
|
4da68ff89c | ||
|
|
a7da4525fd | ||
|
|
92dfbce45a | ||
|
|
bfdd6b5f7c | ||
|
|
a8a8c09b90 | ||
|
|
4bd13b81cf | ||
|
|
5a07600664 | ||
|
|
f2bc4b21cb | ||
|
|
778975fbde | ||
|
|
57f920624f | ||
|
|
3d87940da9 | ||
|
|
c33d0836eb | ||
|
|
a01cf21f2c | ||
|
|
10fc1d4a2a | ||
|
|
4fa973007d | ||
|
|
c27f5ec02e | ||
|
|
5d6cf3d17d | ||
|
|
f6c7731377 | ||
|
|
bcd739da76 | ||
|
|
bea4915317 | ||
|
|
f695ba4605 | ||
|
|
24983ba3af | ||
|
|
0a2a3b3377 | ||
|
|
d8d7193cbc | ||
|
|
042c9eed9c | ||
|
|
b0c94dd998 | ||
|
|
4c8cc9e823 | ||
|
|
132c9bbdff | ||
|
|
cc0caff8d0 | ||
|
|
3496df24f8 | ||
|
|
e4e35e4c57 | ||
|
|
838320785e | ||
|
|
91bea85519 | ||
|
|
88ea081661 | ||
|
|
39c0a0dba5 | ||
|
|
9d64d3e30d | ||
|
|
39ff238716 | ||
|
|
7c96152f3f | ||
|
|
a2440c19e1 | ||
|
|
24dff5ce85 | ||
|
|
1ca8e15d19 | ||
|
|
f77a8f09bb | ||
|
|
48a7c8140f | ||
|
|
b03ff46767 | ||
|
|
e66e6364c3 | ||
|
|
89ef7ac4f6 | ||
|
|
27e74ee064 | ||
|
|
83c44aa12c | ||
|
|
17da62d53b | ||
|
|
d43fc268f4 | ||
|
|
5a7ab9795d | ||
|
|
cfcdba5aea | ||
|
|
aaca6a6704 | ||
|
|
5f7c822deb | ||
|
|
53be091a31 | ||
|
|
04c68ba013 | ||
|
|
518ea9c8b5 | ||
|
|
6bb2142f35 | ||
|
|
6aa931c866 | ||
|
|
67ab54f820 | ||
|
|
cc9fc80328 | ||
|
|
3e8a8a6b85 | ||
|
|
70a2d0e5f3 | ||
|
|
09e8d7e1da | ||
|
|
3c3d6c1e7f | ||
|
|
0dcc95cd34 | ||
|
|
edae1b0480 | ||
|
|
5fb2cb7e76 | ||
|
|
403bb71c5d | ||
|
|
9e23b16ae7 | ||
|
|
60a93a2433 | ||
|
|
14dfafcb7c | ||
|
|
dcc06cf8c6 | ||
|
|
54c084040c | ||
|
|
a9c38f5a03 | ||
|
|
bce80cca0b | ||
|
|
f56fac6877 | ||
|
|
ef2286dca4 | ||
|
|
a4a012368e | ||
|
|
1b623ef346 | ||
|
|
f661d24544 | ||
|
|
cfe10553b2 | ||
|
|
cf1042f40d | ||
|
|
e77000076b | ||
|
|
49241664dc | ||
|
|
ffa3e9ec9a | ||
|
|
fbbe0d9ca9 | ||
|
|
001eeecc09 | ||
|
|
454a9c7076 | ||
|
|
fc934ce8e1 | ||
|
|
b9bcc28e0f | ||
|
|
b5ddca65f5 | ||
|
|
536805791e | ||
|
|
5a6c73c4c1 | ||
|
|
d50f5a0488 | ||
|
|
671a31a95a | ||
|
|
5e38f41530 | ||
|
|
7569465a61 | ||
|
|
5913166a13 | ||
|
|
6036562af2 | ||
|
|
39e6c258d5 | ||
|
|
1ab0291483 | ||
|
|
fe024502d4 | ||
|
|
9a76c6428a | ||
|
|
2d4053c792 | ||
|
|
a69254612c | ||
|
|
09a14c1270 | ||
|
|
b343068805 | ||
|
|
ecf50dde2b | ||
|
|
008404ffb3 | ||
|
|
2b6cca6bab | ||
|
|
fe968e19c0 | ||
|
|
479498d8be | ||
|
|
af9669acd0 | ||
|
|
d2eb98f859 | ||
|
|
ae11084f48 | ||
|
|
b02b554105 | ||
|
|
2d66ba918a | ||
|
|
400c0f35f0 | ||
|
|
5b8b265371 | ||
|
|
f3fb27005a | ||
|
|
79bb9bdde8 | ||
|
|
4b983e401d | ||
|
|
5f2f0a9a57 | ||
|
|
8b2ae6d464 | ||
|
|
91aae6a9f5 | ||
|
|
b0fc9ef05e | ||
|
|
5d6fbb6f04 | ||
|
|
301ebdbcd8 | ||
|
|
49bcf46b4f | ||
|
|
f7263c8ab8 | ||
|
|
c69772131a | ||
|
|
e8c025b898 | ||
|
|
8fb5c16bce | ||
|
|
f4551ddf3a | ||
|
|
f337488951 | ||
|
|
f104f11613 | ||
|
|
32e8f952fd | ||
|
|
c7cfc81c6f | ||
|
|
faeafc329c | ||
|
|
eeef0d13ff | ||
|
|
0686fca3b0 | ||
|
|
79b425326b | ||
|
|
2d879b9604 | ||
|
|
38eb14b013 | ||
|
|
ff2f43766e | ||
|
|
593d66ea54 | ||
|
|
ee8cbbf7ef | ||
|
|
474cb49d2c | ||
|
|
590f815dc0 | ||
|
|
a93d5246d2 | ||
|
|
3e2ab8e7ee | ||
|
|
a83270699f | ||
|
|
745afb32a3 | ||
|
|
2fcb8d915d | ||
|
|
a596b62a5f | ||
|
|
55b1504aab | ||
|
|
c42822669b | ||
|
|
b58d28c723 | ||
|
|
a6a34d8246 | ||
|
|
0080399db2 | ||
|
|
50b480ee23 | ||
|
|
da467b7837 | ||
|
|
70914b6c3e | ||
|
|
6c3fbfe0ca | ||
|
|
8e4b705e60 | ||
|
|
48d80d3194 | ||
|
|
38768c777d | ||
|
|
1daa841f31 | ||
|
|
a9f81d0ad3 | ||
|
|
76720424ab | ||
|
|
a7b639a66b | ||
|
|
1b78d221b6 | ||
|
|
ac7b1a3c66 | ||
|
|
039c3182aa | ||
|
|
55d94b1844 | ||
|
|
3baa69b43b | ||
|
|
5f92df97e6 | ||
|
|
47f297c495 | ||
|
|
58f09ba150 | ||
|
|
1142948945 | ||
|
|
9528160b03 | ||
|
|
4afdc54914 | ||
|
|
09973a6a56 | ||
|
|
f56bf3afef | ||
|
|
c4c7bbf46b | ||
|
|
7fdd374911 | ||
|
|
fb9ad9870d | ||
|
|
bbabc6f5e9 | ||
|
|
2060579bb4 | ||
|
|
b0f0940605 | ||
|
|
ee23df176d | ||
|
|
d53865d81b | ||
|
|
7becfd8adb | ||
|
|
a9feed56fe | ||
|
|
dea517c17c | ||
|
|
6d93ceb910 | ||
|
|
9b0ac5b83d | ||
|
|
acabb40171 | ||
|
|
04cd56ca16 | ||
|
|
4371512ba2 | ||
|
|
ced686e1cd | ||
|
|
272aab2397 | ||
|
|
a7f87f2ac8 | ||
|
|
f9e85155a8 | ||
|
|
d281c18951 | ||
|
|
04e7f4f8a7 | ||
|
|
a090c11f4a | ||
|
|
c303c697a9 | ||
|
|
9466c5ea10 | ||
|
|
1268fcf1cc | ||
|
|
86d06fa879 | ||
|
|
cfe79a73d6 | ||
|
|
d7a9d9ddc8 | ||
|
|
b3d45384e1 | ||
|
|
64099e37e5 | ||
|
|
2fe612d392 | ||
|
|
14d72b92cb | ||
|
|
8c0055723c | ||
|
|
bb9e368b2d | ||
|
|
2c203acbd2 | ||
|
|
7115eb573c | ||
|
|
e626c6f286 | ||
|
|
24058f9534 | ||
|
|
a8d47e9a72 | ||
|
|
9a2f51b48d | ||
|
|
667deef5f2 | ||
|
|
aa349aa935 | ||
|
|
666ab01d03 | ||
|
|
ffbab0136f | ||
|
|
b5f7399b2c | ||
|
|
f1892eeba2 | ||
|
|
e7128a57db | ||
|
|
793cafd294 | ||
|
|
9b1097effe | ||
|
|
079ae0e1f3 | ||
|
|
e181a9837a | ||
|
|
ca5c7022ef | ||
|
|
75c50392cb | ||
|
|
74f8e744ac | ||
|
|
1c657036da | ||
|
|
2869bb3ef3 | ||
|
|
784e117a8a | ||
|
|
8c2ea052de | ||
|
|
0b6bbf6733 | ||
|
|
2312df3718 | ||
|
|
095dd73c52 | ||
|
|
ac6ac42860 | ||
|
|
c6ce888cc7 | ||
|
|
c4e6fcc451 | ||
|
|
22ae5242b2 | ||
|
|
4d77281249 | ||
|
|
b77a373efa | ||
|
|
463c3908d4 | ||
|
|
79239a2e01 | ||
|
|
41073615e9 | ||
|
|
bd12b09cbc | ||
|
|
7d598bb1d0 | ||
|
|
ccd3e3ab2c | ||
|
|
118dac5a1a | ||
|
|
7035fe05c4 | ||
|
|
af7331c6c6 | ||
|
|
4d2b64e79d | ||
|
|
d47bbd6131 | ||
|
|
54fafa07a2 | ||
|
|
97eef9149c | ||
|
|
b95e70a8c4 | ||
|
|
a16d7de1ac | ||
|
|
6f590a64af | ||
|
|
00e747eb5f | ||
|
|
2d8164cf0a | ||
|
|
be1a8df3ef | ||
|
|
e8ca2faaa7 | ||
|
|
83f9284b05 | ||
|
|
6e5c70ab71 | ||
|
|
ab4f958b5f | ||
|
|
2711ebca68 | ||
|
|
041684b13a | ||
|
|
777a2102f8 | ||
|
|
65949c5af5 | ||
|
|
e40f880b9e | ||
|
|
f9b34bbd50 | ||
|
|
f06dbd0562 | ||
|
|
4a6f5c6268 | ||
|
|
b5dbfb1a86 | ||
|
|
d86a17f76e | ||
|
|
37c7f20256 | ||
|
|
f324407c9c | ||
|
|
c96b39d597 | ||
|
|
d4d89a56e9 | ||
|
|
f19ad133c6 | ||
|
|
00d9019f29 | ||
|
|
8951a6e237 | ||
|
|
b0ac037964 | ||
|
|
2c8ff2d2f2 | ||
|
|
03bd951848 | ||
|
|
63e2ea987e | ||
|
|
115df7f884 | ||
|
|
ed94d34589 | ||
|
|
dc5ff9d809 | ||
|
|
72a6832571 | ||
|
|
b0c692ab7d | ||
|
|
06e8bc14f4 | ||
|
|
3d160fc35c | ||
|
|
734f85a517 | ||
|
|
a56a80db88 | ||
|
|
c4ed90dd5a | ||
|
|
f173b326a4 | ||
|
|
5e302290de | ||
|
|
75af3c7f2b | ||
|
|
d7aec6fdfb | ||
|
|
9a3586fd1c | ||
|
|
063bcbd0e5 | ||
|
|
a76e69e05d | ||
|
|
e560cce879 | ||
|
|
6406c3af13 | ||
|
|
173d2d3a3a | ||
|
|
9a5cc1595d | ||
|
|
d1962ab745 | ||
|
|
af5b27d0fc | ||
|
|
a9196d27d6 | ||
|
|
2f1ea12cfa | ||
|
|
37a408e58a | ||
|
|
736a993284 | ||
|
|
09359a8df3 | ||
|
|
a68987f257 | ||
|
|
3b5831f317 | ||
|
|
5336981154 | ||
|
|
df417fa619 | ||
|
|
509bbe25d2 | ||
|
|
f49012ab51 | ||
|
|
e4fc7c336e | ||
|
|
0e4430bae4 | ||
|
|
f811a9c8f4 | ||
|
|
771999c603 | ||
|
|
18820202d7 | ||
|
|
abc8fb988f | ||
|
|
885c525d7b | ||
|
|
418a2f02b2 | ||
|
|
031078284c | ||
|
|
3c7ba8c9d9 | ||
|
|
835078a84c | ||
|
|
049418c2f5 | ||
|
|
7c03266d16 | ||
|
|
627f06ea9f | ||
|
|
9f84178182 | ||
|
|
54641fc3c5 | ||
|
|
b8995d838a | ||
|
|
a0154c5919 | ||
|
|
08365c412a | ||
|
|
2d291a08fc | ||
|
|
7a17b3df4b | ||
|
|
0589f2fe2a | ||
|
|
a9abd75b51 | ||
|
|
bc6eeec663 | ||
|
|
3132856efe | ||
|
|
4db7628eed | ||
|
|
7b71f59d3e | ||
|
|
76c177bc0f | ||
|
|
50a508b596 | ||
|
|
59c634f626 | ||
|
|
08ebb9e7d6 | ||
|
|
f5f0a5b873 | ||
|
|
1036176e51 | ||
|
|
c6a24e839a | ||
|
|
87421ce74d | ||
|
|
f94ccb9947 | ||
|
|
1f1d4af8ba | ||
|
|
633803e0ad | ||
|
|
74fb8dc75c | ||
|
|
2523a0cf90 | ||
|
|
47a58a9e65 | ||
|
|
27c4fd9b30 | ||
|
|
f3904c599b | ||
|
|
b86f288e86 | ||
|
|
67bdd3664a | ||
|
|
8d4fdb8ece | ||
|
|
0720222423 | ||
|
|
7bca841a1a | ||
|
|
572ab86bc4 | ||
|
|
29cf5940f5 | ||
|
|
4b4f5c39d4 | ||
|
|
5069430a3a | ||
|
|
28d7373779 | ||
|
|
9d5fa04d49 | ||
|
|
43bf8b3f15 | ||
|
|
ed80605755 | ||
|
|
cc84a4637d | ||
|
|
701b759ba2 | ||
|
|
9cb2782ef9 | ||
|
|
5cd0a3c451 | ||
|
|
b74dbb74bb | ||
|
|
d332636dd4 | ||
|
|
32654f6cb2 | ||
|
|
14a90e7138 | ||
|
|
dd2366a15e | ||
|
|
370ba2d461 | ||
|
|
ef655ef5e0 | ||
|
|
5cc550c830 | ||
|
|
52cbe67756 | ||
|
|
c910e0af40 | ||
|
|
7a74534285 | ||
|
|
916775462d | ||
|
|
3d98600126 | ||
|
|
7ba834ee8d | ||
|
|
1c527cba03 | ||
|
|
2d7ba7b246 | ||
|
|
4ed70a8011 | ||
|
|
7a3a148359 | ||
|
|
e07e35110d | ||
|
|
a463753c84 | ||
|
|
a946ddfab1 | ||
|
|
af1cb43bd6 | ||
|
|
935d61324f | ||
|
|
10e016f2ea | ||
|
|
154655d571 | ||
|
|
491cd8f01c | ||
|
|
2b711ff04c | ||
|
|
07eadd2364 | ||
|
|
7086afe73d | ||
|
|
f21398357b | ||
|
|
c338802329 | ||
|
|
c3076e748e | ||
|
|
1595256c86 | ||
|
|
0623592f75 | ||
|
|
f2444f151c | ||
|
|
710a3f2552 | ||
|
|
6071c664e7 | ||
|
|
e203815f4f | ||
|
|
c3658080d3 | ||
|
|
8703470010 | ||
|
|
88cc7e6fec | ||
|
|
badcbc32c6 | ||
|
|
daa8f15ce9 | ||
|
|
89ee174ded | ||
|
|
cc03c2407b | ||
|
|
89254025f3 | ||
|
|
f693c986ec | ||
|
|
3c7f3ecb56 | ||
|
|
ef3809ed25 | ||
|
|
f5ddb7107e | ||
|
|
1d32018c02 | ||
|
|
c458b27573 | ||
|
|
524e0bb0a8 | ||
|
|
d525d1fe59 | ||
|
|
bdc6264701 | ||
|
|
7e2640e2d0 | ||
|
|
822a72efde | ||
|
|
192b13e393 | ||
|
|
2497cb31d0 | ||
|
|
446a0ede54 | ||
|
|
3d39638c99 | ||
|
|
69e87d129a | ||
|
|
f7a7510322 | ||
|
|
e9f2ba47f2 | ||
|
|
b2c95e5a73 | ||
|
|
482ed7c3c6 | ||
|
|
e497f73316 | ||
|
|
28965dc2b1 | ||
|
|
5425ca35b3 | ||
|
|
9f191d0ab8 | ||
|
|
c2a3ce5096 | ||
|
|
1af06f2e0d | ||
|
|
2f2f11b7bc | ||
|
|
260e6015a1 | ||
|
|
7696014545 | ||
|
|
2457e95f0e | ||
|
|
327c907463 | ||
|
|
1b558d3b30 | ||
|
|
a0d55417cd | ||
|
|
f182d441d1 | ||
|
|
e14f215793 | ||
|
|
34536f4e21 | ||
|
|
bc2da6e59e | ||
|
|
afdedc7575 | ||
|
|
69a7a9b180 | ||
|
|
a7540583d6 | ||
|
|
a253d7ebf5 | ||
|
|
3886402f8e | ||
|
|
97e402b980 | ||
|
|
93e15af209 | ||
|
|
9f3a5a13b3 | ||
|
|
3a0fb4bdec | ||
|
|
8a460930c6 | ||
|
|
a11c4c9bd2 | ||
|
|
d4b98b5cb2 | ||
|
|
fdf3908494 | ||
|
|
cc052eb450 | ||
|
|
4535c3005d | ||
|
|
030fed6d12 | ||
|
|
fc9579cd51 | ||
|
|
74712b5410 | ||
|
|
05628a2529 | ||
|
|
1bdd2f2f5b | ||
|
|
3af6e103aa | ||
|
|
3dcc20ae86 | ||
|
|
8a18630ae5 | ||
|
|
b87c331b12 | ||
|
|
9da2414e8b | ||
|
|
285291b2ed | ||
|
|
dbbb4e589e | ||
|
|
064879c4ce | ||
|
|
8614922213 | ||
|
|
745ea0ca10 | ||
|
|
99bc80c15b | ||
|
|
3423d854e1 | ||
|
|
483fd090f1 | ||
|
|
82988b6011 | ||
|
|
d1821163c2 | ||
|
|
7de118adeb | ||
|
|
abe7215bc0 | ||
|
|
7b064e63b9 | ||
|
|
f6890c709f | ||
|
|
e4814dbfbc | ||
|
|
8261b32c99 | ||
|
|
9a73520b25 | ||
|
|
10db4f7210 | ||
|
|
94f6fdcbe7 | ||
|
|
bb467b7aba | ||
|
|
3da503cdea | ||
|
|
b062f0a19f | ||
|
|
2a6e0b0e07 | ||
|
|
1558ee2177 | ||
|
|
3d542e5554 | ||
|
|
c61938ba62 | ||
|
|
c856b88008 | ||
|
|
fce3684113 | ||
|
|
ae25dd65bf | ||
|
|
a88a1c5f1f | ||
|
|
0cd678cc1d | ||
|
|
c9bf58af66 | ||
|
|
ed87a00225 | ||
|
|
bcc3608c04 | ||
|
|
038d28779c | ||
|
|
973d47ee05 | ||
|
|
e803e11c81 | ||
|
|
f50c990d93 | ||
|
|
72eef6f104 | ||
|
|
5ccdb5d100 | ||
|
|
4b8fceaa7d | ||
|
|
a24c9b1350 | ||
|
|
8913b7aedb | ||
|
|
978edfc11f | ||
|
|
f4b41d0fd3 | ||
|
|
20162a16a8 | ||
|
|
73f7bfa64b | ||
|
|
159b0d92dd | ||
|
|
02de2059db | ||
|
|
bdf8901dd8 | ||
|
|
b6a71688c0 | ||
|
|
dcc4eae27e | ||
|
|
65a080ed1d | ||
|
|
48071c28a7 | ||
|
|
2fbc833ebe | ||
|
|
f7b5b35374 | ||
|
|
46bd23cf66 | ||
|
|
138cfdb68c | ||
|
|
6d9dfe6fe1 | ||
|
|
fc48ce9c47 | ||
|
|
3a9a2f4033 | ||
|
|
f0ce0eb653 | ||
|
|
23979b8f22 | ||
|
|
e22a78fac6 | ||
|
|
7690cb1356 | ||
|
|
a96f4c57c9 | ||
|
|
45522b86df | ||
|
|
1135d77147 | ||
|
|
9dfb18a487 | ||
|
|
fbfa0de17e | ||
|
|
ea29aae64a | ||
|
|
e77b0a84c7 | ||
|
|
c5e0bf34cb | ||
|
|
4ba82c1515 | ||
|
|
b172aa7aa3 | ||
|
|
c6f5a4200d | ||
|
|
e2a1762af4 | ||
|
|
07c1a9a0e9 | ||
|
|
31bd75ae45 | ||
|
|
67af85e347 | ||
|
|
df02054b29 | ||
|
|
0cdd48ac1e | ||
|
|
bdda596806 | ||
|
|
8cbae4050e | ||
|
|
d3696fc5ec | ||
|
|
2d33606251 | ||
|
|
89564996df | ||
|
|
e196a6c905 | ||
|
|
7eb038b899 | ||
|
|
1c84f0d721 | ||
|
|
ab519a54d3 | ||
|
|
7ed1cd8b26 | ||
|
|
c6e426cbac | ||
|
|
d3f279374a | ||
|
|
0dc16dab9a | ||
|
|
ed5ff16598 | ||
|
|
c1ecdbda52 | ||
|
|
d3472b3205 | ||
|
|
5cd3dff1d4 | ||
|
|
4d1168803c | ||
|
|
7eec01a52e | ||
|
|
44ebe46f10 | ||
|
|
008f57bec4 | ||
|
|
d0ae5a4198 | ||
|
|
064c2b4be0 | ||
|
|
34d7391363 | ||
|
|
e84e7df3f1 | ||
|
|
8ba9f928d6 | ||
|
|
435ce05ac7 | ||
|
|
ede4c21e30 | ||
|
|
0892fe26ba | ||
|
|
1085d468b9 | ||
|
|
8f0a63fdb7 | ||
|
|
de43cae015 | ||
|
|
f9f015a211 | ||
|
|
3874235074 | ||
|
|
1888539a97 | ||
|
|
d131c279b9 | ||
|
|
92d01ec51a | ||
|
|
97c65c8bd1 | ||
|
|
e8ae0b0251 | ||
|
|
f1f248fa40 | ||
|
|
a007460029 | ||
|
|
7a0e7708be | ||
|
|
5e7d8841af | ||
|
|
d0c5764471 | ||
|
|
82f577c17d | ||
|
|
32296d91b7 | ||
|
|
80fd4e9fc5 | ||
|
|
7b62ef203d | ||
|
|
2f062afb07 | ||
|
|
817d5d9cf7 | ||
|
|
a2cf2357c6 | ||
|
|
815639fe5f | ||
|
|
b1c3b4f9cc | ||
|
|
332a439b7a | ||
|
|
fb5264c479 | ||
|
|
a3705d424b | ||
|
|
a4660a0700 | ||
|
|
77a52bdc8c | ||
|
|
18e55e23ee | ||
|
|
d14973cf0d | ||
|
|
120d494280 | ||
|
|
8c0e3221cb | ||
|
|
5e1d1e557f | ||
|
|
a7e3b1b99e | ||
|
|
1e722e0d58 | ||
|
|
39aa86157b | ||
|
|
df85a4ea9e | ||
|
|
2912bad110 | ||
|
|
a57fdef01d | ||
|
|
7a0ae6ee14 | ||
|
|
f335bedc2a | ||
|
|
b370af35d4 | ||
|
|
d6817aaa1f | ||
|
|
5da002fe6d | ||
|
|
0e16f0acb0 | ||
|
|
d5a7fa9de8 | ||
|
|
9274aba849 | ||
|
|
457e43978f | ||
|
|
f9096e5fac | ||
|
|
e74a2df27f | ||
|
|
1c4138c166 | ||
|
|
050fe9c52b | ||
|
|
a8c49049dd | ||
|
|
8095919bbe | ||
|
|
f7f6087d70 | ||
|
|
ea75d50053 | ||
|
|
44d7a4e222 | ||
|
|
2d6f34fa4d | ||
|
|
d231cc165f | ||
|
|
c2d5b5ded0 | ||
|
|
7bd361fbbb | ||
|
|
66e816fc5f | ||
|
|
20f36a67a0 | ||
|
|
eb75e68b72 | ||
|
|
dc9a28c0c7 | ||
|
|
843feabf25 | ||
|
|
937bd61d5e | ||
|
|
4ee5888b7f | ||
|
|
73eeb1b545 | ||
|
|
16bbfb5478 | ||
|
|
410657c98b | ||
|
|
1625f773c2 | ||
|
|
da8c3a7827 | ||
|
|
6c9e0984a5 | ||
|
|
df17a10014 | ||
|
|
ef52a026bc | ||
|
|
b3c4de320b | ||
|
|
7d9781052f | ||
|
|
855b1517a7 | ||
|
|
1606ceaadb | ||
|
|
46b6c0d27e | ||
|
|
b92ba7be76 | ||
|
|
44dbb06dcc | ||
|
|
e6b0cfccb5 | ||
|
|
5c5faafc72 | ||
|
|
9dc555cad7 | ||
|
|
1394366f91 | ||
|
|
6abf2535b0 | ||
|
|
1c96288178 | ||
|
|
098c5755b0 | ||
|
|
93372a30d0 | ||
|
|
5d88998180 | ||
|
|
574cfae5c6 | ||
|
|
e707087e72 | ||
|
|
3a6f6d8931 | ||
|
|
3766db2b14 | ||
|
|
8174fe0bac | ||
|
|
bd9844f012 | ||
|
|
f64779f70a | ||
|
|
978e5a61f6 | ||
|
|
fa03c20b00 | ||
|
|
bf4f655ed2 | ||
|
|
b3460b15a2 | ||
|
|
7f78e87551 | ||
|
|
3358fcd0b3 | ||
|
|
10345ffda7 | ||
|
|
2f7a22355f | ||
|
|
e63a366fdc | ||
|
|
1031315cdd | ||
|
|
d60dbfbd6b | ||
|
|
a5fd79e220 | ||
|
|
e162f8fd3f | ||
|
|
e4d77a6ba4 | ||
|
|
6517956987 | ||
|
|
ee9cede921 | ||
|
|
1dd25f91fd | ||
|
|
771b7b3804 | ||
|
|
8e367b7e86 | ||
|
|
e20b96e061 | ||
|
|
58854c2ebd | ||
|
|
01fe5c4730 | ||
|
|
e192cf6618 | ||
|
|
88a4c2d9f0 | ||
|
|
95ff3715a3 | ||
|
|
8a90de3c0e | ||
|
|
77f005caab | ||
|
|
a63a6e168c | ||
|
|
3899e8ed39 | ||
|
|
fe2f8617a2 | ||
|
|
7f8544373f | ||
|
|
955845110b | ||
|
|
d4e3bf696b | ||
|
|
0ba5e14d7f | ||
|
|
ea70addbc6 | ||
|
|
87a259e1a4 | ||
|
|
68d7c621d3 | ||
|
|
c54f29e82a | ||
|
|
9dafb0ae9e | ||
|
|
193117f579 | ||
|
|
eef83fc98c | ||
|
|
345d4b3865 | ||
|
|
81b1ec810c | ||
|
|
074284d286 | ||
|
|
e290ae223e | ||
|
|
5c333291e1 | ||
|
|
6bd391d89e | ||
|
|
4a6a9517f1 | ||
|
|
e961d5f8b0 | ||
|
|
5fed283ff4 | ||
|
|
bb5c25d87f | ||
|
|
da459707be | ||
|
|
13ba2762ad | ||
|
|
e9dd297bf3 | ||
|
|
eafad35860 | ||
|
|
d8d4b08252 | ||
|
|
65a2d666a3 | ||
|
|
299d2e8db1 | ||
|
|
8ad5bf6f09 | ||
|
|
112b61d39e | ||
|
|
9d82b0edda | ||
|
|
29837ad39a | ||
|
|
750abb519b | ||
|
|
f873278a96 | ||
|
|
f30c645596 | ||
|
|
67bc3e5225 | ||
|
|
c33b493dcd | ||
|
|
2633161c67 | ||
|
|
33cd6e69ff | ||
|
|
84da6aac8c | ||
|
|
c8537e9474 | ||
|
|
a0476fedec | ||
|
|
f54ae2f1d5 | ||
|
|
2d71a99121 | ||
|
|
c67c6e463e | ||
|
|
c61d322569 | ||
|
|
37e9b6fbf0 | ||
|
|
15b3eb115c | ||
|
|
b32f448957 | ||
|
|
e42353e03f | ||
|
|
a3c204a02d | ||
|
|
a3cd76dbf8 | ||
|
|
785a5f80c6 | ||
|
|
e727734443 | ||
|
|
963ff212ad | ||
|
|
3c84d889fd | ||
|
|
5730e71e2a | ||
|
|
4894683924 | ||
|
|
ce08d49107 | ||
|
|
bcc089bd82 | ||
|
|
036292284d | ||
|
|
ac286888bd | ||
|
|
fdd63afdfe | ||
|
|
ca06a16bd9 | ||
|
|
55b6fccefd | ||
|
|
1d78ab7c42 | ||
|
|
8a7d6d9f1b | ||
|
|
287620d6cd | ||
|
|
5942e47f2a | ||
|
|
a9d1084e05 | ||
|
|
7d7b09ff0f | ||
|
|
1f0a2b5c6c | ||
|
|
ada73a616f | ||
|
|
0a7825dfff | ||
|
|
bfb74c4dba | ||
|
|
41abeeb2e3 | ||
|
|
e99ab58b83 | ||
|
|
933fec55a5 | ||
|
|
fde4bedce6 | ||
|
|
d5b9379732 | ||
|
|
0a47b91913 | ||
|
|
9f51f4826f | ||
|
|
eee491f17c | ||
|
|
8cccbe928d | ||
|
|
ee03d92fdb | ||
|
|
f8b9bdb696 | ||
|
|
ef705f9a2a | ||
|
|
e7185d2cd3 | ||
|
|
30970c8dfb | ||
|
|
2d6f02c407 | ||
|
|
37dbfff766 | ||
|
|
f9d17afad3 | ||
|
|
5234ecb2a7 | ||
|
|
32c60e7ffa | ||
|
|
95c290df50 | ||
|
|
832495bece | ||
|
|
ae6a97d725 | ||
|
|
fac048a24d | ||
|
|
7aebb4d50d | ||
|
|
31b19dd88e | ||
|
|
0129efb02e | ||
|
|
4512790f2d | ||
|
|
5541b18a6a | ||
|
|
b820ee4db7 | ||
|
|
8513ba3644 | ||
|
|
2ad0539a82 | ||
|
|
b7e160f735 | ||
|
|
4a033e6d89 | ||
|
|
378b8609aa | ||
|
|
c8310b1eb9 | ||
|
|
13b2043b76 | ||
|
|
e416bca963 | ||
|
|
74a6e5814a | ||
|
|
994ea5af22 | ||
|
|
336846f6dd | ||
|
|
db59bed69d | ||
|
|
3c24b9f9d9 | ||
|
|
2e06205eda | ||
|
|
c070fc3032 | ||
|
|
ace62f7ed6 | ||
|
|
ccfea527ac | ||
|
|
bda09c032a | ||
|
|
632f011db5 | ||
|
|
914cad72f5 | ||
|
|
895591f628 | ||
|
|
ad9a932143 | ||
|
|
f602e8c36d | ||
|
|
5220eaceee | ||
|
|
61bd7893c7 | ||
|
|
32cd24fc7b | ||
|
|
945600b20d | ||
|
|
e04de5a576 | ||
|
|
0826514cbc | ||
|
|
3cbda5979d | ||
|
|
681b91d368 | ||
|
|
2eb58eae00 | ||
|
|
4c8c1f0013 | ||
|
|
35009e2574 | ||
|
|
75fe0f8ff5 | ||
|
|
440badf1eb | ||
|
|
f7af05a650 | ||
|
|
aca35c0513 | ||
|
|
f8ae2dcffe | ||
|
|
0653460fb2 | ||
|
|
dbe08f7deb | ||
|
|
5989f4f541 | ||
|
|
4375c7f4ce | ||
|
|
3c90e98d43 | ||
|
|
b1af25e4c0 | ||
|
|
6614818418 | ||
|
|
c74153f56a | ||
|
|
4b0a6ea0f7 | ||
|
|
6284c8828b | ||
|
|
0c1e759a45 | ||
|
|
0ac78fcff1 | ||
|
|
e3b540170a | ||
|
|
a7208b0c61 | ||
|
|
9d07ba24b8 | ||
|
|
afdb79b712 | ||
|
|
8fe6b526a5 | ||
|
|
ebbebf7735 | ||
|
|
fe136c8dbe | ||
|
|
da57a966d5 | ||
|
|
f0de6bfb1b | ||
|
|
d74aa9c625 | ||
|
|
0bb0035f3c | ||
|
|
0a52f87f3a | ||
|
|
82a59d80dd | ||
|
|
5defb5867a | ||
|
|
d5d55d4ef3 | ||
|
|
01310ee7fa | ||
|
|
211b36dfa3 | ||
|
|
ddb195db3c | ||
|
|
fa33138d66 | ||
|
|
ad7f5013f3 | ||
|
|
e9b6e26f08 | ||
|
|
2461dca94d | ||
|
|
6f635ac9fa | ||
|
|
e127941d8a | ||
|
|
851be1fcfb | ||
|
|
d8281c70a5 | ||
|
|
bc57c33491 | ||
|
|
999a2a1fc1 | ||
|
|
8b4684679b | ||
|
|
b528c48e3b | ||
|
|
5b19274e6a | ||
|
|
48e61c6377 | ||
|
|
1266c6e971 | ||
|
|
41b0a36b81 | ||
|
|
6801d0b1d1 | ||
|
|
973212254c | ||
|
|
2b2f44b10e | ||
|
|
9431fb827f | ||
|
|
d7af108833 | ||
|
|
c07d240532 | ||
|
|
05e056481f | ||
|
|
8e6cc4d2f2 | ||
|
|
35f7464540 | ||
|
|
c8c0699d9c | ||
|
|
0835b5ed4f | ||
|
|
ca2ad9b2da | ||
|
|
276169e43d | ||
|
|
ad1d9a155e | ||
|
|
8bd48a09a6 | ||
|
|
7727bc5ec3 | ||
|
|
74a1aae168 | ||
|
|
9312366f62 | ||
|
|
8d63db047a | ||
|
|
0359e9dd35 | ||
|
|
91ac2c6445 | ||
|
|
2d4acc3ba3 | ||
|
|
fb6fc68d23 | ||
|
|
3fca086d14 | ||
|
|
dcc541d832 | ||
|
|
300fc3467d | ||
|
|
326ee75cd8 | ||
|
|
b4b2f24c77 | ||
|
|
5414dbdb37 | ||
|
|
dd0034327b | ||
|
|
774e5b38a0 | ||
|
|
f1f4d10fe3 | ||
|
|
84c918bbd5 | ||
|
|
94879b4256 | ||
|
|
bf240a00f4 | ||
|
|
b32e35c574 | ||
|
|
1fbcfa47ac |
24
.gitignore
vendored
24
.gitignore
vendored
@ -1,13 +1,23 @@
|
||||
/target/
|
||||
target/
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
.vscode
|
||||
*.ui~
|
||||
*.gresource
|
||||
_build
|
||||
resources.gresource
|
||||
_build/
|
||||
build/
|
||||
vendor/
|
||||
.criterion/
|
||||
org.gnome.*.json~
|
||||
podcasts-gtk/po/gnome-podcasts.pot
|
||||
|
||||
# scripts/test.sh
|
||||
target_*/
|
||||
|
||||
# flatpak-builder stuff
|
||||
.flatpak-builder/
|
||||
flatpak-build/
|
||||
flatpak-repo/
|
||||
app/
|
||||
repo/
|
||||
Makefile
|
||||
|
||||
# Files configured by meson
|
||||
podcasts-gtk/src/config.rs
|
||||
podcasts-gtk/src/static_resource.rs
|
||||
|
||||
@ -1,76 +1,30 @@
|
||||
stages:
|
||||
# meson uses cargo to do the build
|
||||
# so it's ok to have the tests first.
|
||||
- test
|
||||
# - build
|
||||
- lint
|
||||
|
||||
before_script:
|
||||
- apt-get update -yqq
|
||||
- apt-get install -yqq --no-install-recommends build-essential
|
||||
- apt-get install -yqq --no-install-recommends libgtk-3-dev
|
||||
# - apt-get install -yqq --no-install-recommends meson
|
||||
|
||||
# kcov
|
||||
# - apt-get install -y libcurl4-openssl-dev libelf-dev libdw-dev cmake gcc binutils-dev libiberty-dev
|
||||
include:
|
||||
- project: 'gnome/citemplates'
|
||||
file: 'flatpak/flatpak-ci-initiative-sdk-extensions.yml'
|
||||
# ref: ''
|
||||
|
||||
flatpak:
|
||||
image: 'registry.gitlab.gnome.org/gnome/gnome-runtime-images/rust_bundle:3.36'
|
||||
variables:
|
||||
# RUSTFLAGS: "-C link-dead-code"
|
||||
RUST_BACKTRACE: "FULL"
|
||||
MANIFEST_PATH: "org.gnome.Podcasts.Devel.json"
|
||||
FLATPAK_MODULE: "gnome-podcasts"
|
||||
MESON_ARGS: "-Dprofile=development"
|
||||
APP_ID: "org.gnome.Podcasts.Devel"
|
||||
RUNTIME_REPO: "https://nightly.gnome.org/gnome-nightly.flatpakrepo"
|
||||
BUNDLE: "org.gnome.Podcasts.Devel.flatpak"
|
||||
extends: '.flatpak'
|
||||
|
||||
# Currently doesnt work.
|
||||
# # Build with meson
|
||||
# build:stable:
|
||||
# # Stable img
|
||||
# # https://hub.docker.com/_/rust/
|
||||
# image: "rust"
|
||||
# script:
|
||||
# - rustc --version && cargo --version
|
||||
# - ./configure --prefix=/usr/local
|
||||
# - make && sudo make install
|
||||
|
||||
# build:nightly:
|
||||
# # Nightly
|
||||
# # https://hub.docker.com/r/rustlang/rust/
|
||||
# image: "rustlang/rust:nightly"
|
||||
# script:
|
||||
# - rustc --version && cargo --version
|
||||
# - ./configure --prefix=/usr/local
|
||||
# - make && sudo make install
|
||||
|
||||
test:stable:
|
||||
# Stable img
|
||||
# https://hub.docker.com/_/rust/
|
||||
image: "rust"
|
||||
script:
|
||||
- rustc --version && cargo --version
|
||||
- cargo build
|
||||
- cargo test --verbose -- --test-threads=1
|
||||
|
||||
test:nightly:
|
||||
# Nightly
|
||||
# https://hub.docker.com/r/rustlang/rust/
|
||||
image: "rustlang/rust:nightly"
|
||||
script:
|
||||
- rustc --version && cargo --version
|
||||
- cargo build
|
||||
- cargo test --verbose -- --test-threads=1
|
||||
# - cargo bench
|
||||
|
||||
# Configure and run rustfmt on nightly
|
||||
# Configure and run rustfmt
|
||||
# Exits and builds fails if on bad format
|
||||
lint:rustfmt:
|
||||
image: "rustlang/rust:nightly"
|
||||
rustfmt:
|
||||
image: "rust:slim"
|
||||
stage: ".pre"
|
||||
script:
|
||||
- rustc --version && cargo --version
|
||||
- cargo install rustfmt-nightly
|
||||
- cargo fmt --all -- --write-mode=diff
|
||||
|
||||
# Configure and run clippy on nightly
|
||||
# Only fails on errors atm.
|
||||
lint:clippy:
|
||||
image: "rustlang/rust:nightly"
|
||||
script:
|
||||
- rustc --version && cargo --version
|
||||
- cargo install clippy
|
||||
- cargo clippy --all
|
||||
- rustup component add rustfmt
|
||||
# Create blank versions of our configured files
|
||||
# so rustfmt does not yell about non-existent files or completely empty files
|
||||
- echo -e "" >> podcasts-gtk/src/config.rs
|
||||
- echo -e "" >> podcasts-gtk/src/static_resource.rs
|
||||
- rustc -Vv && cargo -Vv
|
||||
- cargo fmt --version
|
||||
- cargo fmt --all -- --color=always --check
|
||||
|
||||
24
.gitlab/issue_templates/BrokenFeed.md
Normal file
24
.gitlab/issue_templates/BrokenFeed.md
Normal file
@ -0,0 +1,24 @@
|
||||
## Invalid RSS Feed Template.
|
||||
|
||||
Please provide the source of the xml rss feed.
|
||||
|
||||
**Feed URL**
|
||||
|
||||
example.com/podcast
|
||||
|
||||
**Detailed description of the issue**
|
||||
|
||||
Would be helpfull if error messages where included from stderr.
|
||||
If you are not sure how to do it, feel free to ask and we will walk you through it!
|
||||
|
||||
Some common cases might be:
|
||||
* Feed cannot be added
|
||||
* Broken Feed Image
|
||||
* Episode(s) do not download
|
||||
|
||||
Steps to reproduce:
|
||||
|
||||
1. Open GNOME Podcasts
|
||||
2. Do an action
|
||||
3. ...
|
||||
|
||||
@ -1,20 +1,40 @@
|
||||
Detailed description of the issue. Put as much information as you can, potentially
|
||||
with images showing the issue.
|
||||
# Steps to reproduce
|
||||
<!--
|
||||
Explain in detail the steps on how the issue can be reproduced.
|
||||
-->
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
Steps to reproduce:
|
||||
Reproducible in:
|
||||
<!--
|
||||
Please test if the issue was already fixed in the unstable version of the app.
|
||||
For that, follow these steps:
|
||||
1. Make sure Flatpak is installed or install it following these steps https://flatpak.org/setup
|
||||
2. Install the unstable version of the app following, flatpak bundles can be found in the CI artifacts.
|
||||
|
||||
1. Open Hammond
|
||||
2. Do an action
|
||||
3. ...
|
||||
If these steps failed, write in 'Other' the distribution you’re using and
|
||||
the version of the app.
|
||||
-->
|
||||
- Flatpak unstable: (yes or no) <!-- Write "yes" or "no" after the semicolon. -->
|
||||
- Other:
|
||||
|
||||
## Design Tasks
|
||||
# Current behavior
|
||||
<!-- Describe the current behavior. -->
|
||||
|
||||
* [ ] design tasks
|
||||
|
||||
## Development Tasks
|
||||
# Expected behavior
|
||||
<!-- Describe the expected behavior. -->
|
||||
|
||||
* [ ] development tasks
|
||||
|
||||
## QA Tasks
|
||||
# Additional information
|
||||
<!--
|
||||
Provide more information that could be relevant.
|
||||
|
||||
* [ ] qa (quality assurance) tasks
|
||||
If the issue is a crash, provide a stack trace following the steps in:
|
||||
https://wiki.gnome.org/Community/GettingInTouch/Bugzilla/GettingTraces
|
||||
-->
|
||||
|
||||
|
||||
<!-- Ignore the text under this line. -->
|
||||
/label ~"Bug"
|
||||
|
||||
41
.gitlab/issue_templates/Epic.md
Normal file
41
.gitlab/issue_templates/Epic.md
Normal file
@ -0,0 +1,41 @@
|
||||
# Current problems
|
||||
<!--
|
||||
What are the problems that the current project has?
|
||||
|
||||
For example:
|
||||
* User cannot use the keyboard to perform most common actions
|
||||
or
|
||||
* User cannot see documents from cloud services
|
||||
-->
|
||||
|
||||
# Goals & use cases
|
||||
<!--
|
||||
What are the use cases that this proposal will cover? What are the end goals?
|
||||
|
||||
For example:
|
||||
* User needs to share a file with their friends.
|
||||
or
|
||||
* It should be easy to edit a picture within the app.
|
||||
-->
|
||||
|
||||
# Requirements
|
||||
<!--
|
||||
What does the solution needs to ensure for being succesful?
|
||||
|
||||
For example:
|
||||
* Work on small form factors and touch
|
||||
or
|
||||
* Use the Meson build system and integrate with it
|
||||
-->
|
||||
|
||||
# Relevant art
|
||||
<!--
|
||||
Is there any product that has implemented something similar? Put links to other
|
||||
projects, pictures, links to other code, etc.
|
||||
-->
|
||||
|
||||
# Proposal & plan
|
||||
<!-- What's the solution and how should be achieved? It can be split in smaller
|
||||
tasks of minimum change, so they can be delivered across several releases. -->
|
||||
|
||||
/label ~"Epic"
|
||||
@ -1,17 +1,24 @@
|
||||
Detailed description of the feature. Put as much information as you can.
|
||||
### Use cases
|
||||
<!--
|
||||
Describe what problem(s) the user is experiencing and that this request
|
||||
is trying to solve.
|
||||
-->
|
||||
|
||||
Proposed Mockups:
|
||||
|
||||
(Add mockups of the proposed feature)
|
||||
### Desired behavior
|
||||
<!-- Describe the desired functionality. -->
|
||||
|
||||
## Design Tasks
|
||||
|
||||
* [ ] design tasks
|
||||
### Benefits of the solution
|
||||
<!-- List the possible benefits of the solution and how it fits in the project. -->
|
||||
|
||||
## Development Tasks
|
||||
|
||||
* [ ] development tasks
|
||||
### Possible drawbacks
|
||||
<!--
|
||||
Describe possible drawbacks of the feature and list how it could affect
|
||||
the project i.e. UI discoverability, complexity, impact in more or less
|
||||
number of users, etc.
|
||||
-->
|
||||
|
||||
## QA Tasks
|
||||
|
||||
* [ ] qa (quality assurance) tasks
|
||||
<!-- Ignore the text under this line. -->
|
||||
/label ~"Feature"
|
||||
|
||||
1
.gitlab/merge_requests_templates/mr.md
Normal file
1
.gitlab/merge_requests_templates/mr.md
Normal file
@ -0,0 +1 @@
|
||||
### Please attach a relevant issue to this MR, if this doesn't exist please create one.
|
||||
275
CHANGELOG.md
Normal file
275
CHANGELOG.md
Normal file
@ -0,0 +1,275 @@
|
||||
# Changelog
|
||||
All notable changes to this project will be documented in this file.
|
||||
|
||||
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
|
||||
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
### Added:
|
||||
|
||||
### Changed:
|
||||
|
||||
### Fixed:
|
||||
|
||||
### Removed:
|
||||
|
||||
## [0.4.7] - 2019-10-23
|
||||
|
||||
### Added:
|
||||
- Improved appdata validation and meson tests World/podcasts!89
|
||||
- The ability to export show subscriptions to opml files World/podcasts!77
|
||||
- Support for feeds requiring authentication World/podcasts!120
|
||||
|
||||
### Changed:
|
||||
|
||||
- Episodes now have a checkmark to show whether or not they've been played World/podcasts!106
|
||||
- Changed to how errors are shown when adding podcasts World/podcasts!108 World/podcasts!109 World/podcasts!110
|
||||
- Improved integration of cargo and meson World/podcasts!94
|
||||
- Refactored some macros for error handling World/podcasts!82
|
||||
- Refactored the handling of styling changes World/podcasts!119
|
||||
- Updated the icon to better match the HIG guidlines World/podcasts#102
|
||||
- Made Podcasts use a GtkApplication subclass World/podcasts!113
|
||||
- Updated the MPRIS permissions in order to remove a sandbox hole World/podcasts!124
|
||||
- Bumped gtk and libhandy minimum versions
|
||||
|
||||
### Fixed:
|
||||
|
||||
- Rewind now works regardless if its the start or the end of the episode World/podcasts!83
|
||||
- Typos in the README and CONTRIBUTING docs World/podcast!97 World/podcast!98 World/podcast!99 World/podcasts!121
|
||||
- Show cover is reset properly now if there isn't an image World/podcasts#114
|
||||
- Query pairs are no longer stripped from URLs World/podcasts!111
|
||||
- Pause MPRIS button now works on KDE Plasma World/podcasts#115
|
||||
- The playback widget now properly reflects the playback state on episode change World/podcasts!116
|
||||
|
||||
### Removed:
|
||||
|
||||
- All preferences World/podcast!104
|
||||
|
||||
## [0.4.6] - 2018-10-07
|
||||
|
||||
### Added:
|
||||
- Felix, @haecker-felix, wrote an [mpris crate](https://crates.io/crates/mpris-player) and implemented MPRIS2 client side support! !74 #68
|
||||
|
||||
### Changed:
|
||||
- Download Cancel button was changed to an Icon instead of a label !72
|
||||
- The applciation will no longer scale below 360p in width 1933c79f7a87d8261d91ca4e14eb51c1ddc66624
|
||||
- Update to the latest HIG 5050dda4d2f75b706842de8507d115dd5a1bd0a9
|
||||
- Chris, @brainblasted, upgraded hyper to 0.12, this brings openssl 1.1 support !75
|
||||
- Pipeline backend is now completly migrated to tokio-runtime 0887789f5e653dd92ad397fb39561df6dffcb45c
|
||||
- Resume playing an episode will attempt to rewind the track only if more than a minute has passed since the last pause !76
|
||||
|
||||
### Fixed:
|
||||
- Fixed a regression where indexing feeds was blocking the `tokio reactor` #88 !70
|
||||
- Episodeds Listbox no longer resizes when a download starts #89 !72
|
||||
- The `total_size` label of the `EpisodeWidget` now behaves correctly if the request fails #90 !73
|
||||
- The Pipeline will no longer log things in stderr for Requests that returned 304 and are expected to be skipped da361d0cb93cd8edd076859b2c607509a96dac8d
|
||||
- A bug where the HomeView wold get into an invalid state if your only shows had no episodes 32bd2a89a34e8e940b3b260c6be76defe11835ed
|
||||
|
||||
### Translations:
|
||||
|
||||
**Added**
|
||||
- Brazilian Portuguese translation 586cf16f
|
||||
- Swedish translation 2e527250
|
||||
- Italian translation a23297e5
|
||||
- Friulian translation 60e09c0d
|
||||
- Hungarian translation 2751a828
|
||||
- Croatian translation 0476b67b
|
||||
- Latvian translation a681b2c9
|
||||
- Czech translation 3563a964
|
||||
- Catalan translation 6ea3fc91
|
||||
|
||||
**Updated**
|
||||
- German translation
|
||||
- Finnish translation
|
||||
- Polish translation
|
||||
- Turkish translation
|
||||
- Croatian translation
|
||||
- Indonesian translation
|
||||
- Spanish translation
|
||||
|
||||
|
||||
## [0.4.5] - 2018-08-31
|
||||
|
||||
### Added:
|
||||
- [OARS](https://hughsie.github.io/oars/) Tags where added for compatibility with Store clients b0c94dd9
|
||||
- Daniel added support for Translations !46
|
||||
- Svitozar Cherepii(@svito) created a [wiki page](https://wiki.gnome.org/Apps/Podcasts) 70e79e50
|
||||
- Libhandy was added as a dependancy #70
|
||||
- Development builds can now be installed in parallel with stable builds !64
|
||||
|
||||
### Changed:
|
||||
- The update indication was moved to an In-App notification #72
|
||||
- The app icon's accent color was changed from orange to red 0dfb4859
|
||||
- The stack switcher in the Headerbar is now insesitive on Empty Views !63
|
||||
|
||||
### Fixed:
|
||||
- Improved handling of HTTP redirections #64 !61 !62
|
||||
- Fixed a major performance regression when loading show covers !67
|
||||
- More refference cycles have been fixed !59
|
||||
- OPML import dialog now exits properly and no longer keeps the application from shuting down !65
|
||||
- Update action is disabled if there isn't something to update #71
|
||||
|
||||
### Translations:
|
||||
- Added Finish 93696026
|
||||
- Added Polish 1bd6efc0
|
||||
- Added Turkish 73929f2d
|
||||
- Added Spanish !46
|
||||
- Added German 6b6c390c
|
||||
- Added Galician 0060a634
|
||||
- Added Indonesian ded0224f
|
||||
- Added Korean 36f16963
|
||||
|
||||
|
||||
## [0.4.4] - 2018-07-31
|
||||
|
||||
### Changed:
|
||||
- `SendCell` crate was replaced with `Fragile`. (Jorda Petridis) 838320785ebbea94e009698b473495cfec076f54
|
||||
- Update dependancies (Jorda Petridis) 91bea8551998b16e44e5358fdd43c53422bcc6f3
|
||||
|
||||
### Fixed:
|
||||
- Fix more refference cycles. (Jorda Petridis) 3496df24f8d8bfa8c8a53d8f00262d42ee39b41c
|
||||
- Actually fix cargo-vendor (Jorda Petridis)
|
||||
|
||||
## [0.4.3] - 2018-07-27
|
||||
|
||||
### Fixed:
|
||||
|
||||
- Fix the cargo vendor config for the tarball releash script. (Jorda Petridis) a2440c19e11ca4dcdbcb67cd85259a41fe3754d6
|
||||
|
||||
## [0.4.2] - 2018-07-27
|
||||
|
||||
### Changed:
|
||||
|
||||
- Minimum size requested by the Views. (Jorda Petridis) 7c96152f3f53f271247230dccf1c9cd5947b685f
|
||||
|
||||
### Fixed:
|
||||
|
||||
- Screenshot metadata in appstream data. (Jorda Petridis) a2440c19e11ca4dcdbcb67cd85259a41fe3754d6
|
||||
|
||||
## [0.4.1] - 2018-07-26
|
||||
### Added:
|
||||
|
||||
- Custom icons for the fast-forward and rewind actions in the Player were added. (Tobias Bernard) e77000076b3d78b8625f4c7ef367376d0130ece6
|
||||
- Hicolor and symbolic icons for the Application. (Tobias Bernard and Sam Hewitt) edae1b04801dba9d91d5d4145db79b287f0eec2c
|
||||
- Basic prefferences dialog (Zander Brown). [34](https://gitlab.gnome.org/World/podcasts/merge_requests/34)
|
||||
- Dbus service preperation. Not used till the MPRIS2 integration has landed. (Zander Brown) [42](https://gitlab.gnome.org/World/podcasts/merge_requests/42)
|
||||
- Episodes and Images will only get drawn when needed. Big Performance impact. (Jordan Petridis) [43](https://gitlab.gnome.org/World/podcasts/merge_requests/43)
|
||||
|
||||
### Changed:
|
||||
|
||||
- The `ShowWidget` control button were moved to a secondary menu in the Headerbar. (Jordan Petridis) 536805791e336a3e112799be554706bb804d2bef
|
||||
- EmptyView layout improvements. (Jorda Petridis) 3c3d6c1e7f15b88308a9054b15a6ca0d8fa233ce 518ea9c8b57885c44bda9c418b19fef26ae0e55d
|
||||
- Improved the `AddButton` behavior. (Jorda Petridis) 67ab54f8203f19aad198dc49e935127d25432b41
|
||||
|
||||
### Fixed:
|
||||
|
||||
- A couple reffence cycles where fixed. (Jorda Petridis)
|
||||
|
||||
### Removed:
|
||||
|
||||
- The delay between the application startup and the `update_on_startup` action. (Jorda Petridis) 7569465a612ee5ef84d0e58f4e1010c8d14080d4
|
||||
|
||||
## [0.4.0] - 2018-07-04
|
||||
### Added:
|
||||
- Keyboard Shortcuts and a Shortcuts dialog were implemented. (ZanderBrown)
|
||||
[!33](https://gitlab.gnome.org/World/podcasts/merge_requests/33)
|
||||
|
||||
### Changed:
|
||||
- The `FileChooser` of the OPML import was changed to use the `FileChooserNative` widget/API. (ZanderBrown)
|
||||
[!33](https://gitlab.gnome.org/World/podcasts/merge_requests/33)
|
||||
- The `EpisdeWidget` was refactored.
|
||||
[!38](https://gitlab.gnome.org/World/podcasts/merge_requests/38)
|
||||
- `EpisdeWidget`'s progressbar was changed to be non-blocking and should feel way more responsive now. 9b0ac5b83dadecdff51cd398293afdf0d5276012
|
||||
- An embeded audio player was implemented!
|
||||
[!40](https://gitlab.gnome.org/World/podcasts/merge_requests/40)
|
||||
- Various Database changes.
|
||||
[!41](https://gitlab.gnome.org/World/podcasts/merge_requests/41)
|
||||
|
||||
### Fixed:
|
||||
- Fixed a bug whre the about dialog would be unclosable. (ZanderBrown) [!37](https://gitlab.gnome.org/World/podcasts/merge_requests/37)
|
||||
|
||||
## [0.3.4] - 2018-05-20
|
||||
### Fixed:
|
||||
- Flatpak can now access the Home folder. This fixes the OPML import feature from
|
||||
not being able to access any file.
|
||||
|
||||
## [0.3.3] - 2018-05-19
|
||||
### Added:
|
||||
- Initial functionality for importing shows from an OPML file was implemented.
|
||||
- ShowsView now rembmers the vertical alignment of the scrollbar between refreshes. 4d2b64e79d8518454b3677612664cd32044cf837
|
||||
|
||||
### Changed:
|
||||
- Minimum `rustc` version requirment was bumped to `1.26`
|
||||
- Some animations should be smoother now. 7d598bb1d08b05fd5ab532657acdad967c0afbc3
|
||||
- InAppNotification now can be used to propagate some erros to the user. 7035fe05c4741b3e7ccce6827f72766226d5fc0a and 118dac5a1ab79c0b4ebe78e88256a4a38b138c04
|
||||
|
||||
### Fixed:
|
||||
- Fixed a of by one bug in the `ShowsView` where the last show was never shown. bd12b09cbc8132fd39a266fd091e24bc6c3c040f
|
||||
|
||||
## [0.3.2] - 2018-05-07
|
||||
### Added:
|
||||
- Vies now have a new fancy scrolling animation when they are refereshed.
|
||||
|
||||
### Changed:
|
||||
- Downlaoding and loading images now is done asynchronously and is not blocking programs execution.
|
||||
[#7](https://gitlab.gnome.org/World/podcasts/issues/7)
|
||||
- Bold, italics links and some other `html` tags can now be rendered in the Show Description.
|
||||
[#25](https://gitlab.gnome.org/World/podcasts/issues/25)
|
||||
- `Rayon` Threadpools are now used instead of unlimited one-off threads.
|
||||
- `EpisdeWidget`s are now loaded asynchronously accross views.
|
||||
- `EpisodeWidget`s no longer trigger a `View` refresh for trivial stuff 03bd95184808ccab3e0ea0e3713a52ee6b7c9ab4
|
||||
- `ShowWidget` layout was changed 9a5cc1595d982f3232ee7595b83b6512ac8f6c88
|
||||
- `ShowWidget` Description is inside a scrolled window now
|
||||
|
||||
### Fixed:
|
||||
- `EpisodeWidget` Height now is consistent accros views [#57](https://gitlab.gnome.org/World/podcasts/issues/57)
|
||||
- Implemented a tail-recursion loop to follow-up when a feed redirects to another url. c6a24e839a8ba77d09673f299cfc1e64ba7078f3
|
||||
|
||||
### Removed:
|
||||
- Removed the custom configuration file and replaced instructions to just use meson. 1f1d4af8ba7db8f56435d13a1c191ecff3d4a85b
|
||||
|
||||
## [0.3.1] - 2018-03-28
|
||||
### Added:
|
||||
- Ability to mark all episodes of a Show as watched.
|
||||
[#47](https://gitlab.gnome.org/World/podcasts/issues/47)
|
||||
- Now you are able to subscribe to itunes™ podcasts by using the itunes link of the show.
|
||||
[#49](https://gitlab.gnome.org/World/podcasts/issues/49)
|
||||
- Hammond now remembers the window size and position. (Rowan Lewis)
|
||||
[#50](https://gitlab.gnome.org/World/podcasts/issues/50)
|
||||
- Implemnted the initial work for integrating with GSettings and storing preferences. (Rowan Lewis)
|
||||
[!22](https://gitlab.gnome.org/World/podcasts/merge_requests/22) [!23](https://gitlab.gnome.org/World/podcasts/merge_requests/23)
|
||||
- Shows without episodes now display an empty message similar to EmptyView.
|
||||
[#44](https://gitlab.gnome.org/World/podcasts/issues/44)
|
||||
|
||||
### Changed:
|
||||
- EpisdeWidget has been reimplemented as a compile time state machine.
|
||||
[!18](https://gitlab.gnome.org/World/podcasts/merge_requests/18)
|
||||
- Content Views no longer scroll horizontally when shrunk bellow their minimum size.
|
||||
[#35](https://gitlab.gnome.org/World/podcasts/issues/35)
|
||||
- Some requests now use the Tor Browser's user agent. (Rowan Lewis)
|
||||
[#53](https://gitlab.gnome.org/World/podcasts/issues/53)
|
||||
|
||||
### Fixed:
|
||||
- Double border aroun the main window was fixed. (Rowan Lewis)
|
||||
[#52](https://gitlab.gnome.org/World/podcasts/issues/52)
|
||||
|
||||
## [0.3.0] - 2018-02-11
|
||||
- Tobias Bernard Redesigned the whole Gtk+ client.
|
||||
- Complete re-write of hammond-data and hammond-gtk modules.
|
||||
- Error handling for all crates was migrated from error-chain to Failure.
|
||||
- Hammond-data now uses futures to parse feeds.
|
||||
- Custom gtk-widgets are now composed structs as opposed to functions returning Gtk widgets.
|
||||
|
||||
## [0.2.0] - 2017-11-28
|
||||
- Database Schema Breaking Changes.
|
||||
- Added url sanitization. #4.
|
||||
- Reworked and refactored of the hammond-data API.
|
||||
- Added some more unit tests
|
||||
- Documented hammond-data public API.
|
||||
|
||||
## [0.1.1] - 2017-11-13
|
||||
- Added appdata.xml file
|
||||
|
||||
## [0.1.0] - 2017-11-13
|
||||
- Initial Release
|
||||
@ -1,30 +1,60 @@
|
||||
## Contributing
|
||||
## Contributing to GNOME Podcasts
|
||||
|
||||
When contributing to the development of Hammond, please first discuss the change you wish to make via issue, email, or any other method with the maintainers before making a change.
|
||||
Thank you for looking in this file!
|
||||
|
||||
Please note we have a code of conduct, please follow it in all your interactions with the project.
|
||||
When contributing to the development of GNOME Podcasts, please first discuss the change you wish to make via issue, email, or any other method with the maintainers before making a change.
|
||||
|
||||
If you have any questions regarding the use or development of GNOME Podcasts,
|
||||
want to discuss design or simply hang out, please join us in [#gnome-podcasts:matrix.org](https://matrix.to/#/#gnome-podcasts:matrix.org) or [#hammond on irc.gnome.org.](irc://irc.gnome.org/#hammond)
|
||||
|
||||
Please note we have a [code of conduct](/code-of-conduct.md), please follow it in all your interactions with the project.
|
||||
|
||||
## Source repository
|
||||
|
||||
GNOME Podcasts's main source repository is at gitlab.gnome.org. You can view
|
||||
the web interface [here](https://gitlab.gnome.org/World/podcasts)
|
||||
|
||||
Development happens in the master branch.
|
||||
|
||||
Note that we don't do bug tracking in the Github mirror.
|
||||
|
||||
If you need to publish a branch, feel free to do it at any
|
||||
publically-accessible Git hosting service, although gitlab.gnome.org
|
||||
makes things easier for the maintainers.
|
||||
|
||||
## Style
|
||||
|
||||
We use rustfmt for code formatting and we enforce it on the gitlab-CI server.
|
||||
We use [rustfmt](https://github.com/rust-lang-nursery/rustfmt) for code formatting and we enforce it on the gitlab-CI server.
|
||||
|
||||
***Installing rustfmt*** As of 2019/Jan, our continuous integration
|
||||
pipeline assumes the version of rustfmt that is distributed through the
|
||||
stable channel of [rustup](rustup.rs). You can install it with
|
||||
|
||||
Quick setup
|
||||
```
|
||||
cargo install rustfmt-nightly
|
||||
rustup component add rustfmt
|
||||
cargo fmt --all
|
||||
```
|
||||
|
||||
It is recommended to add a pre-commit hook to run cargo test and cargo fmt
|
||||
It is recommended to add a pre-commit hook to run cargo test and `cargo fmt`.
|
||||
Don't forget to `git add` again after `cargo fmt`.
|
||||
```
|
||||
#!/bin/sh
|
||||
cargo test -- --test-threads=1 && cargo fmt --all -- --write-mode=diff
|
||||
cargo test -- --test-threads=1 && cargo fmt --all -- --check
|
||||
```
|
||||
|
||||
## Running the test suite
|
||||
|
||||
Running the tests requires an internet connection and it it will download some files from the [Internet Archive](archive.org)
|
||||
|
||||
The test suite sets a temporary sqlite database in the `/tmp` folder.
|
||||
Due to that it's not possible to run them in parallel.
|
||||
|
||||
In order to run the test suite use the following: `cargo test -- --test-threads=1`
|
||||
|
||||
# Issues, issues and more issues!
|
||||
|
||||
There are many ways you can contribute to Hammond, and all of them involve creating issues
|
||||
in [Hammond issue tracker](https://gitlab.gnome.org/alatiera/Hammond/issues). This is the
|
||||
entry point for your contribution.
|
||||
There are many ways you can contribute to GNOME Podcasts, and all of them involve creating issues
|
||||
in [GNOME Podcasts issue tracker](https://gitlab.gnome.org/World/podcasts/issues). This is the entry point for your contribution.
|
||||
|
||||
To create an effective and high quality ticket, try to put the following information on your
|
||||
ticket:
|
||||
@ -47,7 +77,7 @@ If it's an issue, add the steps to reproduce like this:
|
||||
|
||||
Steps to reproduce:
|
||||
|
||||
1. Open Hammond
|
||||
1. Open GNOME Podcasts
|
||||
2. Do an Action
|
||||
3. ...
|
||||
|
||||
@ -64,11 +94,13 @@ Steps to reproduce:
|
||||
* [ ] qa (quality assurance) tasks
|
||||
```
|
||||
|
||||
## Pull Request Process
|
||||
## Merge Request Process
|
||||
|
||||
1. Ensure your code compiles. Run `make` before creating the pull request.
|
||||
2. If you're adding new API, it must be properly documented.
|
||||
3. The commit message is formatted as follows:
|
||||
1. Ensure your code compiles. Run `meson` & `ninja` before creating the merge request.
|
||||
2. Ensure the test suit passes. Run `cargo test -- --test-threads=1`.
|
||||
3. Ensure your code is properly formatted. Run `cargo fmt --all`.
|
||||
4. If you're adding new API, it must be properly documented.
|
||||
5. The commit message has to be formatted as follows:
|
||||
```
|
||||
component: <summary>
|
||||
|
||||
@ -78,8 +110,8 @@ Steps to reproduce:
|
||||
|
||||
<link to the bug ticket>
|
||||
```
|
||||
4. You may merge the pull request in once you have the sign-off of the maintainers, or if you
|
||||
6. You may merge the merge request once you have the sign-off of the maintainers, or if you
|
||||
do not have permission to do that, you may request the second reviewer to merge it for you.
|
||||
|
||||
## Code of Conduct
|
||||
We follow the Gnome [Code of Conduct.](https://wiki.gnome.org/Foundation/CodeOfConduct)
|
||||
We follow the [GNOME Foundation Code of Conduct](/code-of-conduct.md).
|
||||
|
||||
3106
Cargo.lock
generated
3106
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
13
Cargo.toml
13
Cargo.toml
@ -1,14 +1,9 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"hammond-data",
|
||||
"hammond-downloader",
|
||||
"hammond-gtk"
|
||||
"podcasts-data",
|
||||
"podcasts-downloader",
|
||||
"podcasts-gtk"
|
||||
]
|
||||
|
||||
[profile.release]
|
||||
debug = false
|
||||
|
||||
[patch.crates-io]
|
||||
diesel = { git = "https://github.com/diesel-rs/diesel.git" }
|
||||
diesel_infer_schema = { git = "https://github.com/diesel-rs/diesel.git" }
|
||||
diesel_codegen = { git = "https://github.com/diesel-rs/diesel.git" }
|
||||
debug = true
|
||||
|
||||
4
LICENSE
4
LICENSE
@ -631,7 +631,7 @@ to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
Hammond
|
||||
GNOME Podcasts
|
||||
Copyright (C) 2017 Jordan Petridis
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
@ -652,7 +652,7 @@ Also add information on how to contact you by electronic and paper mail.
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
Hammond Copyright (C) 2017 Jordan Petridis
|
||||
GNOME Podcasts Copyright (C) 2017 Jordan Petridis
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
169
README.md
169
README.md
@ -1,128 +1,143 @@
|
||||
# Hammond
|
||||
## Multithreaded and reliable Gtk+ Podcast client.
|
||||
This is a prototype of a podcast client written in Rust.
|
||||
# GNOME Podcasts
|
||||
|
||||
[](https://gitlab.gnome.org/alatiera/Hammond/commits/master)
|
||||
### A Podcast application for GNOME.
|
||||
Listen to your favorite podcasts, right from your desktop.
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
## Getting in Touch
|
||||
If you have any questions regarding the
|
||||
use or development of Hammond, want to discuss design or simply hang out, please join us in [#hammond on irc.gnome.org.](irc://irc.gnome.org/#hammond)
|
||||
## Available on Flathub
|
||||
|
||||
Sidenote:
|
||||
|
||||
There isn't much documentation yet, so you will probably have question about parts of the Code.
|
||||
[](https://flathub.org/apps/details/org.gnome.Podcasts)
|
||||
|
||||
## Quick start
|
||||
The following steps assume you have a working installation of rustc and cargo.
|
||||
If you dont take a look at [rustup.rs](rustup.rs)
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/hammond.git
|
||||
cd Hammond/
|
||||
cargo run -p hammond-gtk --release
|
||||
```
|
||||
GNOME Podcasts can be built and run with [Gnome Builder][builder] >= 3.28.
|
||||
Just clone the repo and hit the run button!
|
||||
|
||||
## Install from soure
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/hammond.git
|
||||
cd Hammond/
|
||||
./configure --prefix=/usr/local
|
||||
make && sudo make install
|
||||
```
|
||||
You can get Builder from [here][get_builder].
|
||||
|
||||
**Additionall:**
|
||||
## Broken Feeds
|
||||
|
||||
You can run `sudo make uninstall` for removal
|
||||
Found a feed that does not work in GNOME Podcasts?
|
||||
Please [open an issue][new_issue] and choose the `BrokenFeed` template so we will know and fix it!
|
||||
|
||||
And `make clean` to clean up the enviroment after instalation.
|
||||
## Getting in Touch
|
||||
|
||||
### Flatpak
|
||||
Flatpak instructions... Soon™.
|
||||
If you have any questions regarding the use or development of GNOME Podcasts,
|
||||
want to discuss design or simply hang out, please join us on our [irc][irc] or [matrix][matrix] channel.
|
||||
|
||||
## Building
|
||||
|
||||
### Dependancies
|
||||
### Flatpak
|
||||
|
||||
* Rust stable 1.22 or later.
|
||||
* Gtk+ 3.22 or later
|
||||
Flatpak is the recommended way of building and installing GNOME Podcasts.
|
||||
Here are the dependencies you will need.
|
||||
|
||||
```sh
|
||||
# Add flathub and the gnome-nightly repo
|
||||
flatpak remote-add --user --if-not-exists flathub https://dl.flathub.org/repo/flathub.flatpakrepo
|
||||
flatpak remote-add --user --if-not-exists gnome-nightly https://nightly.gnome.org/gnome-nightly.flatpakrepo
|
||||
|
||||
# Install the gnome-nightly Sdk and Platform runtime
|
||||
flatpak install --user gnome-nightly org.gnome.Sdk org.gnome.Platform
|
||||
|
||||
# Install the required rust-stable extension from flathub
|
||||
flatpak install --user flathub org.freedesktop.Sdk.Extension.rust-stable//19.08
|
||||
```
|
||||
|
||||
To install the resulting flatpak you can do:
|
||||
|
||||
```bash
|
||||
flatpak-builder --user --install --force-clean --repo=repo podcasts org.gnome.Podcasts.json
|
||||
```
|
||||
|
||||
### Building from source
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/World/podcasts.git
|
||||
cd gnome-podcasts/
|
||||
meson --prefix=/usr build
|
||||
ninja -C build
|
||||
sudo ninja -C build install
|
||||
```
|
||||
|
||||
#### Dependencies
|
||||
|
||||
* Rust stable 1.34 or later along with cargo.
|
||||
* Gtk+ 3.24.11 or later
|
||||
* Gstreamer 1.16 or later
|
||||
* libhandy 0.0.11 or later
|
||||
* Meson
|
||||
* A network connection
|
||||
|
||||
**Debian/Ubuntu**:
|
||||
```sh
|
||||
apt-get update -yqq
|
||||
apt-get install -yqq --no-install-recommends build-essential
|
||||
apt-get install -yqq --no-install-recommends libgtk-3-dev meson
|
||||
```
|
||||
|
||||
**Fedora**:
|
||||
```sh
|
||||
dnf install -y gtk3-devel glib2-devel openssl-devel sqlite-devel meson
|
||||
```
|
||||
|
||||
If you happen to build it on other distributions please let me know the names of the corresponding libraries. Feel free to open a PR or an Issue to note it.
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/Hammond.git
|
||||
cd Hammond/
|
||||
cargo build --all
|
||||
```
|
||||
|
||||
## Call for designers
|
||||
|
||||
Currently there no design plans or mockups. They are highly needed in order to advance the Gtk Client.
|
||||
|
||||
There is the will for a complete client re-write if a someone contributes the mockups.
|
||||
|
||||
If you happen to be a designer and want to contribute please hope on [#hammond](https://docs.python.org/3/library/exceptions.html) and get in touch with us.
|
||||
Offline builds are possible too, but [`cargo-vendor`][vendor] would have to be set up first
|
||||
|
||||
## Contributing
|
||||
|
||||
There alot of thins yet to be done.
|
||||
There are a lot of things yet to be done.
|
||||
|
||||
If you want to contribute, please check the [Contributions Guidelines][contribution-guidelines].
|
||||
|
||||
You can find start by taking a look at [Issues](https://gitlab.gnome.org/alatiera/Hammond/issues) or Opening a [New one](https://gitlab.gnome.org/alatiera/Hammond/issues/new?issue%5Bassignee_id%5D=&issue%5Bmilestone_id%5D=).
|
||||
You can start by taking a look at [Issues][issues] or by opening a [New issue][new_issue].
|
||||
|
||||
There are also some minor tasks tagged with `TODO:` and `FIXME:` in the source code.
|
||||
|
||||
[contribution-guidelines]: https://gitlab.gnome.org/alatiera/Hammond/blob/master/CONTRIBUTING.md
|
||||
[contribution-guidelines]: https://gitlab.gnome.org/World/podcasts/blob/master/CONTRIBUTING.md
|
||||
|
||||
### Translations
|
||||
|
||||
Translation of this project takes place on the GNOME translation platform,
|
||||
[Damned Lies](https://l10n.gnome.org/module/podcasts). For further
|
||||
information on how to join a language team, or even to create one, please see
|
||||
[GNOME Translation Project wiki page](https://wiki.gnome.org/TranslationProject).
|
||||
|
||||
|
||||
## Overview
|
||||
|
||||
```sh
|
||||
$ tree -d
|
||||
├── assets # png's used in the README.md
|
||||
├── hammond-data # Storate related stuff, Sqlite db, XDG setup.
|
||||
│ ├── migrations # Diesel migrations.
|
||||
├── screenshots # png's used in the README.md
|
||||
├── podcasts-data            # Storage related stuff, SQLite, XDG setup, RSS Parser.
|
||||
│ ├── migrations # Diesel SQL migrations.
|
||||
│ │ └── ...
|
||||
│ ├── src
|
||||
│ └── tests
|
||||
│ └── feeds # Raw RSS Feeds used for tests.
|
||||
├── hammond-downloader # Really basic, Really crappy downloader.
|
||||
├── podcasts-downloader # Really basic, Really crappy downloader.
|
||||
│ └── src
|
||||
├── hammond-gtk # The Gtk+ Client
|
||||
├── podcasts-gtk # The Gtk+ Client
|
||||
│ ├── resources # GResources folder
|
||||
│ │ └── gtk # Contains the glade.ui files.
|
||||
│ └── src
|
||||
│ ├── views # Currently only contains the Podcasts_view.
|
||||
│ └── widgets # Contains custom widgets such as Podcast and Episode.
|
||||
│ ├── stacks # Contains the gtk Stacks that hold all the different views.
|
||||
│ └── widgets # Contains custom widgets such as Show and Episode.
|
||||
```
|
||||
|
||||
## A note about the project's name
|
||||
|
||||
The project was named after Allan Moore's character [Evey Hammond](https://en.wikipedia.org/wiki/Evey_Hammond) from the graphic novel V for Vendetta.
|
||||
|
||||
It has nothing to do with the horrible headlines on the news.
|
||||
The project used to be called Hammond, after Alan Moore's character [Evey Hammond][hammond] from the graphic novel V for Vendetta.
|
||||
It was renamed to GNOME Podcasts on 2018/07/24 shortly before its first public release.
|
||||
|
||||
## Acknowledgments
|
||||
|
||||
Hammond's design is heavily insired by [Gnome-Music](https://wiki.gnome.org/Design/Apps/Music) and [Vocal](http://vocalproject.net/).
|
||||
GNOME Podcasts's design is heavily inspired by [GNOME Music][music] and [Vocal][vocal].
|
||||
|
||||
We also copied some elements from [Gnome-news](https://wiki.gnome.org/Design/Apps/Potential/News).
|
||||
We also copied some elements from [GNOME News][news].
|
||||
|
||||
And almost the entirety of the build system is copied from the [Fractal](https://gitlab.gnome.org/danigm/fractal) project.
|
||||
And almost the entirety of the build system is copied from the [Fractal][fractal] project.
|
||||
|
||||
[vendor]: https://github.com/alexcrichton/cargo-vendor
|
||||
[irc]: irc://irc.gnome.org/#hammond
|
||||
[matrix]: https://matrix.to/#/#gnome-podcasts:matrix.org
|
||||
[flatpak_setup]: https://flatpak.org/setup/
|
||||
[music]: https://wiki.gnome.org/Design/Apps/Music
|
||||
[vocal]: http://vocalproject.net/
|
||||
[news]: https://wiki.gnome.org/Design/Apps/Potential/News
|
||||
[fractal]: https://gitlab.gnome.org/World/fractal
|
||||
[hammond]: https://en.wikipedia.org/wiki/Evey_Hammond
|
||||
[issues]: https://gitlab.gnome.org/World/podcasts/issues
|
||||
[new_issue]: https://gitlab.gnome.org/World/podcasts/issues/new
|
||||
[builder]: https://wiki.gnome.org/Apps/Builder
|
||||
[get_builder]: https://wiki.gnome.org/Apps/Builder/Downloads
|
||||
|
||||
41
TODO.md
41
TODO.md
@ -1,50 +1,25 @@
|
||||
## TODOs:
|
||||
|
||||
**General:**
|
||||
|
||||
- [ ] Write docs
|
||||
# TODOs
|
||||
|
||||
## Planned Features
|
||||
|
||||
## Priorities:
|
||||
## Priorities
|
||||
|
||||
- [ ] Discuss and decide when to schedule the download cleaner. [#3](https://gitlab.gnome.org/alatiera/Hammond/issues/3)
|
||||
- [ ] Unplayed Only and Downloaded only view.
|
||||
- [ ] Auto-updater
|
||||
- [ ] OPML import/export // Probably need to create a crate.
|
||||
|
||||
**Proper Desing Mockups for the Gtk Client:**
|
||||
## Second
|
||||
|
||||
- [ ] Re-design EpisodeWidget.
|
||||
- [ ] Re-design PodcastWidget.
|
||||
- [ ] Polish the flowbox_child banner.
|
||||
|
||||
## Second:
|
||||
|
||||
- [ ] Make use of file metadas, [This](https://github.com/GuillaumeGomez/audio-video-metadata) might be helpfull.
|
||||
- [ ] Notifications
|
||||
- [ ] Make use of file metadata? [This](https://github.com/GuillaumeGomez/audio-video-metadata) might be helpful.
|
||||
- [ ] Episode queue
|
||||
- [ ] Embedded player
|
||||
- [ ] MPRIS integration
|
||||
- [ ] Search Implementation
|
||||
|
||||
|
||||
## Third:
|
||||
## Third
|
||||
|
||||
- [ ] Download Queue
|
||||
- [ ] Ability to Stream content on demand
|
||||
- [ ] soundcloud and itunes feeds // [This](http://getrssfeed.com) seems intresting.
|
||||
- [ ] Integrate with Itunes API for various crap
|
||||
- [ ] YoutubeFeeds
|
||||
- [ ] RSS feeds from SoundCloud URLs? // [This](http://getrssfeed.com) seems interesting.
|
||||
- [ ] Integrate with Itunes API for various crap?
|
||||
- [ ] YoutubeFeeds?
|
||||
|
||||
## Rest Tasks
|
||||
|
||||
**Would be nice:**
|
||||
|
||||
- [ ] Make Podcast cover fetchng and loading not block the execution of the program at startup.
|
||||
- [ ] Lazy evaluate episode loading based on the podcast_widget's view scrolling.
|
||||
- [ ] Headerbar back button and stack switching
|
||||
|
||||
**FIXME:**
|
||||
|
||||
- [ ] Fix Etag/Last-modified implementation. [#2](https://gitlab.gnome.org/alatiera/Hammond/issues/2)
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 108 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 510 KiB |
126
code-of-conduct.md
Normal file
126
code-of-conduct.md
Normal file
@ -0,0 +1,126 @@
|
||||
# GNOME Code of Conduct
|
||||
|
||||
Thank you for being a part of the GNOME project. We value your participation and want everyone to have an enjoyable and fulfilling experience. Accordingly, all participants are expected to follow this Code of Conduct, and to show respect, understanding, and consideration to one another. Thank you for helping make this a welcoming, friendly community for everyone.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies to all online GNOME community spaces, including, but not limited to:
|
||||
|
||||
* Issue tracking systems - bugzilla.gnome.org
|
||||
* Documentation and tutorials - developer.gnome.org
|
||||
* Code repositories - git.gnome.org and gitlab.gnome.org
|
||||
* Mailing lists - mail.gnome.org
|
||||
* Wikis - wiki.gnome.org
|
||||
* Chat and forums - irc.gnome.org, discourse.gnome.org, GNOME Telegram channels, and GNOME groups and channels on Matrix.org (including bridges to GNOME IRC channels)
|
||||
* Community spaces hosted on gnome.org infrastructure
|
||||
* Any other channels or groups which exist in order to discuss GNOME project activities
|
||||
|
||||
Communication channels and private conversations that are normally out of scope may be considered in scope if a GNOME participant is being stalked or harassed. Social media conversations may be considered in-scope if the incident occurred under a GNOME event hashtag, or when an official GNOME account on social media is tagged, or within any other discussion about GNOME. The GNOME Foundation reserves the right to take actions against behaviors that happen in any context, if they are deemed to be relevant to the GNOME project and its participants.
|
||||
|
||||
All participants in GNOME online community spaces are subject to the Code of Conduct. This includes GNOME Foundation board members, corporate sponsors, and paid employees. This also includes volunteers, maintainers, leaders, contributors, contribution reviewers, issue reporters, GNOME users, and anyone participating in discussion in GNOME online spaces.
|
||||
|
||||
## Reporting an Incident
|
||||
|
||||
If you believe that someone is violating the Code of Conduct, or have
|
||||
any other concerns, please [contact the Code of Conduct committee](https://wiki.gnome.org/Foundation/CodeOfConduct/ReporterGuide).
|
||||
|
||||
## Our Standards
|
||||
|
||||
The GNOME online community is dedicated to providing a positive experience for everyone, regardless of:
|
||||
|
||||
* age
|
||||
* body size
|
||||
* caste
|
||||
* citizenship
|
||||
* disability
|
||||
* education
|
||||
* ethnicity
|
||||
* familial status
|
||||
* gender expression
|
||||
* gender identity
|
||||
* genetic information
|
||||
* immigration status
|
||||
* level of experience
|
||||
* nationality
|
||||
* personal appearance
|
||||
* pregnancy
|
||||
* race
|
||||
* religion
|
||||
* sex characteristics
|
||||
* sexual orientation
|
||||
* sexual identity
|
||||
* socio-economic status
|
||||
* tribe
|
||||
* veteran status
|
||||
|
||||
### Community Guidelines
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
|
||||
* **Be friendly.** Use welcoming and inclusive language.
|
||||
* **Be empathetic.** Be respectful of differing viewpoints and experiences.
|
||||
* **Be respectful.** When we disagree, we do so in a polite and constructive manner.
|
||||
* **Be considerate.** Remember that decisions are often a difficult choice between competing priorities. Focus on what is best for the community. Keep discussions around technology choices constructive and respectful.
|
||||
* **Be patient and generous.** If someone asks for help it is because they need it. When documentation is available that answers the question, politely point them to it. If the question is off-topic, suggest a more appropriate online space to seek help.
|
||||
* **Try to be concise.** Read the discussion before commenting in order to not repeat a point that has been made.
|
||||
|
||||
### Inappropriate Behavior
|
||||
|
||||
Community members asked to stop any inappropriate behavior are expected to comply immediately.
|
||||
|
||||
We want all participants in the GNOME community have the best possible experience they can. In order to be clear what that means, we've provided a list of examples of behaviors that are inappropriate for GNOME community spaces:
|
||||
|
||||
* **Deliberate intimidation, stalking, or following.**
|
||||
* **Sustained disruption of online discussion, talks, or other events.** Sustained disruption of events, online discussions, or meetings, including talks and presentations, will not be tolerated. This includes 'Talking over' or 'heckling' event speakers or influencing crowd actions that cause hostility in event sessions. Sustained disruption also includes drinking alcohol to excess or using recreational drugs to excess, or pushing others to do so.
|
||||
* **Harassment of people who don't drink alcohol.** We do not tolerate derogatory comments about those who abstain from alcohol or other substances. We do not tolerate pushing people to drink, talking about their abstinence or preferences to others, or pressuring them to drink - physically or through jeering.
|
||||
* **Sexist, racist, homophobic, transphobic, ableist language or otherwise exclusionary language.** This includes deliberately referring to someone by a gender that they do not identify with, and/or questioning the legitimacy of an individual's gender identity. If you're unsure if a word is derogatory, don't use it. This also includes repeated subtle and/or indirect discrimination.
|
||||
* **Unwelcome sexual attention or behavior that contributes to a sexualized environment.** This includes sexualized comments, jokes or imagery in interactions, communications or presentation materials, as well as inappropriate touching, groping, or sexual advances. Sponsors should not use sexualized images, activities, or other material. Meetup organizing staff and other volunteer organizers should not use sexualized clothing/uniforms/costumes, or otherwise create a sexualized environment.
|
||||
* **Unwelcome physical contact.** This includes touching a person without permission, including sensitive areas such as their hair, pregnant stomach, mobility device (wheelchair, scooter, etc) or tattoos. This also includes physically blocking or intimidating another person. Physical contact or simulated physical contact (such as emojis like "kiss") without affirmative consent is not acceptable. This includes sharing or distribution of sexualized images or text.
|
||||
* **Violence or threats of violence.** Violence and threats of violence are not acceptable - online or offline. This includes incitement of violence toward any individual, including encouraging a person to commit self-harm. This also includes posting or threatening to post other people's personally identifying information ("doxxing") online.
|
||||
* **Influencing or encouraging inappropriate behavior.** If you influence or encourage another person to violate the Code of Conduct, you may face the same consequences as if you had violated the Code of Conduct.
|
||||
* **Possession of an offensive weapon at a GNOME event.** This includes anything deemed to be a weapon by the event organizers.
|
||||
|
||||
The GNOME community prioritizes marginalized people's safety over privileged people's comfort. The committee will not act on complaints regarding:
|
||||
|
||||
* "Reverse"-isms, including "reverse racism," "reverse sexism," and "cisphobia"
|
||||
* Reasonable communication of boundaries, such as "leave me alone," "go away," or "I'm not discussing this with you."
|
||||
* Criticizing racist, sexist, cissexist, or otherwise oppressive behavior or assumptions
|
||||
* Communicating boundaries or criticizing oppressive behavior in a "tone" you don't find congenial
|
||||
|
||||
The examples listed above are not against the Code of Conduct. If you have questions about the above statements, please [read this document](https://github.com/sagesharp/code-of-conduct-template/blob/master/code-of-conduct/example-reversisms.md#supporting-diversity).
|
||||
|
||||
If a participant engages in behavior that violates this code of conduct, the GNOME Code of Conduct committee may take any action they deem appropriate. Examples of consequences are outlined in the [Committee Procedures Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/CommitteeProcedures).
|
||||
|
||||
## Procedure for Handling Incidents
|
||||
|
||||
* [Reporter Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/ReporterGuide)
|
||||
|
||||
* [Moderator Procedures](https://wiki.gnome.org/Foundation/CodeOfConduct/ModeratorProcedures)
|
||||
|
||||
* [Committee Procedures Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/CommitteeProcedures)
|
||||
|
||||
## License
|
||||
|
||||
The GNOME Online Code of Conduct is licensed under a [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/)
|
||||
|
||||

|
||||
|
||||
## Attribution
|
||||
|
||||
The GNOME Online Code of Conduct was forked from the example policy from the [Geek Feminism wiki, created by the Ada Initiative and other volunteers](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy), which is under a Creative Commons Zero license.
|
||||
|
||||
Additional language was incorporated and modified from the following Codes of Conduct:
|
||||
|
||||
* [Citizen Code of Conduct](http://citizencodeofconduct.org/) is licensed [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/).
|
||||
* [Code of Conduct template](https://github.com/sagesharp/code-of-conduct-template/) is licensed [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/) by [Otter Tech](https://otter.technology/code-of-conduct-training)
|
||||
* [Contributor Covenant version 1.4](https://www.contributor-covenant.org/version/1/4/code-of-conduct) (licensed [CC BY 4.0](https://github.com/ContributorCovenant/contributor_covenant/blob/master/LICENSE.md))
|
||||
* [Data Carpentry Code of Conduct](https://docs.carpentries.org/topic_folders/policies/index_coc.html) is licensed [Creative Commons Attribution 4.0 License](https://creativecommons.org/licenses/by/4.0/)
|
||||
* [Django Project Code of Conduct](https://www.djangoproject.com/conduct/) is licensed under a [Creative Commons Attribution 3.0 Unported License](http://creativecommons.org/licenses/by/3.0/)
|
||||
* [Fedora Code of Conduct](http://fedoraproject.org/code-of-conduct)
|
||||
* [Geek Feminism Anti-harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy) which is under a [Creative Commons Zero license](https://creativecommons.org/publicdomain/zero/1.0/)
|
||||
* [Previous GNOME Foundation Code of Conduct](https://wiki.gnome.org/action/recall/Foundation/CodeOfConduct/Old)
|
||||
* [LGBTQ in Technology Slack Code of Conduct](https://lgbtq.technology/coc.html) licensed [Creative Commons Zero](https://creativecommons.org/publicdomain/zero/1.0/)
|
||||
* [Mozilla Community Participation Guidelines](https://www.mozilla.org/en-US/about/governance/policies/participation/) is licensed [Creative Commons Attribution-ShareAlike 3.0 Unported License](https://creativecommons.org/licenses/by-sa/3.0/).
|
||||
* [Python Mentors Code of Conduct](http://pythonmentors.com/)
|
||||
* [Speak Up! Community Code of Conduct](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html), licensed under a [Creative Commons Attribution 3.0 Unported License](http://creativecommons.org/licenses/by/3.0/)
|
||||
|
||||
183
configure
vendored
183
configure
vendored
@ -1,183 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Adapted from:
|
||||
# https://gitlab.gnome.org/danigm/libgepub/blob/27f0d374e0c8f6fa972dbd111d4ce0c0f3096914/configure_meson
|
||||
|
||||
# configure script adapter for Meson
|
||||
# Based on build-api: https://github.com/cgwalters/build-api
|
||||
# Copyright 2010, 2011, 2013 Colin Walters <walters@verbum.org>
|
||||
# Copyright 2016, 2017 Emmanuele Bassi
|
||||
# Copyright 2017 Iñigo Martínez <inigomartinez@gmail.com>
|
||||
# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
|
||||
|
||||
# Build API variables:
|
||||
|
||||
# Little helper function for reading args from the commandline.
|
||||
# it automatically handles -a b and -a=b variants, and returns 1 if
|
||||
# we need to shift $3.
|
||||
read_arg() {
|
||||
# $1 = arg name
|
||||
# $2 = arg value
|
||||
# $3 = arg parameter
|
||||
local rematch='^[^=]*=(.*)$'
|
||||
if [[ $2 =~ $rematch ]]; then
|
||||
read "$1" <<< "${BASH_REMATCH[1]}"
|
||||
else
|
||||
read "$1" <<< "$3"
|
||||
# There is no way to shift our callers args, so
|
||||
# return 1 to indicate they should do it instead.
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
sanitycheck() {
|
||||
# $1 = arg name
|
||||
# $1 = arg command
|
||||
# $2 = arg alternates
|
||||
local cmd=$( which $2 2>/dev/null )
|
||||
|
||||
if [ -x "$cmd" ]; then
|
||||
read "$1" <<< "$cmd"
|
||||
return 0
|
||||
fi
|
||||
|
||||
test -z $3 || {
|
||||
for alt in $3; do
|
||||
cmd=$( which $alt 2>/dev/null )
|
||||
|
||||
if [ -x "$cmd" ]; then
|
||||
read "$1" <<< "$cmd"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
echo -e "\e[1;31mERROR\e[0m: Command '$2' not found"
|
||||
exit 1
|
||||
}
|
||||
|
||||
checkoption() {
|
||||
# $1 = arg
|
||||
option="${1#*--}"
|
||||
action="${option%%-*}"
|
||||
name="${option#*-}"
|
||||
if [ ${default_options[$name]+_} ]; then
|
||||
case "$action" in
|
||||
enable) meson_options[$name]=true;;
|
||||
disable) meson_options[$name]=false;;
|
||||
*) echo -e "\e[1;33mINFO\e[0m: Ignoring unknown action '$action'";;
|
||||
esac
|
||||
else
|
||||
echo -e "\e[1;33mINFO\e[0m: Ignoring unknown option '$option'"
|
||||
fi
|
||||
}
|
||||
|
||||
echooption() {
|
||||
# $1 = option
|
||||
if [ ${meson_options[$1]+_} ]; then
|
||||
echo ${meson_options[$1]}
|
||||
elif [ ${default_options[$1]+_} ]; then
|
||||
echo ${default_options[$1]}
|
||||
fi
|
||||
}
|
||||
|
||||
sanitycheck MESON 'meson'
|
||||
sanitycheck MESONTEST 'mesontest'
|
||||
sanitycheck NINJA 'ninja' 'ninja-build'
|
||||
|
||||
declare -A meson_options
|
||||
|
||||
while (($# > 0)); do
|
||||
case "${1%%=*}" in
|
||||
--prefix) read_arg prefix "$@" || shift;;
|
||||
--bindir) read_arg bindir "$@" || shift;;
|
||||
--sbindir) read_arg sbindir "$@" || shift;;
|
||||
--libexecdir) read_arg libexecdir "$@" || shift;;
|
||||
--datarootdir) read_arg datarootdir "$@" || shift;;
|
||||
--datadir) read_arg datadir "$@" || shift;;
|
||||
--sysconfdir) read_arg sysconfdir "$@" || shift;;
|
||||
--libdir) read_arg libdir "$@" || shift;;
|
||||
--mandir) read_arg mandir "$@" || shift;;
|
||||
--includedir) read_arg includedir "$@" || shift;;
|
||||
*) checkoption $1;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Defaults
|
||||
test -z ${prefix} && prefix="/usr/local"
|
||||
test -z ${bindir} && bindir=${prefix}/bin
|
||||
test -z ${sbindir} && sbindir=${prefix}/sbin
|
||||
test -z ${libexecdir} && libexecdir=${prefix}/bin
|
||||
test -z ${datarootdir} && datarootdir=${prefix}/share
|
||||
test -z ${datadir} && datadir=${datarootdir}
|
||||
test -z ${sysconfdir} && sysconfdir=${prefix}/etc
|
||||
test -z ${libdir} && libdir=${prefix}/lib
|
||||
test -z ${mandir} && mandir=${prefix}/share/man
|
||||
test -z ${includedir} && includedir=${prefix}/include
|
||||
|
||||
# The source directory is the location of this file
|
||||
srcdir=$(dirname $0)
|
||||
|
||||
# The build directory is the current location
|
||||
builddir=`pwd`
|
||||
|
||||
# If we're calling this file from the source directory then
|
||||
# we automatically create a build directory and ensure that
|
||||
# both Meson and Ninja invocations are relative to that
|
||||
# location
|
||||
if [[ -f "${builddir}/meson.build" ]]; then
|
||||
mkdir -p _build
|
||||
builddir="${builddir}/_build"
|
||||
NINJA_OPT="-C ${builddir}"
|
||||
fi
|
||||
|
||||
# Wrapper Makefile for Ninja
|
||||
cat > Makefile <<END
|
||||
# Generated by configure; do not edit
|
||||
|
||||
all:
|
||||
${NINJA} ${NINJA_OPT}
|
||||
|
||||
install:
|
||||
DESTDIR="\$(DESTDIR)" ${NINJA} ${NINJA_OPT} install
|
||||
|
||||
uninstall:
|
||||
${NINJA} ${NINJA_OPT} uninstall
|
||||
|
||||
release:
|
||||
${NINJA} ${NINJA_OPT} release
|
||||
|
||||
check:
|
||||
${MESONTEST} ${NINJA_OPT}
|
||||
END
|
||||
|
||||
echo "
|
||||
|
||||
hammond
|
||||
=======
|
||||
|
||||
meson: ${MESON}
|
||||
ninja: ${NINJA}
|
||||
prefix: ${prefix}
|
||||
|
||||
Now type 'make' to build
|
||||
"
|
||||
|
||||
cmd_options=""
|
||||
for key in "${!meson_options[@]}"; do
|
||||
cmd_options="$cmd_options -Denable-$key=${meson_options[$key]}"
|
||||
done
|
||||
|
||||
exec ${MESON} \
|
||||
--prefix=${prefix} \
|
||||
--libdir=${libdir} \
|
||||
--libexecdir=${libexecdir} \
|
||||
--datadir=${datadir} \
|
||||
--sysconfdir=${sysconfdir} \
|
||||
--bindir=${bindir} \
|
||||
--includedir=${includedir} \
|
||||
--mandir=${mandir} \
|
||||
${cmd_options} \
|
||||
${builddir} \
|
||||
${srcdir}
|
||||
@ -1,32 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
name = "hammond-data"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
chrono = "0.4.0"
|
||||
dotenv = "0.10.1"
|
||||
error-chain = "0.11.0"
|
||||
lazy_static = "0.2.11"
|
||||
log = "0.3.8"
|
||||
r2d2 = "0.7.4"
|
||||
r2d2-diesel = "0.16.0"
|
||||
rayon = "0.9.0"
|
||||
reqwest = "0.8.1"
|
||||
rfc822_sanitizer = "0.3.3"
|
||||
rss = "1.1.0"
|
||||
url = "1.6.0"
|
||||
xdg = "2.1.0"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite"]
|
||||
git = "https://github.com/diesel-rs/diesel.git"
|
||||
|
||||
[dependencies.diesel_codegen]
|
||||
features = ["sqlite"]
|
||||
git = "https://github.com/diesel-rs/diesel.git"
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.3.18"
|
||||
tempdir = "0.3.5"
|
||||
@ -1,66 +0,0 @@
|
||||
#![feature(test)]
|
||||
|
||||
extern crate diesel;
|
||||
extern crate hammond_data;
|
||||
extern crate rand;
|
||||
extern crate rayon;
|
||||
extern crate rss;
|
||||
extern crate tempdir;
|
||||
extern crate test;
|
||||
|
||||
use rayon::prelude::*;
|
||||
|
||||
use test::Bencher;
|
||||
|
||||
use hammond_data::Source;
|
||||
use hammond_data::feed::{index, Feed};
|
||||
|
||||
use std::io::BufReader;
|
||||
|
||||
// Big rss feed
|
||||
const PCPER: &[u8] = include_bytes!("feeds/pcpermp3.xml");
|
||||
const UNPLUGGED: &[u8] = include_bytes!("feeds/linuxunplugged.xml");
|
||||
const RADIO: &[u8] = include_bytes!("feeds/coderradiomp3.xml");
|
||||
const SNAP: &[u8] = include_bytes!("feeds/techsnapmp3.xml");
|
||||
const LAS: &[u8] = include_bytes!("feeds/TheLinuxActionShow.xml");
|
||||
|
||||
static URLS: &[(&[u8], &str)] = &[
|
||||
(PCPER, "https://www.pcper.com/rss/podcasts-mp3.rss"),
|
||||
(UNPLUGGED, "http://feeds.feedburner.com/linuxunplugged"),
|
||||
(RADIO, "https://feeds.feedburner.com/coderradiomp3"),
|
||||
(SNAP, "https://feeds.feedburner.com/techsnapmp3"),
|
||||
(LAS, "https://feeds2.feedburner.com/TheLinuxActionShow"),
|
||||
];
|
||||
|
||||
fn index_urls() {
|
||||
let feeds: Vec<_> = URLS.par_iter()
|
||||
.map(|&(buff, url)| {
|
||||
// Create and insert a Source into db
|
||||
let s = Source::from_url(url).unwrap();
|
||||
// parse it into a channel
|
||||
let chan = rss::Channel::read_from(BufReader::new(buff)).unwrap();
|
||||
Feed::from_channel_source(chan, s)
|
||||
})
|
||||
.collect();
|
||||
|
||||
index(feeds);
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_index_feeds(b: &mut Bencher) {
|
||||
b.iter(|| {
|
||||
index_urls();
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn bench_index_unchanged_feeds(b: &mut Bencher) {
|
||||
// Index first so it will only bench the comparison test case.
|
||||
index_urls();
|
||||
|
||||
b.iter(|| {
|
||||
for _ in 0..10 {
|
||||
index_urls();
|
||||
}
|
||||
});
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,77 +0,0 @@
|
||||
use r2d2_diesel::ConnectionManager;
|
||||
use diesel::prelude::*;
|
||||
use r2d2;
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::io;
|
||||
use std::time::Duration;
|
||||
|
||||
use errors::*;
|
||||
|
||||
#[cfg(not(test))]
|
||||
use xdg_dirs;
|
||||
|
||||
/// Shared, reference-counted r2d2 pool of sqlite connections.
type Pool = Arc<r2d2::Pool<ConnectionManager<SqliteConnection>>>;

// Compile the SQL files under `migrations/` into the binary.
embed_migrations!("migrations/");

lazy_static!{
    // Global connection pool; initialized lazily on first access.
    static ref POOL: Pool = init_pool(DB_PATH.to_str().unwrap());
}

#[cfg(not(test))]
lazy_static! {
    // Production database lives in the XDG data directory.
    static ref DB_PATH: PathBuf = xdg_dirs::HAMMOND_XDG.place_data_file("hammond.db").unwrap();
}

#[cfg(test)]
extern crate tempdir;

#[cfg(test)]
lazy_static! {
    // All unit tests share one temp dir (and therefore one db file)
    // for the lifetime of the test process.
    static ref TEMPDIR: tempdir::TempDir = {
        tempdir::TempDir::new("hammond_unit_test").unwrap()
    };

    static ref DB_PATH: PathBuf = TEMPDIR.path().join("hammond.db");
}
|
||||
|
||||
/// Hand out a new reference to the global connection pool.
pub(crate) fn connection() -> Pool {
    // Cheap: only bumps the Arc refcount.
    Arc::clone(&POOL)
}
|
||||
|
||||
fn init_pool(db_path: &str) -> Pool {
|
||||
let config = r2d2::Config::builder()
|
||||
.pool_size(1)
|
||||
.connection_timeout(Duration::from_secs(60))
|
||||
.build();
|
||||
let manager = ConnectionManager::<SqliteConnection>::new(db_path);
|
||||
let pool = Arc::new(r2d2::Pool::new(config, manager).expect("Failed to create pool."));
|
||||
|
||||
{
|
||||
let db = Arc::clone(&pool).get().expect("Failed to initialize pool.");
|
||||
run_migration_on(&*db).expect("Failed to run migrations during init.");
|
||||
}
|
||||
info!("Database pool initialized.");
|
||||
pool
|
||||
}
|
||||
|
||||
fn run_migration_on(connection: &SqliteConnection) -> Result<()> {
|
||||
info!("Running DB Migrations...");
|
||||
// embedded_migrations::run(connection)?;
|
||||
embedded_migrations::run_with_output(connection, &mut io::stdout())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Reset the database into a clean state.
|
||||
// Test share a Temp file db.
|
||||
#[allow(dead_code)]
|
||||
pub fn truncate_db() -> Result<()> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
con.execute("DELETE FROM episode")?;
|
||||
con.execute("DELETE FROM podcast")?;
|
||||
con.execute("DELETE FROM source")?;
|
||||
Ok(())
|
||||
}
|
||||
@ -1,206 +0,0 @@
|
||||
//! Random CRUD helper functions.
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel;
|
||||
use models::queryables::{Episode, Podcast, Source};
|
||||
use chrono::prelude::*;
|
||||
use errors::*;
|
||||
|
||||
use database::connection;
|
||||
|
||||
/// Load every `Source` row in the db.
pub fn get_sources() -> Result<Vec<Source>> {
    use schema::source::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(source.load::<Source>(&*con)?)
}

/// Load every `Podcast` row in the db.
pub fn get_podcasts() -> Result<Vec<Podcast>> {
    use schema::podcast::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(podcast.load::<Podcast>(&*con)?)
}

/// Load every `Episode`, newest (largest `epoch`) first.
pub fn get_episodes() -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(episode.order(epoch.desc()).load::<Episode>(&*con)?)
}
|
||||
|
||||
/// Episodes that have a `local_uri` set, i.e. were downloaded.
pub fn get_downloaded_episodes() -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(episode
        .filter(local_uri.is_not_null())
        .load::<Episode>(&*con)?)
}

/// Episodes whose `played` timestamp is set.
pub fn get_played_episodes() -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(episode.filter(played.is_not_null()).load::<Episode>(&*con)?)
}

/// Look up a single `Episode` by primary key.
pub fn get_episode_from_id(ep_id: i32) -> Result<Episode> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(episode.filter(id.eq(ep_id)).get_result::<Episode>(&*con)?)
}
|
||||
|
||||
/// Fetch only the `local_uri` column of the episode with id `ep_id`.
pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    Ok(episode
        .filter(id.eq(ep_id))
        .select(local_uri)
        .get_result::<Option<String>>(&*con)?)
}

/// Newest `limit` episodes, ordered by `epoch` descending.
pub fn get_episodes_with_limit(limit: u32) -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    Ok(episode
        .order(epoch.desc())
        .limit(i64::from(limit))
        .load::<Episode>(&*con)?)
}
|
||||
|
||||
/// Look up a single `Podcast` by primary key.
pub fn get_podcast_from_id(pid: i32) -> Result<Podcast> {
    use schema::podcast::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(podcast.filter(id.eq(pid)).get_result::<Podcast>(&*con)?)
}

/// All episodes belonging to `parent`, newest first.
pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    Ok(Episode::belonging_to(parent)
        .order(epoch.desc())
        .load::<Episode>(&*con)?)
}

/// Unplayed episodes of `parent`, newest first.
pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    Ok(Episode::belonging_to(parent)
        .filter(played.is_null())
        .order(epoch.desc())
        .load::<Episode>(&*con)?)
}

/// Newest `limit` episodes of `parent`.
pub fn get_pd_episodes_limit(parent: &Podcast, limit: u32) -> Result<Vec<Episode>> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    Ok(Episode::belonging_to(parent)
        .order(epoch.desc())
        .limit(i64::from(limit))
        .load::<Episode>(&*con)?)
}
|
||||
|
||||
/// Look up a `Source` by its feed uri.
pub fn get_source_from_uri(uri_: &str) -> Result<Source> {
    use schema::source::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(source.filter(uri.eq(uri_)).get_result::<Source>(&*con)?)
}

// pub fn get_podcast_from_title(title_: &str) -> QueryResult<Podcast> {
//     use schema::podcast::dsl::*;

//     let db = connection();
//     let con = db.get()?;
//     podcast
//         .filter(title.eq(title_))
//         .get_result::<Podcast>(&*con)
// }

/// Look up the `Podcast` belonging to the source row `sid`.
pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast> {
    use schema::podcast::dsl::*;

    let db = connection();
    let con = db.get()?;
    Ok(podcast
        .filter(source_id.eq(sid))
        .get_result::<Podcast>(&*con)?)
}
|
||||
|
||||
pub fn get_episode_from_uri(con: &SqliteConnection, uri_: &str) -> QueryResult<Episode> {
|
||||
use schema::episode::dsl::*;
|
||||
|
||||
episode.filter(uri.eq(uri_)).get_result::<Episode>(&*con)
|
||||
}
|
||||
|
||||
/// Delete a podcast together with its source row and episodes.
///
/// All three deletes run in one transaction so a partial failure cannot
/// orphan rows.
pub fn remove_feed(pd: &Podcast) -> Result<()> {
    let db = connection();
    let con = db.get()?;

    con.transaction(|| -> Result<()> {
        delete_source(&con, pd.source_id())?;
        delete_podcast(&con, *pd.id())?;
        delete_podcast_episodes(&con, *pd.id())?;
        info!("Feed removed from the Database.");
        Ok(())
    })
}
|
||||
|
||||
pub fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
|
||||
use schema::source::dsl::*;
|
||||
|
||||
diesel::delete(source.filter(id.eq(source_id))).execute(&*con)
|
||||
}
|
||||
|
||||
pub fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
|
||||
use schema::podcast::dsl::*;
|
||||
|
||||
diesel::delete(podcast.filter(id.eq(podcast_id))).execute(&*con)
|
||||
}
|
||||
|
||||
pub fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
|
||||
use schema::episode::dsl::*;
|
||||
|
||||
diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(&*con)
|
||||
}
|
||||
|
||||
/// Mark every unplayed episode of `parent` as played right now.
///
/// Returns the number of rows updated.
pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize> {
    use schema::episode::dsl::*;

    let db = connection();
    let con = db.get()?;

    // NOTE(review): the `played` column is i32, so the unix timestamp
    // is truncated here (breaks past 2038) — fixing requires a schema
    // change, not a code change.
    let epoch_now = Utc::now().timestamp() as i32;
    con.transaction(|| -> Result<usize> {
        Ok(diesel::update(
            Episode::belonging_to(parent).filter(played.is_null()),
        ).set(played.eq(Some(epoch_now)))
            .execute(&*con)?)
    })
}
|
||||
@ -1,18 +0,0 @@
|
||||
use diesel::result;
|
||||
use diesel::migrations::RunMigrationsError;
|
||||
use rss;
|
||||
use reqwest;
|
||||
use r2d2;
|
||||
|
||||
use std::io;
|
||||
|
||||
// Single crate-wide error type. Every foreign error below gets an
// automatic `From` impl into `Error`, which is what lets `?` convert
// across layers throughout the crate.
error_chain! {
    foreign_links {
        R2D2TimeoutError(r2d2::GetTimeout);
        DieselResultError(result::Error);
        DieselMigrationError(RunMigrationsError);
        RSSError(rss::Error);
        ReqError(reqwest::Error);
        IoError(io::Error);
    }
}
|
||||
@ -1,288 +0,0 @@
|
||||
//! Index and retrieve Feeds.
|
||||
|
||||
use rayon::prelude::*;
|
||||
use diesel::prelude::*;
|
||||
use rayon::iter::IntoParallelIterator;
|
||||
|
||||
use diesel::Identifiable;
|
||||
use rss;
|
||||
|
||||
use dbqueries;
|
||||
use parser;
|
||||
|
||||
use models::queryables::{Episode, Podcast, Source};
|
||||
use models::insertables::{NewEpisode, NewPodcast};
|
||||
use database::connection;
|
||||
use errors::*;
|
||||
|
||||
#[derive(Debug)]
/// Wrapper struct that hold a `Source` and the `rss::Channel`
/// that corresponds to the `Source.uri` field.
pub struct Feed {
    // Parsed contents of the rss feed.
    channel: rss::Channel,
    // The db row the feed was fetched for.
    source: Source,
}
|
||||
|
||||
impl Feed {
|
||||
/// Constructor that consumes a `Source` and returns the corresponding `Feed` struct.
|
||||
pub fn from_source(s: Source) -> Result<Feed> {
|
||||
s.into_feed()
|
||||
}
|
||||
|
||||
/// Constructor that consumes a `Source` and a `rss::Channel` returns a `Feed` struct.
|
||||
pub fn from_channel_source(chan: rss::Channel, s: Source) -> Feed {
|
||||
Feed {
|
||||
channel: chan,
|
||||
source: s,
|
||||
}
|
||||
}
|
||||
|
||||
fn index(&self) -> Result<()> {
|
||||
let pd = self.get_podcast()?;
|
||||
self.index_channel_items(&pd)
|
||||
}
|
||||
|
||||
// #[allow(dead_code)]
|
||||
// fn index_channel(&self) -> Result<()> {
|
||||
// self.parse_channel().index()?;
|
||||
// Ok(())
|
||||
// }
|
||||
|
||||
// TODO: Refactor transcactions and find a way to do it in parallel.
|
||||
fn index_channel_items(&self, pd: &Podcast) -> Result<()> {
|
||||
let episodes = self.parse_channel_items(pd);
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
let _ = con.transaction::<(), Error, _>(|| {
|
||||
episodes.into_iter().for_each(|x| {
|
||||
let e = x.index(&con);
|
||||
if let Err(err) = e {
|
||||
error!("Failed to index episode: {:?}.", x.title());
|
||||
error!("Error msg: {}", err);
|
||||
};
|
||||
});
|
||||
Ok(())
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_channel(&self) -> NewPodcast {
|
||||
parser::new_podcast(&self.channel, *self.source.id())
|
||||
}
|
||||
|
||||
fn parse_channel_items(&self, pd: &Podcast) -> Vec<NewEpisode> {
|
||||
let items = self.channel.items();
|
||||
let new_episodes: Vec<_> = items
|
||||
.into_par_iter()
|
||||
.filter_map(|item| parser::new_episode(item, *pd.id()).ok())
|
||||
.collect();
|
||||
|
||||
new_episodes
|
||||
}
|
||||
|
||||
fn get_podcast(&self) -> Result<Podcast> {
|
||||
self.parse_channel().into_podcast()
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn get_episodes(&self) -> Result<Vec<Episode>> {
|
||||
let pd = self.get_podcast()?;
|
||||
let eps = self.parse_channel_items(&pd);
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
// TODO: Make it parallel
|
||||
// This returns only the episodes in the xml feed.
|
||||
let episodes: Vec<_> = eps.into_iter()
|
||||
.filter_map(|ep| ep.into_episode(&con).ok())
|
||||
.collect();
|
||||
|
||||
Ok(episodes)
|
||||
|
||||
// This would return every episode of the feed from the db.
|
||||
// self.index_channel_items(&pd)?;
|
||||
// Ok(dbqueries::get_pd_episodes(&pd)?)
|
||||
}
|
||||
}
|
||||
|
||||
/// Use's `fetch_all` to retrieve a list of `Feed`s and use index them using `feed::index`.
|
||||
pub fn index_all() -> Result<()> {
|
||||
let feeds = fetch_all()?;
|
||||
|
||||
index(feeds);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Handle the indexing of a feed `F` into the Database.
|
||||
///
|
||||
/// Consume a `ParallelIterator<Feed>` and index it.
|
||||
pub fn index<F: IntoParallelIterator<Item = Feed>>(feeds: F) {
|
||||
feeds.into_par_iter().for_each(|f| {
|
||||
let e = f.index();
|
||||
if e.is_err() {
|
||||
error!("Error While trying to update the database.");
|
||||
error!("Error msg: {}", e.unwrap_err());
|
||||
};
|
||||
});
|
||||
info!("Indexing done.");
|
||||
}
|
||||
|
||||
/// Retrieve a list of all the `Source` in the database,
|
||||
/// then use `feed::fetch` to convert them into `Feed`s
|
||||
/// and return them.
|
||||
pub fn fetch_all() -> Result<Vec<Feed>> {
|
||||
let feeds = dbqueries::get_sources()?;
|
||||
Ok(fetch(feeds))
|
||||
}
|
||||
|
||||
/// Consume a `ParallelIterator<Source>` and return a list of `Feed`s.
|
||||
pub fn fetch<F: IntoParallelIterator<Item = Source>>(feeds: F) -> Vec<Feed> {
|
||||
let results: Vec<_> = feeds
|
||||
.into_par_iter()
|
||||
.filter_map(|x| {
|
||||
let uri = x.uri().to_owned();
|
||||
let feed = Feed::from_source(x).ok();
|
||||
if feed.is_none() {
|
||||
error!("Error While trying to fetch from source url: {}.", uri);
|
||||
}
|
||||
feed
|
||||
})
|
||||
.collect();
|
||||
|
||||
results
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use std::fs;
    use std::io::BufReader;
    use database::truncate_db;

    use super::*;

    // NOTE(review): test_index_loop and test_fetch_loop hit the live
    // network — confirm CI allows outbound http before relying on them.

    #[test]
    /// Insert feeds and update/index them.
    fn test_index_loop() {
        truncate_db().unwrap();
        let inpt = vec![
            "https://request-for-explanation.github.io/podcast/rss.xml",
            "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
            "http://feeds.propublica.org/propublica/podcast",
            "http://feeds.feedburner.com/linuxunplugged",
        ];

        inpt.iter().for_each(|url| {
            // Index the urls into the source table.
            Source::from_url(url).unwrap();
        });

        index_all().unwrap();

        // Run again to cover Unique constrains erros.
        index_all().unwrap();
    }

    #[test]
    /// Insert feeds and update/index them.
    fn test_fetch_loop() {
        truncate_db().unwrap();
        let inpt = vec![
            "https://request-for-explanation.github.io/podcast/rss.xml",
            "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
            "http://feeds.propublica.org/propublica/podcast",
            "http://feeds.feedburner.com/linuxunplugged",
        ];

        inpt.iter().for_each(|url| {
            // Index the urls into the source table.
            Source::from_url(url).unwrap();
        });

        fetch_all().unwrap();
    }

    #[test]
    // Index bundled xml files end-to-end and assert the exact row
    // counts they produce (network-free except Source creation).
    fn test_complete_index() {
        // vec of (path, url) tuples.
        let urls = vec![
            (
                "tests/feeds/Intercepted.xml",
                "https://feeds.feedburner.com/InterceptedWithJeremyScahill",
            ),
            (
                "tests/feeds/LinuxUnplugged.xml",
                "http://feeds.feedburner.com/linuxunplugged",
            ),
            (
                "tests/feeds/TheBreakthrough.xml",
                "http://feeds.propublica.org/propublica/podcast",
            ),
            (
                "tests/feeds/R4Explanation.xml",
                "https://request-for-explanation.github.io/podcast/rss.xml",
            ),
        ];

        truncate_db().unwrap();

        let feeds: Vec<_> = urls.iter()
            .map(|&(path, url)| {
                // Create and insert a Source into db
                let s = Source::from_url(url).unwrap();

                // open the xml file
                let feed = fs::File::open(path).unwrap();
                // parse it into a channel
                let chan = rss::Channel::read_from(BufReader::new(feed)).unwrap();
                Feed::from_channel_source(chan, s)
            })
            .collect();

        // Index the channels
        index(feeds);

        // Assert the index rows equal the controlled results
        assert_eq!(dbqueries::get_sources().unwrap().len(), 4);
        assert_eq!(dbqueries::get_podcasts().unwrap().len(), 4);
        assert_eq!(dbqueries::get_episodes().unwrap().len(), 274);
    }

    #[test]
    // Indexing the same feed twice must be idempotent: ids and rows
    // stay identical between the first and second pass.
    fn test_partial_index_podcast() {
        truncate_db().unwrap();
        let url = "https://feeds.feedburner.com/InterceptedWithJeremyScahill";

        let s1 = Source::from_url(url).unwrap();
        let s2 = Source::from_url(url).unwrap();
        assert_eq!(s1, s2);
        assert_eq!(s1.id(), s2.id());

        let f1 = s1.into_feed().unwrap();
        let f2 = s2.into_feed().unwrap();

        let p1 = f1.get_podcast().unwrap();
        let p2 = {
            f2.index().unwrap();
            f2.get_podcast().unwrap()
        };
        assert_eq!(p1, p2);
        assert_eq!(p1.id(), p2.id());
        assert_eq!(p1.source_id(), p2.source_id());

        let eps1 = f1.get_episodes().unwrap();
        let eps2 = {
            f2.index().unwrap();
            f2.get_episodes().unwrap()
        };

        eps1.into_par_iter()
            .zip(eps2)
            .into_par_iter()
            .for_each(|(ep1, ep2): (Episode, Episode)| {
                assert_eq!(ep1, ep2);
                assert_eq!(ep1.id(), ep2.id());
                assert_eq!(ep1.podcast_id(), ep2.podcast_id());
            });
    }
}
|
||||
@ -1,81 +0,0 @@
|
||||
#![recursion_limit = "1024"]
|
||||
#![deny(missing_docs)]
|
||||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
#![cfg_attr(feature = "clippy",
|
||||
warn(option_unwrap_used, result_unwrap_used, print_stdout,
|
||||
wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
|
||||
unicode_not_nfc, enum_glob_use, if_not_else, items_after_statements,
|
||||
used_underscore_binding))]
|
||||
#![cfg_attr(all(test, feature = "clippy"), allow(option_unwrap_used, result_unwrap_used))]
|
||||
|
||||
//! A libraty for parsing, indexing and retrieving podcast Feeds,
|
||||
//! into and from a Database.
|
||||
|
||||
#[macro_use]
|
||||
extern crate error_chain;
|
||||
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate diesel_codegen;
|
||||
|
||||
extern crate chrono;
|
||||
extern crate r2d2;
|
||||
extern crate r2d2_diesel;
|
||||
extern crate rayon;
|
||||
extern crate reqwest;
|
||||
extern crate rfc822_sanitizer;
|
||||
extern crate rss;
|
||||
extern crate url;
|
||||
extern crate xdg;
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod dbqueries;
|
||||
pub mod utils;
|
||||
pub mod feed;
|
||||
#[allow(missing_docs)]
|
||||
pub mod errors;
|
||||
pub(crate) mod database;
|
||||
pub(crate) mod models;
|
||||
mod parser;
|
||||
mod schema;
|
||||
|
||||
pub use models::queryables::{Episode, Podcast, Source};
|
||||
|
||||
/// [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
pub mod xdg_dirs {
    use std::path::PathBuf;
    use xdg;

    lazy_static!{
        // Root of all hammond XDG lookups; prefix is "hammond".
        pub(crate) static ref HAMMOND_XDG: xdg::BaseDirectories = {
            xdg::BaseDirectories::with_prefix("hammond").unwrap()
        };

        /// XDG_DATA Directory `Pathbuf`.
        pub static ref HAMMOND_DATA: PathBuf = {
            HAMMOND_XDG.create_data_directory(HAMMOND_XDG.get_data_home()).unwrap()
        };

        /// XDG_CONFIG Directory `Pathbuf`.
        pub static ref HAMMOND_CONFIG: PathBuf = {
            HAMMOND_XDG.create_config_directory(HAMMOND_XDG.get_config_home()).unwrap()
        };

        /// XDG_CACHE Directory `Pathbuf`.
        pub static ref HAMMOND_CACHE: PathBuf = {
            HAMMOND_XDG.create_cache_directory(HAMMOND_XDG.get_cache_home()).unwrap()
        };

        /// Hammond Download Directory `PathBuf`.
        pub static ref DL_DIR: PathBuf = {
            HAMMOND_XDG.create_data_directory("Downloads").unwrap()
        };
    }
}
|
||||
@ -1,424 +0,0 @@
|
||||
use diesel::prelude::*;
|
||||
|
||||
use schema::{episode, podcast, source};
|
||||
use models::queryables::{Episode, Podcast, Source};
|
||||
|
||||
use utils::url_cleaner;
|
||||
use errors::*;
|
||||
|
||||
use dbqueries;
|
||||
use diesel;
|
||||
use database::connection;
|
||||
|
||||
/// Insert `self` as a new row of its table; returns rows affected.
trait Insert {
    fn insert(&self, &SqliteConnection) -> QueryResult<usize>;
}

/// Overwrite the row with the given primary key with `self`'s fields.
trait Update {
    fn update(&self, &SqliteConnection, i32) -> QueryResult<usize>;
}
||||
|
||||
#[derive(Insertable)]
|
||||
#[table_name = "source"]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct NewSource {
|
||||
uri: String,
|
||||
last_modified: Option<String>,
|
||||
http_etag: Option<String>,
|
||||
}
|
||||
|
||||
impl Insert for NewSource {
|
||||
fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
|
||||
use schema::source::dsl::*;
|
||||
diesel::insert_into(source).values(self).execute(&*con)
|
||||
}
|
||||
}
|
||||
|
||||
impl NewSource {
|
||||
pub(crate) fn new_with_uri(uri: &str) -> NewSource {
|
||||
let uri = url_cleaner(uri);
|
||||
NewSource {
|
||||
uri,
|
||||
last_modified: None,
|
||||
http_etag: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn index(&self) -> Result<()> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
// Throw away the result like `insert or ignore`
|
||||
// Diesel deos not support `insert or ignore` yet.
|
||||
let _ = self.insert(&con);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Look out for when tryinto lands into stable.
|
||||
pub(crate) fn into_source(self) -> Result<Source> {
|
||||
self.index()?;
|
||||
dbqueries::get_source_from_uri(&self.uri)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "podcast"]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct NewPodcast {
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
always_dl: bool,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Insert for NewPodcast {
|
||||
fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
|
||||
use schema::podcast::dsl::*;
|
||||
diesel::insert_into(podcast).values(self).execute(&*con)
|
||||
}
|
||||
}
|
||||
|
||||
impl Update for NewPodcast {
|
||||
fn update(&self, con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
|
||||
use schema::podcast::dsl::*;
|
||||
|
||||
diesel::update(podcast.filter(id.eq(podcast_id)))
|
||||
.set(self)
|
||||
.execute(&*con)
|
||||
}
|
||||
}
|
||||
|
||||
impl NewPodcast {
|
||||
// Look out for when tryinto lands into stable.
|
||||
pub(crate) fn into_podcast(self) -> Result<Podcast> {
|
||||
self.index()?;
|
||||
Ok(dbqueries::get_podcast_from_source_id(self.source_id)?)
|
||||
}
|
||||
|
||||
pub(crate) fn index(&self) -> Result<()> {
|
||||
let pd = dbqueries::get_podcast_from_source_id(self.source_id);
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
match pd {
|
||||
Ok(foo) => {
|
||||
if foo.source_id() != self.source_id {
|
||||
error!("NSPD sid: {}, SPD sid: {}", self.source_id, foo.source_id());
|
||||
};
|
||||
|
||||
if (foo.link() != self.link) || (foo.title() != self.title)
|
||||
|| (foo.image_uri() != self.image_uri.as_ref().map(|x| x.as_str()))
|
||||
{
|
||||
self.update(&con, *foo.id())?;
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
self.insert(&con)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
/// Fluent builder for `NewPodcast`; fields mirror the target struct.
pub(crate) struct NewPodcastBuilder {
    title: String,
    link: String,
    description: String,
    image_uri: Option<String>,
    favorite: bool,
    archive: bool,
    always_dl: bool,
    source_id: i32,
}

#[allow(dead_code)]
// Each setter consumes and returns the builder for chaining.
impl NewPodcastBuilder {
    pub(crate) fn new() -> NewPodcastBuilder {
        NewPodcastBuilder::default()
    }

    pub(crate) fn title(mut self, s: String) -> NewPodcastBuilder {
        self.title = s;
        self
    }

    pub(crate) fn link(mut self, s: String) -> NewPodcastBuilder {
        self.link = s;
        self
    }

    pub(crate) fn description(mut self, s: String) -> NewPodcastBuilder {
        self.description = s;
        self
    }

    pub(crate) fn image_uri(mut self, s: Option<String>) -> NewPodcastBuilder {
        self.image_uri = s;
        self
    }

    pub(crate) fn source_id(mut self, s: i32) -> NewPodcastBuilder {
        self.source_id = s;
        self
    }

    pub(crate) fn favorite(mut self, s: bool) -> NewPodcastBuilder {
        self.favorite = s;
        self
    }

    pub(crate) fn archive(mut self, s: bool) -> NewPodcastBuilder {
        self.archive = s;
        self
    }

    pub(crate) fn always_dl(mut self, s: bool) -> NewPodcastBuilder {
        self.always_dl = s;
        self
    }

    // Consume the builder and produce the insertable.
    pub(crate) fn build(self) -> NewPodcast {
        NewPodcast {
            title: self.title,
            link: self.link,
            description: self.description,
            image_uri: self.image_uri,
            favorite: self.favorite,
            archive: self.archive,
            always_dl: self.always_dl,
            source_id: self.source_id,
        }
    }
}
|
||||
|
||||
#[allow(dead_code)]
// Ignore the following geters. They are used in unit tests mainly.
impl NewPodcast {
    pub(crate) fn source_id(&self) -> i32 {
        self.source_id
    }

    pub(crate) fn title(&self) -> &str {
        &self.title
    }

    pub(crate) fn link(&self) -> &str {
        &self.link
    }

    pub(crate) fn description(&self) -> &str {
        &self.description
    }

    // Borrowing view of the optional image uri.
    pub(crate) fn image_uri(&self) -> Option<&str> {
        self.image_uri.as_ref().map(|s| s.as_str())
    }
}
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "episode"]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub(crate) struct NewEpisode {
|
||||
title: Option<String>,
|
||||
uri: String,
|
||||
local_uri: Option<String>,
|
||||
description: Option<String>,
|
||||
published_date: Option<String>,
|
||||
length: Option<i32>,
|
||||
guid: Option<String>,
|
||||
epoch: i32,
|
||||
played: Option<i32>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
podcast_id: i32,
|
||||
}
|
||||
|
||||
impl Insert for NewEpisode {
|
||||
fn insert(&self, con: &SqliteConnection) -> QueryResult<usize> {
|
||||
use schema::episode::dsl::*;
|
||||
diesel::insert_into(episode).values(self).execute(&*con)
|
||||
}
|
||||
}
|
||||
|
||||
impl Update for NewEpisode {
|
||||
fn update(&self, con: &SqliteConnection, episode_id: i32) -> QueryResult<usize> {
|
||||
use schema::episode::dsl::*;
|
||||
|
||||
diesel::update(episode.filter(id.eq(episode_id)))
|
||||
.set(self)
|
||||
.execute(&*con)
|
||||
}
|
||||
}
|
||||
|
||||
impl NewEpisode {
|
||||
// TODO: Currently using diesel from master git.
|
||||
// Watch out for v0.99.0 beta and change the toml.
|
||||
// TODO: Refactor into batch indexes instead.
|
||||
pub(crate) fn into_episode(self, con: &SqliteConnection) -> Result<Episode> {
|
||||
self.index(con)?;
|
||||
Ok(dbqueries::get_episode_from_uri(con, &self.uri)?)
|
||||
}
|
||||
|
||||
pub(crate) fn index(&self, con: &SqliteConnection) -> QueryResult<()> {
|
||||
let ep = dbqueries::get_episode_from_uri(con, &self.uri.clone());
|
||||
|
||||
match ep {
|
||||
Ok(foo) => {
|
||||
if foo.podcast_id() != self.podcast_id {
|
||||
error!("NEP pid: {}, EP pid: {}", self.podcast_id, foo.podcast_id());
|
||||
};
|
||||
|
||||
if foo.title() != self.title.as_ref().map(|x| x.as_str())
|
||||
|| foo.published_date() != self.published_date.as_ref().map(|x| x.as_str())
|
||||
{
|
||||
self.update(con, *foo.id())?;
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
self.insert(con)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
/// Fluent builder for `NewEpisode`; fields mirror the target struct.
pub(crate) struct NewEpisodeBuilder {
    title: Option<String>,
    uri: String,
    local_uri: Option<String>,
    description: Option<String>,
    published_date: Option<String>,
    length: Option<i32>,
    guid: Option<String>,
    epoch: i32,
    played: Option<i32>,
    favorite: bool,
    archive: bool,
    podcast_id: i32,
}

#[allow(dead_code)]
// Each setter consumes and returns the builder for chaining.
impl NewEpisodeBuilder {
    pub(crate) fn new() -> NewEpisodeBuilder {
        NewEpisodeBuilder::default()
    }

    pub(crate) fn title(mut self, s: Option<String>) -> NewEpisodeBuilder {
        self.title = s;
        self
    }

    pub(crate) fn uri(mut self, s: String) -> NewEpisodeBuilder {
        self.uri = s;
        self
    }

    pub(crate) fn local_uri(mut self, s: Option<String>) -> NewEpisodeBuilder {
        self.local_uri = s;
        self
    }

    pub(crate) fn description(mut self, s: Option<String>) -> NewEpisodeBuilder {
        self.description = s;
        self
    }

    pub(crate) fn published_date(mut self, s: Option<String>) -> NewEpisodeBuilder {
        self.published_date = s;
        self
    }

    pub(crate) fn length(mut self, s: Option<i32>) -> NewEpisodeBuilder {
        self.length = s;
        self
    }

    pub(crate) fn played(mut self, s: Option<i32>) -> NewEpisodeBuilder {
        self.played = s;
        self
    }

    pub(crate) fn guid(mut self, s: Option<String>) -> NewEpisodeBuilder {
        self.guid = s;
        self
    }

    pub(crate) fn epoch(mut self, s: i32) -> NewEpisodeBuilder {
        self.epoch = s;
        self
    }

    pub(crate) fn podcast_id(mut self, s: i32) -> NewEpisodeBuilder {
        self.podcast_id = s;
        self
    }

    pub(crate) fn favorite(mut self, s: bool) -> NewEpisodeBuilder {
        self.favorite = s;
        self
    }

    pub(crate) fn archive(mut self, s: bool) -> NewEpisodeBuilder {
        self.archive = s;
        self
    }

    // Consume the builder and produce the insertable.
    pub(crate) fn build(self) -> NewEpisode {
        NewEpisode {
            title: self.title,
            uri: self.uri,
            local_uri: self.local_uri,
            description: self.description,
            published_date: self.published_date,
            length: self.length,
            guid: self.guid,
            epoch: self.epoch,
            played: self.played,
            favorite: self.favorite,
            archive: self.archive,
            podcast_id: self.podcast_id,
        }
    }
}
|
||||
|
||||
#[allow(dead_code)]
// Ignore the following geters. They are used in unit tests mainly.
impl NewEpisode {
    pub(crate) fn title(&self) -> Option<&str> {
        self.title.as_ref().map(|s| s.as_str())
    }

    pub(crate) fn uri(&self) -> &str {
        self.uri.as_ref()
    }

    pub(crate) fn description(&self) -> Option<&str> {
        self.description.as_ref().map(|s| s.as_str())
    }

    pub(crate) fn published_date(&self) -> Option<&str> {
        self.published_date.as_ref().map(|s| s.as_str())
    }

    pub(crate) fn guid(&self) -> Option<&str> {
        self.guid.as_ref().map(|s| s.as_str())
    }

    pub(crate) fn epoch(&self) -> i32 {
        self.epoch
    }

    pub(crate) fn length(&self) -> Option<i32> {
        self.length
    }

    pub(crate) fn podcast_id(&self) -> i32 {
        self.podcast_id
    }
}
|
||||
@ -1,2 +0,0 @@
|
||||
pub(crate) mod insertables;
|
||||
pub(crate) mod queryables;
|
||||
@ -1,418 +0,0 @@
|
||||
use chrono::prelude::*;
|
||||
|
||||
use reqwest;
|
||||
use diesel::SaveChangesDsl;
|
||||
use reqwest::header::{ETag, LastModified};
|
||||
use rss::Channel;
|
||||
|
||||
use schema::{episode, podcast, source};
|
||||
use feed::Feed;
|
||||
use errors::*;
|
||||
|
||||
use models::insertables::NewSource;
|
||||
use database::connection;
|
||||
|
||||
use std::io::Read;
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
|
||||
#[table_name = "episode"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[belongs_to(Podcast, foreign_key = "podcast_id")]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the episode table.
|
||||
pub struct Episode {
|
||||
id: i32,
|
||||
title: Option<String>,
|
||||
uri: String,
|
||||
local_uri: Option<String>,
|
||||
description: Option<String>,
|
||||
published_date: Option<String>,
|
||||
epoch: i32,
|
||||
length: Option<i32>,
|
||||
guid: Option<String>,
|
||||
played: Option<i32>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
podcast_id: i32,
|
||||
}
|
||||
|
||||
impl Episode {
|
||||
/// Get the value of the `title` field.
|
||||
pub fn title(&self) -> Option<&str> {
|
||||
self.title.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `title`.
|
||||
pub fn set_title(&mut self, value: Option<&str>) {
|
||||
self.title = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the value of the `uri`.
|
||||
///
|
||||
/// Represents the url(usually) that the media file will be located at.
|
||||
pub fn uri(&self) -> &str {
|
||||
self.uri.as_ref()
|
||||
}
|
||||
|
||||
/// Set the `uri`.
|
||||
pub fn set_uri(&mut self, value: &str) {
|
||||
self.uri = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the value of the `local_uri`.
|
||||
///
|
||||
/// Represents the local uri,usually filesystem path,
|
||||
/// that the media file will be located at.
|
||||
pub fn local_uri(&self) -> Option<&str> {
|
||||
self.local_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `local_uri`.
|
||||
pub fn set_local_uri(&mut self, value: Option<&str>) {
|
||||
self.local_uri = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> Option<&str> {
|
||||
self.description.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `description`.
|
||||
pub fn set_description(&mut self, value: Option<&str>) {
|
||||
self.description = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the the `published_date`.
|
||||
pub fn published_date(&self) -> Option<&str> {
|
||||
self.published_date.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `published_date`.
|
||||
pub fn set_published_date(&mut self, value: Option<&str>) {
|
||||
self.published_date = value.map(|x| x.to_string().to_owned());
|
||||
}
|
||||
|
||||
/// Get the value of the `description`.
|
||||
pub fn guid(&self) -> Option<&str> {
|
||||
self.guid.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `guid`.
|
||||
pub fn set_guid(&mut self, value: Option<&str>) {
|
||||
self.guid = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the `epoch` value.
|
||||
///
|
||||
/// Retrieved from the rss Item publish date.
|
||||
/// Value is set to Utc whenever possible.
|
||||
pub fn epoch(&self) -> i32 {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
/// Set the `epoch`.
|
||||
pub fn set_epoch(&mut self, value: i32) {
|
||||
self.epoch = value;
|
||||
}
|
||||
|
||||
/// Get the `length`.
|
||||
pub fn length(&self) -> Option<i32> {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Set the `length`.
|
||||
pub fn set_length(&mut self, value: Option<i32>) {
|
||||
self.length = value;
|
||||
}
|
||||
|
||||
/// Epoch representation of the last time the episode was played.
|
||||
///
|
||||
/// None/Null for unplayed.
|
||||
pub fn played(&self) -> Option<i32> {
|
||||
self.played
|
||||
}
|
||||
|
||||
/// Set the `played` value.
|
||||
pub fn set_played(&mut self, value: Option<i32>) {
|
||||
self.played = value;
|
||||
}
|
||||
|
||||
/// Represents the archiving policy for the episode.
|
||||
pub fn archive(&self) -> bool {
|
||||
self.archive
|
||||
}
|
||||
|
||||
/// Set the `archive` policy.
|
||||
///
|
||||
/// If true, the download cleanr will ignore the episode
|
||||
/// and the corresponding media value will never be automaticly deleted.
|
||||
pub fn set_archive(&mut self, b: bool) {
|
||||
self.archive = b
|
||||
}
|
||||
|
||||
/// Get the `favorite` status of the `Episode`.
|
||||
pub fn favorite(&self) -> bool {
|
||||
self.favorite
|
||||
}
|
||||
|
||||
/// Set `favorite` status.
|
||||
pub fn set_favorite(&mut self, b: bool) {
|
||||
self.favorite = b
|
||||
}
|
||||
|
||||
/// `Podcast` table foreign key.
|
||||
pub fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
}
|
||||
|
||||
/// Sets the `played` value with the current `epoch` timestap and save it.
|
||||
pub fn set_played_now(&mut self) -> Result<()> {
|
||||
let epoch = Utc::now().timestamp() as i32;
|
||||
self.set_played(Some(epoch));
|
||||
self.save()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
pub fn save(&self) -> Result<Episode> {
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
Ok(self.save_changes::<Episode>(&*tempdb)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
|
||||
#[belongs_to(Source, foreign_key = "source_id")]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[table_name = "podcast"]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the podcast table.
|
||||
pub struct Podcast {
|
||||
id: i32,
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
always_dl: bool,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Podcast {
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the Feed `link`.
|
||||
///
|
||||
/// Usually the website/homepage of the content creator.
|
||||
pub fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
/// Set the Podcast/Feed `link`.
|
||||
pub fn set_link(&mut self, value: &str) {
|
||||
self.link = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
/// Set the `description`.
|
||||
pub fn set_description(&mut self, value: &str) {
|
||||
self.description = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri(url usually) that the Feed cover image is located at.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `image_uri`.
|
||||
pub fn set_image_uri(&mut self, value: Option<&str>) {
|
||||
self.image_uri = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Represents the archiving policy for the episode.
|
||||
pub fn archive(&self) -> bool {
|
||||
self.archive
|
||||
}
|
||||
|
||||
/// Set the `archive` policy.
|
||||
pub fn set_archive(&mut self, b: bool) {
|
||||
self.archive = b
|
||||
}
|
||||
|
||||
/// Get the `favorite` status of the `Podcast` Feed.
|
||||
pub fn favorite(&self) -> bool {
|
||||
self.favorite
|
||||
}
|
||||
|
||||
/// Set `favorite` status.
|
||||
pub fn set_favorite(&mut self, b: bool) {
|
||||
self.favorite = b
|
||||
}
|
||||
|
||||
/// Represents the download policy for the `Podcast` Feed.
|
||||
///
|
||||
/// Reserved for the use with a Download manager, yet to be implemented.
|
||||
///
|
||||
/// If true Podcast Episode should be downloaded automaticly/skipping
|
||||
/// the selection queue.
|
||||
pub fn always_download(&self) -> bool {
|
||||
self.always_dl
|
||||
}
|
||||
|
||||
/// Set the download policy.
|
||||
pub fn set_always_download(&mut self, b: bool) {
|
||||
self.always_dl = b
|
||||
}
|
||||
|
||||
/// `Source` table foreign key.
|
||||
pub fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
pub fn save(&self) -> Result<Podcast> {
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
Ok(self.save_changes::<Podcast>(&*tempdb)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
|
||||
#[table_name = "source"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the source table.
|
||||
pub struct Source {
|
||||
id: i32,
|
||||
uri: String,
|
||||
last_modified: Option<String>,
|
||||
http_etag: Option<String>,
|
||||
}
|
||||
|
||||
impl<'a> Source {
|
||||
/// Represents the location(usually url) of the Feed xml file.
|
||||
pub fn uri(&self) -> &str {
|
||||
&self.uri
|
||||
}
|
||||
|
||||
/// Represents the Http Last-Modified Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn last_modified(&self) -> Option<&str> {
|
||||
self.last_modified.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `last_modified` value.
|
||||
pub fn set_last_modified(&mut self, value: Option<&str>) {
|
||||
self.last_modified = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Represents the Http Etag Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn http_etag(&self) -> Option<&str> {
|
||||
self.http_etag.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `http_etag` value.
|
||||
pub fn set_http_etag(&mut self, value: Option<&str>) {
|
||||
self.http_etag = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Extract Etag and LastModifier from req, and update self and the
|
||||
/// corresponding db row.
|
||||
fn update_etag(&mut self, req: &reqwest::Response) -> Result<()> {
|
||||
let headers = req.headers();
|
||||
|
||||
// let etag = headers.get_raw("ETag").unwrap();
|
||||
let etag = headers.get::<ETag>();
|
||||
let lmod = headers.get::<LastModified>();
|
||||
|
||||
// FIXME: This dsnt work most of the time apparently
|
||||
if self.http_etag() != etag.map(|x| x.tag())
|
||||
|| self.last_modified != lmod.map(|x| format!("{}", x))
|
||||
{
|
||||
self.http_etag = etag.map(|x| x.tag().to_string().to_owned());
|
||||
self.last_modified = lmod.map(|x| format!("{}", x));
|
||||
self.save()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
pub fn save(&self) -> Result<Source> {
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
Ok(self.save_changes::<Source>(&*tempdb)?)
|
||||
}
|
||||
|
||||
/// `Feed` constructor.
|
||||
///
|
||||
/// Fetches the latest xml Feed.
|
||||
///
|
||||
/// Updates the validator Http Headers.
|
||||
///
|
||||
/// Consumes `self` and Returns the corresponding `Feed` Object.
|
||||
// TODO: Refactor into TryInto once it lands on stable.
|
||||
pub fn into_feed(mut self) -> Result<Feed> {
|
||||
use reqwest::header::{ETag, EntityTag, Headers, HttpDate, LastModified};
|
||||
|
||||
let mut headers = Headers::new();
|
||||
|
||||
if let Some(foo) = self.http_etag() {
|
||||
headers.set(ETag(EntityTag::new(true, foo.to_owned())));
|
||||
}
|
||||
|
||||
if let Some(foo) = self.last_modified() {
|
||||
if let Ok(x) = foo.parse::<HttpDate>() {
|
||||
headers.set(LastModified(x));
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: I have fucked up somewhere here.
|
||||
// Getting back 200 codes even though I supposedly sent etags.
|
||||
// info!("Headers: {:?}", headers);
|
||||
let client = reqwest::Client::builder().referer(false).build()?;
|
||||
let mut req = client.get(self.uri()).headers(headers).send()?;
|
||||
|
||||
info!("GET to {} , returned: {}", self.uri(), req.status());
|
||||
|
||||
// TODO match on more stuff
|
||||
// 301: Permanent redirect of the url
|
||||
// 302: Temporary redirect of the url
|
||||
// 304: Up to date Feed, checked with the Etag
|
||||
// 410: Feed deleted
|
||||
// match req.status() {
|
||||
// reqwest::StatusCode::NotModified => (),
|
||||
// _ => (),
|
||||
// };
|
||||
|
||||
self.update_etag(&req)?;
|
||||
|
||||
let mut buf = String::new();
|
||||
req.read_to_string(&mut buf)?;
|
||||
let chan = Channel::from_str(&buf)?;
|
||||
|
||||
Ok(Feed::from_channel_source(chan, self))
|
||||
}
|
||||
|
||||
/// Construct a new `Source` with the given `uri` and index it.
|
||||
pub fn from_url(uri: &str) -> Result<Source> {
|
||||
NewSource::new_with_uri(uri).into_source()
|
||||
}
|
||||
}
|
||||
@ -1,362 +0,0 @@
|
||||
use rss::{Channel, Item};
|
||||
use rfc822_sanitizer::parse_from_rfc2822_with_fallback;
|
||||
|
||||
use models::insertables::{NewEpisode, NewEpisodeBuilder, NewPodcast, NewPodcastBuilder};
|
||||
use utils::url_cleaner;
|
||||
|
||||
use errors::*;
|
||||
|
||||
// TODO: Extend the support for parsing itunes extensions
|
||||
/// Parses a `rss::Channel` into a `NewPodcast` Struct.
|
||||
pub(crate) fn new_podcast(chan: &Channel, source_id: i32) -> NewPodcast {
|
||||
let title = chan.title().trim().to_owned();
|
||||
let description = chan.description().trim().to_owned();
|
||||
|
||||
let link = url_cleaner(chan.link()).to_owned();
|
||||
let x = chan.itunes_ext().map(|s| s.image());
|
||||
let image_uri = if let Some(img) = x {
|
||||
img.map(|s| url_cleaner(s))
|
||||
} else {
|
||||
chan.image().map(|foo| url_cleaner(foo.url()))
|
||||
};
|
||||
|
||||
NewPodcastBuilder::new()
|
||||
.title(title)
|
||||
.description(description)
|
||||
.link(link)
|
||||
.image_uri(image_uri)
|
||||
.source_id(source_id)
|
||||
.build()
|
||||
}
|
||||
|
||||
/// Parses an `rss::Item` into a `NewEpisode` Struct.
|
||||
pub(crate) fn new_episode(item: &Item, parent_id: i32) -> Result<NewEpisode> {
|
||||
let title = item.title().map(|s| s.trim().to_owned());
|
||||
let description = item.description().map(|s| s.trim().to_owned());
|
||||
let guid = item.guid().map(|s| s.value().trim().to_owned());
|
||||
|
||||
// Its kinda weird this being an Option type.
|
||||
// Rss 2.0 specified that it's optional.
|
||||
// Though the db scema has a requirment of episode uri being Unique && Not Null.
|
||||
// TODO: Restructure
|
||||
let x = item.enclosure().map(|s| url_cleaner(s.url()));
|
||||
let uri = if x.is_some() {
|
||||
x.unwrap()
|
||||
} else if item.link().is_some() {
|
||||
item.link().map(|s| url_cleaner(s)).unwrap()
|
||||
} else {
|
||||
bail!("No url specified for the item.")
|
||||
};
|
||||
|
||||
let date = parse_from_rfc2822_with_fallback(
|
||||
// Default to rfc2822 represantation of epoch 0.
|
||||
item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"),
|
||||
);
|
||||
|
||||
// Should treat information from the rss feeds as invalid by default.
|
||||
// Case: Thu, 05 Aug 2016 06:00:00 -0400 <-- Actually that was friday.
|
||||
let pub_date = date.map(|x| x.to_rfc2822()).ok();
|
||||
let epoch = date.map(|x| x.timestamp() as i32).unwrap_or(0);
|
||||
|
||||
let length = item.enclosure().map(|x| x.length().parse().unwrap_or(0));
|
||||
|
||||
Ok(
|
||||
NewEpisodeBuilder::new()
|
||||
.title(title)
|
||||
.uri(uri)
|
||||
.description(description)
|
||||
.length(length)
|
||||
.published_date(pub_date)
|
||||
.epoch(epoch)
|
||||
.guid(guid)
|
||||
.podcast_id(parent_id)
|
||||
.build(),
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
use rss::Channel;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_intercepted() {
|
||||
let file = File::open("tests/feeds/Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let descr = "The people behind The Intercept’s fearless reporting and incisive \
|
||||
commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and others—discuss \
|
||||
the crucial issues of our time: national security, civil liberties, foreign \
|
||||
policy, and criminal justice. Plus interviews with artists, thinkers, and \
|
||||
newsmakers who challenge our preconceptions about the world we live in.";
|
||||
let pd = new_podcast(&channel, 0);
|
||||
|
||||
assert_eq!(pd.title(), "Intercepted with Jeremy Scahill");
|
||||
assert_eq!(pd.link(), "https://theintercept.com/podcasts");
|
||||
assert_eq!(pd.description(), descr);
|
||||
assert_eq!(
|
||||
pd.image_uri(),
|
||||
Some(
|
||||
"http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
|
||||
uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
|
||||
2FIntercepted_COVER%2B_281_29.png"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_breakthrough() {
|
||||
let file = File::open("tests/feeds/TheBreakthrough.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let descr = "Latest Articles and Investigations from ProPublica, an independent, \
|
||||
non-profit newsroom that produces investigative journalism in the public \
|
||||
interest.";
|
||||
let pd = new_podcast(&channel, 0);
|
||||
|
||||
assert_eq!(pd.title(), "The Breakthrough");
|
||||
assert_eq!(pd.link(), "http://www.propublica.org/podcast");
|
||||
assert_eq!(pd.description(), descr);
|
||||
assert_eq!(
|
||||
pd.image_uri(),
|
||||
Some("http://www.propublica.org/images/podcast_logo_2.png")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_lup() {
|
||||
let file = File::open("tests/feeds/LinuxUnplugged.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let descr = "An open show powered by community LINUX Unplugged takes the best attributes \
|
||||
of open collaboration and focuses them into a weekly lifestyle show about \
|
||||
Linux.";
|
||||
let pd = new_podcast(&channel, 0);
|
||||
|
||||
assert_eq!(pd.title(), "LINUX Unplugged Podcast");
|
||||
assert_eq!(pd.link(), "http://www.jupiterbroadcasting.com/");
|
||||
assert_eq!(pd.description(), descr);
|
||||
assert_eq!(
|
||||
pd.image_uri(),
|
||||
Some("http://www.jupiterbroadcasting.com/images/LASUN-Badge1400.jpg")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_r4explanation() {
|
||||
let file = File::open("tests/feeds/R4Explanation.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = new_podcast(&channel, 0);
|
||||
let descr = "A weekly discussion of Rust RFCs";
|
||||
|
||||
assert_eq!(pd.title(), "Request For Explanation");
|
||||
assert_eq!(
|
||||
pd.link(),
|
||||
"https://request-for-explanation.github.io/podcast/"
|
||||
);
|
||||
assert_eq!(pd.description(), descr);
|
||||
assert_eq!(
|
||||
pd.image_uri(),
|
||||
Some("https://request-for-explanation.github.io/podcast/podcast.png")
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_intercepted() {
|
||||
let file = File::open("tests/feeds/Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let firstitem = channel.items().first().unwrap();
|
||||
let descr = "NSA whistleblower Edward Snowden discusses the massive Equifax data breach \
|
||||
and allegations of Russian interference in the US election. Commentator \
|
||||
Shaun King explains his call for a boycott of the NFL and talks about his \
|
||||
campaign to bring violent neo-Nazis to justice. Rapper Open Mike Eagle \
|
||||
performs.";
|
||||
let i = new_episode(&firstitem, 0).unwrap();
|
||||
|
||||
assert_eq!(i.title(), Some("The Super Bowl of Racism"));
|
||||
assert_eq!(i.uri(), "http://traffic.megaphone.fm/PPY6458293736.mp3");
|
||||
assert_eq!(i.description(), Some(descr));
|
||||
assert_eq!(i.length(), Some(66738886));
|
||||
assert_eq!(i.guid(), Some("7df4070a-9832-11e7-adac-cb37b05d5e24"));
|
||||
assert_eq!(i.published_date(), Some("Wed, 13 Sep 2017 10:00:00 +0000"));
|
||||
assert_eq!(i.epoch(), 1505296800);
|
||||
|
||||
let second = channel.items().iter().nth(1).unwrap();
|
||||
let i2 = new_episode(&second, 0).unwrap();
|
||||
|
||||
let descr2 = "This week on Intercepted: Jeremy gives an update on the aftermath of \
|
||||
Blackwater’s 2007 massacre of Iraqi civilians. Intercept reporter Lee Fang \
|
||||
lays out how a network of libertarian think tanks called the Atlas Network \
|
||||
is insidiously shaping political infrastructure in Latin America. We speak \
|
||||
with attorney and former Hugo Chavez adviser Eva Golinger about the \
|
||||
Venezuela\'s political turmoil.And we hear Claudia Lizardo of the \
|
||||
Caracas-based band, La Pequeña Revancha, talk about her music and hopes for \
|
||||
Venezuela.";
|
||||
assert_eq!(
|
||||
i2.title(),
|
||||
Some("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
|
||||
);
|
||||
assert_eq!(i2.uri(), "http://traffic.megaphone.fm/FL5331443769.mp3");
|
||||
assert_eq!(i2.description(), Some(descr2));
|
||||
assert_eq!(i2.length(), Some(67527575));
|
||||
assert_eq!(i2.guid(), Some("7c207a24-e33f-11e6-9438-eb45dcf36a1d"));
|
||||
assert_eq!(i2.published_date(), Some("Wed, 9 Aug 2017 10:00:00 +0000"));
|
||||
assert_eq!(i2.epoch(), 1502272800);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_breakthrough() {
|
||||
let file = File::open("tests/feeds/TheBreakthrough.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let firstitem = channel.items().first().unwrap();
|
||||
let descr = "<p>A reporter finds that homes meant to replace New York’s troubled \
|
||||
psychiatric hospitals might be just as bad.</p>";
|
||||
let i = new_episode(&firstitem, 0).unwrap();
|
||||
|
||||
assert_eq!(
|
||||
i.title(),
|
||||
Some("The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill")
|
||||
);
|
||||
assert_eq!(
|
||||
i.uri(),
|
||||
"http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3"
|
||||
);
|
||||
assert_eq!(i.description(), Some(descr));
|
||||
assert_eq!(i.length(), Some(33396551));
|
||||
assert_eq!(
|
||||
i.guid(),
|
||||
Some(
|
||||
"https://www.propublica.org/podcast/\
|
||||
the-breakthrough-hopelessness-exploitation-homes-for-mentally-ill#134472"
|
||||
)
|
||||
);
|
||||
assert_eq!(i.published_date(), Some("Fri, 8 Sep 2017 12:00:00 +0000"));
|
||||
assert_eq!(i.epoch(), 1504872000);
|
||||
|
||||
let second = channel.items().iter().nth(1).unwrap();
|
||||
let i2 = new_episode(&second, 0).unwrap();
|
||||
let descr2 = "<p>Jonathan Allen and Amie Parnes didn’t know their book would be called \
|
||||
‘Shattered,’ or that their extraordinary access would let them chronicle \
|
||||
the mounting signs of a doomed campaign.</p>";
|
||||
|
||||
assert_eq!(
|
||||
i2.title(),
|
||||
Some("The Breakthrough: Behind the Scenes of Hillary Clinton’s Failed Bid for \
|
||||
President")
|
||||
);
|
||||
assert_eq!(
|
||||
i2.uri(),
|
||||
"http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3".to_string()
|
||||
);
|
||||
assert_eq!(i2.description(), Some(descr2));
|
||||
assert_eq!(i2.length(), Some(17964071));
|
||||
assert_eq!(
|
||||
i2.guid(),
|
||||
Some(
|
||||
"https://www.propublica.\
|
||||
org/podcast/the-breakthrough-hillary-clinton-failed-presidential-bid#133721"
|
||||
)
|
||||
);
|
||||
assert_eq!(i2.published_date(), Some("Fri, 25 Aug 2017 12:00:00 +0000"));
|
||||
assert_eq!(i2.epoch(), 1503662400);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_lup() {
|
||||
let file = File::open("tests/feeds/LinuxUnplugged.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let firstitem = channel.items().first().unwrap();
|
||||
let descr =
|
||||
"Audit your network with a couple of easy commands on Kali Linux. Chris decides to \
|
||||
blow off a little steam by attacking his IoT devices, Wes has the scope on Equifax \
|
||||
blaming open source & the Beard just saved the show. It’s a really packed episode!";
|
||||
let i = new_episode(&firstitem, 0).unwrap();
|
||||
|
||||
assert_eq!(i.title(), Some("Hacking Devices with Kali Linux | LUP 214"));
|
||||
assert_eq!(
|
||||
i.uri(),
|
||||
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3"
|
||||
);
|
||||
assert_eq!(i.description(), Some(descr));
|
||||
assert_eq!(i.length(), Some(46479789));
|
||||
assert_eq!(i.guid(), Some("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D"));
|
||||
assert_eq!(i.published_date(), Some("Tue, 12 Sep 2017 22:24:42 -0700"));
|
||||
assert_eq!(i.epoch(), 1505280282);
|
||||
|
||||
let second = channel.items().iter().nth(1).unwrap();
|
||||
let i2 = new_episode(&second, 0).unwrap();
|
||||
|
||||
let descr2 = "<p>The Gnome project is about to solve one of our audience's biggest \
|
||||
Wayland’s concerns. But as the project takes on a new level of relevance, \
|
||||
decisions for the next version of Gnome have us worried about the \
|
||||
future.</p>\n\n<p>Plus we chat with Wimpy about the Ubuntu Rally in NYC, \
|
||||
Microsoft’s sneaky move to turn Windows 10 into the “ULTIMATE LINUX \
|
||||
RUNTIME”, community news & more!</p>";
|
||||
assert_eq!(i2.title(), Some("Gnome Does it Again | LUP 213"));
|
||||
assert_eq!(
|
||||
i2.uri(),
|
||||
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3"
|
||||
);
|
||||
assert_eq!(i2.description(), Some(descr2));
|
||||
assert_eq!(i2.length(), Some(36544272));
|
||||
assert_eq!(i2.guid(), Some("1CE57548-B36C-4F14-832A-5D5E0A24E35B"));
|
||||
assert_eq!(i2.published_date(), Some("Tue, 5 Sep 2017 20:57:27 -0700"));
|
||||
assert_eq!(i2.epoch(), 1504670247);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_r4expanation() {
|
||||
let file = File::open("tests/feeds/R4Explanation.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let firstitem = channel.items().iter().nth(9).unwrap();
|
||||
let descr = "This week we look at <a \
|
||||
href=\"https://github.com/rust-lang/rfcs/pull/2094\">RFC 2094</a> \
|
||||
\"Non-lexical lifetimes\"";
|
||||
let i = new_episode(&firstitem, 0).unwrap();
|
||||
|
||||
assert_eq!(i.title(), Some("Episode #9 - A Once in a Lifetime RFC"));
|
||||
assert_eq!(
|
||||
i.uri(),
|
||||
"http://request-for-explanation.github.\
|
||||
io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3"
|
||||
);
|
||||
assert_eq!(i.description(), Some(descr));
|
||||
assert_eq!(i.length(), Some(15077388));
|
||||
assert_eq!(
|
||||
i.guid(),
|
||||
Some("https://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/")
|
||||
);
|
||||
assert_eq!(i.published_date(), Some("Mon, 28 Aug 2017 15:00:00 -0700"));
|
||||
assert_eq!(i.epoch(), 1503957600);
|
||||
|
||||
let second = channel.items().iter().nth(8).unwrap();
|
||||
let i2 = new_episode(&second, 0).unwrap();
|
||||
|
||||
let descr2 = "This week we look at <a \
|
||||
href=\"https://github.com/rust-lang/rfcs/pull/2071\">RFC 2071</a> \"Add \
|
||||
impl Trait type alias and variable declarations\"";
|
||||
assert_eq!(i2.title(), Some("Episode #8 - An Existential Crisis"));
|
||||
assert_eq!(
|
||||
i2.uri(),
|
||||
"http://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/episode.\
|
||||
mp3"
|
||||
);
|
||||
assert_eq!(i2.description(), Some(descr2));
|
||||
assert_eq!(i2.length(), Some(13713219));
|
||||
assert_eq!(
|
||||
i2.guid(),
|
||||
Some("https://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/")
|
||||
);
|
||||
assert_eq!(i2.published_date(), Some("Tue, 15 Aug 2017 17:00:00 -0700"));
|
||||
assert_eq!(i2.epoch(), 1502841600);
|
||||
}
|
||||
}
|
||||
@ -1,235 +0,0 @@
|
||||
//! Helper utilities for accomplishing various tasks.
|
||||
|
||||
use rayon::prelude::*;
|
||||
use chrono::prelude::*;
|
||||
|
||||
use url::{Position, Url};
|
||||
|
||||
use errors::*;
|
||||
use dbqueries;
|
||||
use models::queryables::Episode;
|
||||
|
||||
use std::path::Path;
|
||||
use std::fs;
|
||||
|
||||
fn download_checker() -> Result<()> {
|
||||
let episodes = dbqueries::get_downloaded_episodes()?;
|
||||
|
||||
episodes
|
||||
.into_par_iter()
|
||||
.for_each(|mut ep| checker_helper(&mut ep));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn checker_helper(ep: &mut Episode) {
|
||||
if !Path::new(ep.local_uri().unwrap()).exists() {
|
||||
ep.set_local_uri(None);
|
||||
let res = ep.save();
|
||||
if let Err(err) = res {
|
||||
error!("Error while trying to update episode: {:#?}", ep);
|
||||
error!("Error: {}", err);
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn played_cleaner() -> Result<()> {
|
||||
let episodes = dbqueries::get_played_episodes()?;
|
||||
|
||||
let now_utc = Utc::now().timestamp() as i32;
|
||||
episodes.into_par_iter().for_each(|mut ep| {
|
||||
if ep.local_uri().is_some() && ep.played().is_some() {
|
||||
let played = ep.played().unwrap();
|
||||
// TODO: expose a config and a user set option.
|
||||
// Chnage the test too when exposed
|
||||
let limit = played + 172_800; // add 2days in seconds
|
||||
if now_utc > limit {
|
||||
let e = delete_local_content(&mut ep);
|
||||
if let Err(err) = e {
|
||||
error!("Error while trying to delete file: {:?}", ep.local_uri());
|
||||
error!("Error: {}", err);
|
||||
} else {
|
||||
info!("Episode {:?} was deleted succesfully.", ep.title());
|
||||
};
|
||||
}
|
||||
}
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check `ep.local_uri` field and delete the file it points to.
|
||||
pub fn delete_local_content(ep: &mut Episode) -> Result<()> {
|
||||
if ep.local_uri().is_some() {
|
||||
let uri = ep.local_uri().unwrap().to_owned();
|
||||
if Path::new(&uri).exists() {
|
||||
let res = fs::remove_file(&uri);
|
||||
if res.is_ok() {
|
||||
ep.set_local_uri(None);
|
||||
ep.save()?;
|
||||
} else {
|
||||
error!("Error while trying to delete file: {}", uri);
|
||||
error!("Error: {}", res.unwrap_err());
|
||||
};
|
||||
}
|
||||
} else {
|
||||
error!(
|
||||
"Something went wrong evaluating the following path: {:?}",
|
||||
ep.local_uri(),
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Database cleaning tasks.
|
||||
///
|
||||
/// Runs a download checker which looks for `Episode.local_uri` entries that
|
||||
/// doesn't exist and sets them to None
|
||||
///
|
||||
/// Runs a cleaner for played Episode's that are pass the lifetime limit and
|
||||
/// scheduled for removal.
|
||||
pub fn checkup() -> Result<()> {
|
||||
download_checker()?;
|
||||
played_cleaner()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove fragment identifiers and query pairs from a URL
|
||||
/// If url parsing fails, return's a trimmed version of the original input.
|
||||
pub fn url_cleaner(s: &str) -> String {
|
||||
// Copied from the cookbook.
|
||||
// https://rust-lang-nursery.github.io/rust-cookbook/net.html
|
||||
// #remove-fragment-identifiers-and-query-pairs-from-a-url
|
||||
match Url::parse(s) {
|
||||
Ok(parsed) => parsed[..Position::AfterPath].to_owned(),
|
||||
_ => s.trim().to_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    extern crate tempdir;

    use super::*;
    use database::{connection, truncate_db};
    use models::insertables::NewEpisodeBuilder;

    use self::tempdir::TempDir;
    use std::fs::File;
    use std::io::Write;

    /// Build a fresh database containing two episodes: "foo_bar", whose
    /// `local_uri` points at a real file inside a temp directory, and
    /// "bar_baz", whose `local_uri` points at a path that was never created.
    ///
    /// The returned `TempDir` must be kept alive by the caller; dropping it
    /// deletes the backing files.
    fn helper_db() -> TempDir {
        // Start from an empty database.
        truncate_db().unwrap();

        // One real file and one dangling path inside a scratch directory.
        let tmp_dir = TempDir::new("hammond_test").unwrap();
        let existing = tmp_dir.path().join("virtual_dl.mp3");
        let missing = tmp_dir.path().join("invalid_thing.mp3");
        let mut content = File::create(&existing).unwrap();
        writeln!(content, "Foooo").unwrap();

        // Insert both episodes.
        let pool = connection();
        let con = pool.get().unwrap();

        NewEpisodeBuilder::new()
            .uri("foo_bar".to_string())
            .local_uri(Some(existing.to_str().unwrap().to_owned()))
            .build()
            .into_episode(&con)
            .unwrap();

        NewEpisodeBuilder::new()
            .uri("bar_baz".to_string())
            .local_uri(Some(missing.to_str().unwrap().to_owned()))
            .build()
            .into_episode(&con)
            .unwrap();

        tmp_dir
    }

    #[test]
    fn test_download_checker() {
        let _tmp_dir = helper_db();
        download_checker().unwrap();

        // Only "foo_bar" has a file on disk, so it alone should survive.
        let downloaded = dbqueries::get_downloaded_episodes().unwrap();
        assert_eq!(downloaded.len(), 1);
        assert_eq!("foo_bar", downloaded.first().unwrap().uri());
    }

    #[test]
    fn test_checker_helper() {
        let _tmp_dir = helper_db();
        let mut episode = {
            let pool = connection();
            let con = pool.get().unwrap();
            dbqueries::get_episode_from_uri(&con, "bar_baz").unwrap()
        };

        // "bar_baz"'s file never existed, so the checker must clear its uri.
        checker_helper(&mut episode);
        assert!(episode.local_uri().is_none());
    }

    #[test]
    fn test_download_cleaner() {
        let _tmp_dir = helper_db();
        let mut episode = {
            let pool = connection();
            let con = pool.get().unwrap();
            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
        };

        let local_path = episode.local_uri().unwrap().to_owned();
        delete_local_content(&mut episode).unwrap();
        assert!(!Path::new(&local_path).exists());
    }

    #[test]
    fn test_played_cleaner_expired() {
        let _tmp_dir = helper_db();
        let mut episode = {
            let pool = connection();
            let con = pool.get().unwrap();
            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
        };

        // Mark as played well past the lifetime limit
        // (presumably 172_800 s — see commented hint in the original).
        let now_utc = Utc::now().timestamp() as i32;
        episode.set_played(Some(now_utc - 200_000));
        episode.save().unwrap();
        let local_path = episode.local_uri().unwrap().to_owned();

        // An expired episode's file should be deleted.
        played_cleaner().unwrap();
        assert!(!Path::new(&local_path).exists());
    }

    #[test]
    fn test_played_cleaner_none() {
        let _tmp_dir = helper_db();
        let mut episode = {
            let pool = connection();
            let con = pool.get().unwrap();
            dbqueries::get_episode_from_uri(&con, "foo_bar").unwrap()
        };

        // Mark as played, but still within the lifetime limit (172_800 s).
        let now_utc = Utc::now().timestamp() as i32;
        episode.set_played(Some(now_utc - 20_000));
        episode.save().unwrap();
        let local_path = episode.local_uri().unwrap().to_owned();

        // A not-yet-expired file must survive the cleaner.
        played_cleaner().unwrap();
        assert!(Path::new(&local_path).exists());
    }

    #[test]
    fn test_url_cleaner() {
        let clean = "http://traffic.megaphone.fm/FL8608731318.mp3";
        let with_query = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";

        assert_eq!(url_cleaner(with_query), clean);
        assert_eq!(url_cleaner(clean), clean);
        // Unparseable-after-whitespace input still parses once trimmed by Url,
        // and the query is stripped either way.
        assert_eq!(url_cleaner(&format!(" {}\t\n", with_query)), clean);
    }
}
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,139 +0,0 @@
|
||||
<?xml version="1.0"?>
|
||||
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:googleplay="http://www.google.com/schemas/play-podcasts/1.0" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd">
|
||||
<channel>
|
||||
<title>Request For Explanation</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/</link>
|
||||
<description>A weekly discussion of Rust RFCs</description>
|
||||
<image>
|
||||
<url>https://request-for-explanation.github.io/podcast/podcast.png</url>
|
||||
<title>Request For Explanation</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/</link>
|
||||
</image>
|
||||
<atom:link href="http://request-for-explanation.github.io/podcast/rss.xml" rel="self" type="application/rss+xml" />
|
||||
<googleplay:author>The Request For Explanation Podcast</googleplay:author>
|
||||
<itunes:author>The Request For Explanation Podcast</itunes:author>
|
||||
<googleplay:email>manishearth@gmail.com</googleplay:email>
|
||||
<itunes:owner>
|
||||
<itunes:name>Manish Goregaokar</itunes:name>
|
||||
<itunes:email>manishearth@gmail.com</itunes:email>
|
||||
</itunes:owner>
|
||||
<googleplay:image href="https://request-for-explanation.github.io/podcast/podcast.png" />
|
||||
<itunes:image href="https://request-for-explanation.github.io/podcast/podcast.png" />
|
||||
<language>en-us</language>
|
||||
<googleplay:category text="Technology"/>
|
||||
<itunes:category text="Technology" />
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<item>
|
||||
<title>Episode #0 - What the Hell</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep0-what-the-hell/</link>
|
||||
<pubDate>Mon, 19 Jun 2017 19:45:01 EST</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep0-what-the-hell/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep0-what-the-hell/episode.mp3" length="7057920" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2005">RFC 2005</a> "Match Ergonomics Using Default Binding Modes"]]></description>
|
||||
<itunes:order>1</itunes:order>
|
||||
<itunes:duration>20:29</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #1 - Constermash</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep1-constermash/</link>
|
||||
<pubDate>Thu, 29 Jun 2017 17:30:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep1-constermash/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep1-constermash/episode.mp3" length="28588800" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2000">RFC 2000</a> "Const Generics"]]></description>
|
||||
<itunes:order>2</itunes:order>
|
||||
<itunes:duration>16:09</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #2 - Stealing Chickens on the Internet</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep2-stealing-chickens-on-the-internet/</link>
|
||||
<pubDate>Thu, 6 July 2017 15:30:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep2-stealing-chickens-on-the-internet</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep2-stealing-chickens-on-the-internet/episode.mp3" length="19608187" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2052">RFC 2052</a> "Evolving Rust through Epochs"]]></description>
|
||||
<itunes:order>3</itunes:order>
|
||||
<itunes:duration>43:25</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #3 - Aaron's Favorite Topic</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep3-aarons-favorite-topic/</link>
|
||||
<pubDate>Mon, 10 July 2017 16:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep3-aarons-favorite-topic</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep3-aarons-favorite-topic/episode.mp3" length="19070229" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we talk about the RFC process in general -- what it is, how it works, and how it came to be.]]></description>
|
||||
<itunes:order>4</itunes:order>
|
||||
<itunes:duration>54:01</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #4 - Literally Haskell</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep4-literally-haskell/</link>
|
||||
<pubDate>Mon, 17 July 2017 17:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep4-literally-haskell</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep4-literally-haskell/episode.mp3" length="12973478" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/1598">RFC 1598</a> "Generic Associated Types"]]></description>
|
||||
<itunes:order>5</itunes:order>
|
||||
<itunes:duration>36:41</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #5 - Are you my main?</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep5-are-you-my-main/</link>
|
||||
<pubDate>Mon, 24 July 2017 16:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep5-are-you-my-main</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep5-are-you-my-main/episode.mp3" length="8232966" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/1937">RFC 1937</a> "? in main", as well as discuss some news on other RFCs.]]></description>
|
||||
<itunes:order>6</itunes:order>
|
||||
<itunes:duration>22:33</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #6 - Everything and the kitchen async</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep6-everything-and-the-kitchen-async/</link>
|
||||
<pubDate>Mon, 31 July 2017 16:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep6-everything-and-the-kitchen-async/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep6-everything-and-the-kitchen-async/episode.mp3" length="9530774" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2033">eRFC 2033</a> "Experimentally add coroutines to Rust"]]></description>
|
||||
<itunes:order>7</itunes:order>
|
||||
<itunes:duration>25:52</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #7 - Unwrapping a great RFC</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep7-unwrapping-a-great-rfc/</link>
|
||||
<pubDate>Tue, 8 Aug 2017 16:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep7-unwrapping-a-great-rfc/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep7-unwrapping-a-great-rfc/episode.mp3" length="8715317" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2091">RFC 2091</a> "Implicit caller location"]]></description>
|
||||
<itunes:order>8</itunes:order>
|
||||
<itunes:duration>24:11</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #8 - An Existential Crisis</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/</link>
|
||||
<pubDate>Tue, 15 Aug 2017 17:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep8-an-existential-crisis/episode.mp3" length="13713219" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2071">RFC 2071</a> "Add impl Trait type alias and variable declarations"]]></description>
|
||||
<itunes:order>9</itunes:order>
|
||||
<itunes:duration>38:33</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #9 - A Once in a Lifetime RFC</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/</link>
|
||||
<pubDate>Mon, 28 Aug 2017 15:00:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep9-a-once-in-a-lifetime-rfc/episode.mp3" length="15077388" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2094">RFC 2094</a> "Non-lexical lifetimes"]]></description>
|
||||
<itunes:order>10</itunes:order>
|
||||
<itunes:duration>42:13</itunes:duration>
|
||||
</item>
|
||||
<item>
|
||||
<title>Episode #10 - Two Paths Diverged in a Yellow Wood</title>
|
||||
<link>https://request-for-explanation.github.io/podcast/ep10-two-paths-diverged-in-a-yellow-wood/</link>
|
||||
<pubDate>Thu, 30 Aug 2017 1:30:00 PDT</pubDate>
|
||||
<guid isPermaLink="false">https://request-for-explanation.github.io/podcast/ep10-two-paths-diverged-in-a-yellow-wood/</guid>
|
||||
<enclosure url="http://request-for-explanation.github.io/podcast/ep10-two-paths-diverged-in-a-yellow-wood/episode.mp3" length="19994929" type="audio/mpeg" />
|
||||
<description><![CDATA[This week we look at <a href="https://github.com/rust-lang/rfcs/pull/2126">RFC 2126</a> "Clarify and streamline paths and visibility" (aka "The modules RFC")]]></description>
|
||||
<itunes:order>11</itunes:order>
|
||||
<itunes:duration>56:40</itunes:duration>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
|
||||
|
||||
@ -1,457 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<?xml-stylesheet type="text/xsl" media="screen" href="/~files/feed-premium.xsl"?>
|
||||
|
||||
<rss xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:sy="http://purl.org/rss/1.0/modules/syndication/" xmlns:admin="http://webns.net/mvcb/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:feedpress="https://feed.press/xmlns" xmlns:googleplay="http://www.google.com/schemas/play-podcasts/1.0" xmlns:media="http://www.rssboard.org/media-rss" version="2.0">
|
||||
<channel>
|
||||
<feedpress:locale>en</feedpress:locale>
|
||||
<atom:link rel="via" href="http://feeds.propublica.org/propublica/podcast"/>
|
||||
<atom:link rel="hub" href="http://feedpress.superfeedr.com/"/>
|
||||
<itunes:category text="News & Politics"/>
|
||||
<media:category scheme="http://www.itunes.com/dtds/podcast-1.0.dtd">News & Politics</media:category>
|
||||
<itunes:category text="Society & Culture">
|
||||
<itunes:category text="History"/>
|
||||
</itunes:category>
|
||||
<media:category scheme="http://www.itunes.com/dtds/podcast-1.0.dtd">Society & Culture/History</media:category>
|
||||
<media:rating>nonadult</media:rating>
|
||||
<media:description type="plain">The ProPublica Podcast</media:description>
|
||||
<media:credit role="author">ProPublica</media:credit>
|
||||
<media:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</media:keywords>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<googleplay:description>The podcast that takes you behind the scenes with journalists to hear how they nailed their biggest stories.</googleplay:description>
|
||||
<googleplay:author>ProPublica</googleplay:author>
|
||||
<googleplay:email>celeste.lecompte@propublica.org</googleplay:email>
|
||||
<googleplay:explicit>no</googleplay:explicit>
|
||||
<title>The Breakthrough</title>
|
||||
<link>http://www.propublica.org/podcast</link>
|
||||
<description>Latest Articles and Investigations from ProPublica, an independent, non-profit newsroom that produces investigative journalism in the public interest.</description>
|
||||
<language>en</language>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<copyright>Copyright 2017 Pro Publica Inc.</copyright>
|
||||
<pubDate>Fri, 22 Sep 2017 13:22:48 +0000</pubDate>
|
||||
<dc:date>2017-09-22T13:22:48+00:00</dc:date>
|
||||
<dc:language>en-us</dc:language>
|
||||
<dc:rights>Copyright 2017 Pro Publica Inc.</dc:rights>
|
||||
<atom:link href="http://feeds.propublica.org/propublica/podcast" rel="self" type="application/rss+xml"/>
|
||||
<itunes:subtitle>The ProPublica Podcast</itunes:subtitle>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>The podcast that takes you behind the scenes with journalists to hear how they nailed their biggest stories.</itunes:summary>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:owner>
|
||||
<itunes:email>celeste.lecompte@propublica.org</itunes:email>
|
||||
<itunes:name>ProPublica</itunes:name>
|
||||
</itunes:owner>
|
||||
<itunes:image href="http://www.propublica.org/images/podcast_logo_2.png"/>
|
||||
<item>
|
||||
<title>The Breakthrough: Hopelessness and Exploitation Inside Homes for Mentally Ill</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>A reporter finds that homes meant to replace New York’s troubled psychiatric hospitals might be just as bad.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726758/20170908-cliff-levy.mp3" length="33396551" type="audio/mpeg"/>
|
||||
<itunes:duration>00:27:50</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-hopelessness-exploitation-homes-for-mentally-ill#134472</link>
|
||||
<pubDate>Fri, 08 Sep 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-hopelessness-exploitation-homes-for-mentally-ill#134472</guid>
|
||||
<description><![CDATA[
|
||||
<p>A reporter finds that homes meant to replace New York’s troubled psychiatric hospitals might be just as bad.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-09-08T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
A reporter finds that homes meant to replace New York’s troubled psychiatric hospitals might be just as bad.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Behind the Scenes of Hillary Clinton’s Failed Bid for President</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>Jonathan Allen and Amie Parnes didn’t know their book would be called ‘Shattered,’ or that their extraordinary access would let them chronicle the mounting signs of a doomed campaign.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726759/16_JohnAllen-CRAFT.mp3" length="17964071" type="audio/mpeg"/>
|
||||
<itunes:duration>00:18:45</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-hillary-clinton-failed-presidential-bid#133721</link>
|
||||
<pubDate>Fri, 25 Aug 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-hillary-clinton-failed-presidential-bid#133721</guid>
|
||||
<description><![CDATA[
|
||||
<p>Jonathan Allen and Amie Parnes didn’t know their book would be called ‘Shattered,’ or that their extraordinary access would let them chronicle the mounting signs of a doomed campaign.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-08-25T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
Jonathan Allen and Amie Parnes didn’t know their book would be called ‘Shattered,’ or that their extraordinary access would let them chronicle the mounting signs of a doomed campaign.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: How a Small News Outlet Brought Down the State Hero</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>VTDigger’s Anne Galloway was suspicious the moment she heard about a too-good-to-be-true development. She didn’t know how right she was.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726760/15_VTDigger-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-how-a-small-news-outlet-brought-down-the-state-hero#133361</link>
|
||||
<pubDate>Fri, 11 Aug 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-how-a-small-news-outlet-brought-down-the-state-hero#133361</guid>
|
||||
<description><![CDATA[
|
||||
<p>VTDigger’s Anne Galloway was suspicious the moment she heard about a too-good-to-be-true development. She didn’t know how right she was.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-08-11T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
VTDigger’s Anne Galloway was suspicious the moment she heard about a too-good-to-be-true development. She didn’t know how right she was.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Reporting on Life and Death in the Delivery Room</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>ProPublica reporter Nina Martin and her team used social media and old-fashioned shoe leather to show how the U.S. has the worst maternal death rate in the developed world.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726761/14_MaternalMortality-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-reporting-on-life-and-death-in-the-delivery-room#133354</link>
|
||||
<pubDate>Fri, 28 Jul 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-reporting-on-life-and-death-in-the-delivery-room#133354</guid>
|
||||
<description><![CDATA[
|
||||
<p>ProPublica reporter Nina Martin and her team used social media and old-fashioned shoe leather to show how the U.S. has the worst maternal death rate in the developed world.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject>Health CareLost Mothers</dc:subject>
|
||||
<dc:date>2017-07-28T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
ProPublica reporter Nina Martin and her team used social media and old-fashioned shoe leather to show how the U.S. has the worst maternal death rate in the developed world.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: How an ICIJ Reporter Dug Up the World Bank’s Best Kept Secret</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>Sasha Chavkin chased it down across three continents, and into places he was warned weren't safe.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726762/13_WorldBank-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-how-an-icij-reporter-dug-up-world-banks-best-kept-secret#133343</link>
|
||||
<pubDate>Fri, 14 Jul 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-how-an-icij-reporter-dug-up-world-banks-best-kept-secret#133343</guid>
|
||||
<description><![CDATA[
|
||||
<p>Sasha Chavkin chased it down across three continents, and into places he was warned weren't safe.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-07-14T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
Sasha Chavkin chased it down across three continents, and into places he was warned weren't safe.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: A Reporter Crosses Borders to Uncover Labor Abuse</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>ProPublica’s Michael Grabell travels from the heart of Ohio to the mountains of Guatemala to track down immigrant workers harmed in American poultry plants.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726763/12_CaseFarms_Grabell-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-case-farms-labor-abuse-guatemala-michael-grabell#133335</link>
|
||||
<pubDate>Fri, 30 Jun 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-case-farms-labor-abuse-guatemala-michael-grabell#133335</guid>
|
||||
<description><![CDATA[
|
||||
<p>ProPublica’s Michael Grabell travels from the heart of Ohio to the mountains of Guatemala to track down immigrant workers harmed in American poultry plants.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject>Labor</dc:subject>
|
||||
<dc:date>2017-06-30T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
ProPublica’s Michael Grabell travels from the heart of Ohio to the mountains of Guatemala to track down immigrant workers harmed in American poultry plants.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Uncovering NYC Cops Making Millions in Suspicious Deals</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>On our first episode of this season’s The Breakthrough, we talk with WNYC’s Robert Lewis tells us how his reporting triggered an internal investigation of suspicious dealings made by active-duty New York police officers.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726764/11_RobertLewis-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-uncovering-nyc-cops-making-millions-in-suspicious-deals#133325</link>
|
||||
<pubDate>Fri, 16 Jun 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-uncovering-nyc-cops-making-millions-in-suspicious-deals#133325</guid>
|
||||
<description><![CDATA[
|
||||
<p>On our first episode of this season’s The Breakthrough, we talk with WNYC’s Robert Lewis tells us how his reporting triggered an internal investigation of suspicious dealings made by active-duty New York police officers.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-06-16T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
On our first episode of this season’s The Breakthrough, we talk with WNYC’s Robert Lewis tells us how his reporting triggered an internal investigation of suspicious dealings made by active-duty New…</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>Our Podcast, The Breakthrough, Is Back</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>In January, we launched The Breakthrough, which tells the stories behind investigative reporting. And we’re about to start a new season, on June 16.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726765/10_breakthrough_promo-CRAFT.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/our-podcast-the-breakthrough-is-back#133309</link>
|
||||
<pubDate>Fri, 09 Jun 2017 12:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/our-podcast-the-breakthrough-is-back#133309</guid>
|
||||
<description><![CDATA[
|
||||
<p>In January, we launched The Breakthrough, which tells the stories behind investigative reporting. And we’re about to start a new season, on June 16.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-06-09T12:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
In January, we launched The Breakthrough, which tells the stories behind investigative reporting. And we’re about to start a new season, on June 16.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>We Want Your Thoughts on Our Podcast</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary>We recently relaunched our podcast, in which journalists tell us how they nailed their biggest stories. Now we want to hear from you.</itunes:summary>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/5486888/9_Breakthrough_Survey.mp3" length="" type="audio/mpeg"/>
|
||||
<itunes:duration/>
|
||||
<link>https://www.propublica.org/podcast/we-want-your-thoughts-on-our-podcast#106727</link>
|
||||
<pubDate>Sat, 11 Mar 2017 00:54:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/we-want-your-thoughts-on-our-podcast#106727</guid>
|
||||
<description><![CDATA[
|
||||
<p>We recently relaunched our podcast, in which journalists tell us how they nailed their biggest stories. Now we want to hear from you.</p>
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-03-11T00:54:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
We recently relaunched our podcast, in which journalists tell us how they nailed their biggest stories. Now we want to hear from you.
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: How Reporters Really Use Unnamed Sources</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<itunes:duration>00:16:27</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-how-reporters-really-use-unnamed-sources#66520</link>
|
||||
<pubDate>Fri, 24 Feb 2017 14:00:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-how-reporters-really-use-unnamed-sources#66520</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-02-24T14:00:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Uncovering the FBI’s Secret Rules</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<itunes:duration>00:20:28</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-uncovering-the-fbis-secret-rules#66523</link>
|
||||
<pubDate>Fri, 17 Feb 2017 17:40:57 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-uncovering-the-fbis-secret-rules#66523</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-02-17T17:40:57+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Reporters Examine Murder Where Cops Struggle to Curb It</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<itunes:duration>00:20:01</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-reporters-examine-murder-where-cops-struggle-to-curb-it#66526</link>
|
||||
<pubDate>Fri, 10 Feb 2017 17:25:36 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-reporters-examine-murder-where-cops-struggle-to-curb-it#66526</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-02-10T17:25:36+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: What American Journalists Can Learn From Reporting Under Putin</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<itunes:duration>00:31:54</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-what-american-journalists-can-learn-reporting-under-putin#66529</link>
|
||||
<pubDate>Fri, 03 Feb 2017 14:00:40 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-what-american-journalists-can-learn-reporting-under-putin#66529</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-02-03T14:00:40+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Uncovering Danger at the Pharmacy Counter</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<itunes:duration>00:16:38</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-uncovering-danger-at-the-pharmacy-counter#66532</link>
|
||||
<pubDate>Fri, 27 Jan 2017 17:03:24 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-uncovering-danger-at-the-pharmacy-counter#66532</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-01-27T17:03:24+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: How a Reporter Solved a Decades-Old Murder</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/6726766/4_Breakthrough_%20MSKiller.mp3" length="56458604" type="audio/mpeg"/>
|
||||
<itunes:duration>23:31</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-how-a-reporter-solved-a-decades-old-murder#66537</link>
|
||||
<pubDate>Fri, 20 Jan 2017 14:00:14 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-how-a-reporter-solved-a-decades-old-murder#66537</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-01-20T14:00:14+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: Meet the Reporter Who Went Undercover in the Hermit Kingdom</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/5133080/3_Breakthrough_SukiKim.mp3" length="71396156" type="audio/mpeg"/>
|
||||
<itunes:duration>00:29:44</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-meet-the-reporter-went-undercover-in-the-hermit-kingdom#66540</link>
|
||||
<pubDate>Fri, 13 Jan 2017 14:00:24 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-meet-the-reporter-went-undercover-in-the-hermit-kingdom#66540</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-01-13T14:00:24+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>The Breakthrough: The $2 Drug Test</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/5094042/2_Breakthrough_DrugTest.mp3" length="63031678" type="audio/mpeg"/>
|
||||
<itunes:duration>00:26:17</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/the-breakthrough-the-2-dollar-drug-test#66543</link>
|
||||
<pubDate>Fri, 06 Jan 2017 22:22:00 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/the-breakthrough-the-2-dollar-drug-test#66543</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject>Busted</dc:subject>
|
||||
<dc:date>2017-01-06T22:22:00+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>Introducing Our New Podcast: The Breakthrough</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/5094043/1_TheBreakthrough_Promo.mp3" length="4910060" type="audio/mpeg"/>
|
||||
<itunes:duration>00:02:03</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/introducing-our-new-podcast-the-breakthrough#66546</link>
|
||||
<pubDate>Fri, 06 Jan 2017 22:21:18 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/introducing-our-new-podcast-the-breakthrough#66546</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2017-01-06T22:21:18+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>Renewable Energy: An Exxon Investigation Given Second Life as Trump Taps Exec for Cabinet</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/5023664/52_NeelaBanerjee.mp3" length="34999184" type="audio/mpeg"/>
|
||||
<itunes:duration>00:14:35</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/renewable-energy-an-exxon-investigation-given-second-life-as-trump-taps-exe#66549</link>
|
||||
<pubDate>Fri, 23 Dec 2016 21:57:02 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/renewable-energy-an-exxon-investigation-given-second-life-as-trump-taps-exe#66549</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2016-12-23T21:57:02+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
<item>
|
||||
<title>How We Found a Pro-Trump Group Blew Past Campaign Finance Laws</title>
|
||||
<itunes:author>ProPublica</itunes:author>
|
||||
<itunes:summary/>
|
||||
<enclosure url="http://tracking.feedpress.it/link/10581/4984992/51_Kate_Robert.mp3" length="34152423" type="audio/mpeg"/>
|
||||
<itunes:duration>00:14:14</itunes:duration>
|
||||
<link>https://www.propublica.org/podcast/how-we-found-a-pro-trump-group-blew-past-campaign-finance-laws#66552</link>
|
||||
<pubDate>Fri, 16 Dec 2016 22:45:08 +0000</pubDate>
|
||||
<guid isPermaLink="false">https://www.propublica.org/podcast/how-we-found-a-pro-trump-group-blew-past-campaign-finance-laws#66552</guid>
|
||||
<description><![CDATA[
|
||||
|
||||
]]></description>
|
||||
<dc:subject/>
|
||||
<dc:date>2016-12-16T22:45:08+00:00</dc:date>
|
||||
<dc:creator>ProPublica</dc:creator>
|
||||
<itunes:explicit>no</itunes:explicit>
|
||||
<itunes:keywords>journalism, news, investigative journalism, interview, propublica, media, behind the scenes</itunes:keywords>
|
||||
<itunes:subtitle>
|
||||
</itunes:subtitle>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
@ -1,20 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
name = "hammond-downloader"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
error-chain = "0.11.0"
|
||||
hyper = "0.11.7"
|
||||
log = "0.3.8"
|
||||
mime_guess = "1.8.2"
|
||||
reqwest = "0.8.1"
|
||||
tempdir = "0.3.5"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite"]
|
||||
git = "https://github.com/diesel-rs/diesel.git"
|
||||
|
||||
[dependencies.hammond-data]
|
||||
path = "../hammond-data"
|
||||
@ -1,223 +0,0 @@
|
||||
use reqwest;
|
||||
use hyper::header::*;
|
||||
use tempdir::TempDir;
|
||||
use mime_guess;
|
||||
|
||||
use std::fs::{rename, DirBuilder, File};
|
||||
use std::io::{BufWriter, Read, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use errors::*;
|
||||
use hammond_data::{Episode, Podcast};
|
||||
use hammond_data::xdg_dirs::{DL_DIR, HAMMOND_CACHE};
|
||||
|
||||
// TODO: Replace path that are of type &str with std::path.
|
||||
// TODO: Have a convention/document absolute/relative paths, if they should end with / or not.
|
||||
|
||||
// Adapted from https://github.com/mattgathu/rget .
|
||||
// I never wanted to write a custom downloader.
|
||||
// Sorry to those who will have to work with that code.
|
||||
// Would much rather use a crate,
|
||||
// or bindings for a lib like youtube-dl(python),
|
||||
// But cant seem to find one.
|
||||
// TODO: Write unit-tests.
|
||||
fn download_into(dir: &str, file_title: &str, url: &str) -> Result<String> {
|
||||
info!("GET request to: {}", url);
|
||||
let client = reqwest::Client::builder().referer(false).build()?;
|
||||
let mut resp = client.get(url).send()?;
|
||||
info!("Status Resp: {}", resp.status());
|
||||
|
||||
if !resp.status().is_success() {
|
||||
bail!("Unexpected server response: {}", resp.status())
|
||||
}
|
||||
|
||||
let headers = resp.headers().clone();
|
||||
|
||||
let ct_len = headers.get::<ContentLength>().map(|ct_len| **ct_len);
|
||||
let ct_type = headers.get::<ContentType>();
|
||||
ct_len.map(|x| info!("File Lenght: {}", x));
|
||||
ct_type.map(|x| info!("Content Type: {}", x));
|
||||
|
||||
// This could be prettier.
|
||||
// Determine the file extension from the http content-type header.
|
||||
let ext = if let Some(t) = ct_type {
|
||||
let mime = mime_guess::get_extensions(t.type_().as_ref(), t.subtype().as_ref());
|
||||
if let Some(m) = mime {
|
||||
if m.contains(&t.subtype().as_ref()) {
|
||||
t.subtype().as_ref().to_string()
|
||||
} else {
|
||||
m.first().unwrap().to_string()
|
||||
}
|
||||
} else {
|
||||
error!("Unkown mime type. {}", t);
|
||||
"unkown".to_string()
|
||||
}
|
||||
} else {
|
||||
error!("Unkown mime type.");
|
||||
"unkown".to_string()
|
||||
};
|
||||
info!("Extension: {}", ext);
|
||||
|
||||
// Construct a temp file to save desired content.
|
||||
let tempdir = TempDir::new_in(dir, "")?;
|
||||
|
||||
let out_file = format!("{}/temp.part", tempdir.path().to_str().unwrap(),);
|
||||
|
||||
// Save requested content into the file.
|
||||
save_io(&out_file, &mut resp, ct_len)?;
|
||||
|
||||
// Construct the desired path.
|
||||
let target = format!("{}/{}.{}", dir, file_title, ext);
|
||||
// Rename/move the tempfile into a permanent place upon success.
|
||||
rename(out_file, &target)?;
|
||||
info!("Downloading of {} completed succesfully.", &target);
|
||||
Ok(target)
|
||||
}
|
||||
|
||||
// TODO: Write unit-tests.
|
||||
/// Handles the I/O of fetching a remote file and saving into a Buffer and A File.
|
||||
fn save_io(file: &str, resp: &mut reqwest::Response, content_lenght: Option<u64>) -> Result<()> {
|
||||
info!("Downloading into: {}", file);
|
||||
let chunk_size = match content_lenght {
|
||||
Some(x) => x as usize / 99,
|
||||
None => 1024 as usize, // default chunk size
|
||||
};
|
||||
|
||||
let mut writer = BufWriter::new(File::create(&file)?);
|
||||
|
||||
loop {
|
||||
let mut buffer = vec![0; chunk_size];
|
||||
let bcount = resp.read(&mut buffer[..])?;
|
||||
buffer.truncate(bcount);
|
||||
if !buffer.is_empty() {
|
||||
writer.write_all(buffer.as_slice())?;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_download_folder(pd_title: &str) -> Result<String> {
|
||||
// It might be better to make it a hash of the title
|
||||
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
|
||||
|
||||
// Create the folder
|
||||
DirBuilder::new().recursive(true).create(&download_fold)?;
|
||||
Ok(download_fold)
|
||||
}
|
||||
|
||||
// TODO: Refactor
|
||||
pub fn get_episode(ep: &mut Episode, download_folder: &str) -> Result<()> {
|
||||
// Check if its alrdy downloaded
|
||||
if ep.local_uri().is_some() {
|
||||
if Path::new(ep.local_uri().unwrap()).exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// If the path is not valid, then set it to None.
|
||||
ep.set_local_uri(None);
|
||||
ep.save()?;
|
||||
};
|
||||
|
||||
let res = download_into(download_folder, ep.title().unwrap(), ep.uri());
|
||||
|
||||
if let Ok(path) = res {
|
||||
// If download succedes set episode local_uri to dlpath.
|
||||
ep.set_local_uri(Some(&path));
|
||||
ep.save()?;
|
||||
Ok(())
|
||||
} else {
|
||||
error!("Something whent wrong while downloading.");
|
||||
Err(res.unwrap_err())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cache_image(pd: &Podcast) -> Option<String> {
|
||||
let url = pd.image_uri()?.to_owned();
|
||||
if url == "" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let download_fold = format!(
|
||||
"{}{}",
|
||||
HAMMOND_CACHE.to_str().unwrap(),
|
||||
pd.title().to_owned()
|
||||
);
|
||||
|
||||
// Hacky way
|
||||
// TODO: make it so it returns the first cover.* file encountered.
|
||||
// Use glob instead
|
||||
let png = format!("{}/cover.png", download_fold);
|
||||
let jpg = format!("{}/cover.jpg", download_fold);
|
||||
let jpe = format!("{}/cover.jpe", download_fold);
|
||||
let jpeg = format!("{}/cover.jpeg", download_fold);
|
||||
if Path::new(&png).exists() {
|
||||
return Some(png);
|
||||
} else if Path::new(&jpe).exists() {
|
||||
return Some(jpe);
|
||||
} else if Path::new(&jpg).exists() {
|
||||
return Some(jpg);
|
||||
} else if Path::new(&jpeg).exists() {
|
||||
return Some(jpeg);
|
||||
};
|
||||
|
||||
DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&download_fold)
|
||||
.unwrap();
|
||||
|
||||
let dlpath = download_into(&download_fold, "cover", &url);
|
||||
if let Ok(path) = dlpath {
|
||||
info!("Cached img into: {}", &path);
|
||||
Some(path)
|
||||
} else {
|
||||
error!("Failed to get feed image.");
|
||||
error!("Error: {}", dlpath.unwrap_err());
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use hammond_data::Source;
    use hammond_data::feed::index;
    use hammond_data::dbqueries;
    use diesel::Identifiable;

    use std::fs;

    #[test]
    fn test_get_dl_folder() {
        // get_download_folder should return "<DL_DIR>/<title>" and create it.
        let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
        assert_eq!(get_download_folder("foo").unwrap(), foo_);
        // Best-effort cleanup; a removal failure is deliberately ignored.
        let _ = fs::remove_dir_all(foo_);
    }

    #[test]
    fn test_cache_image() {
        // NOTE(review): this test fetches a live feed over the network and
        // writes into HAMMOND_CACHE — it will fail offline.
        let url = "http://www.newrustacean.com/feed.xml";

        // Create and index a source
        let source = Source::from_url(url).unwrap();
        // Copy it's id
        let sid = source.id().clone();

        // Convert Source it into a Feed and index it
        let feed = source.into_feed().unwrap();
        index(vec![feed]);

        // Get the Podcast
        let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();

        // Expect the cover to be cached as a .png next to the cache root.
        // (Note: same "{}{}" concatenation as cache_image — no '/' between
        // cache dir and title.)
        let img_path = cache_image(&pd);
        let foo_ = format!(
            "{}{}/cover.png",
            HAMMOND_CACHE.to_str().unwrap(),
            pd.title()
        );
        assert_eq!(img_path, Some(foo_));
    }
}
|
||||
@ -1,13 +0,0 @@
|
||||
use diesel::result;
|
||||
use reqwest;
|
||||
use hammond_data;
|
||||
use std::io;
|
||||
|
||||
// Crate-wide `Error`/`Result` types generated by the `error_chain!` macro.
// Each `foreign_links` entry adds an `Error` variant plus a `From` impl,
// so `?` can auto-convert these foreign error types into our `Error`.
error_chain! {
    foreign_links {
        ReqError(reqwest::Error);
        IoError(io::Error);
        DieselResultError(result::Error);
        DataError(hammond_data::errors::Error);
    }
}
|
||||
@ -1,15 +0,0 @@
|
||||
#![recursion_limit = "1024"]
|
||||
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate error_chain;
|
||||
extern crate hammond_data;
|
||||
extern crate hyper;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate mime_guess;
|
||||
extern crate reqwest;
|
||||
extern crate tempdir;
|
||||
|
||||
pub mod downloader;
|
||||
pub mod errors;
|
||||
@ -1,35 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
build = "build.rs"
|
||||
name = "hammond-gtk"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
dissolve = "0.2.2"
|
||||
gdk = "0.7.0"
|
||||
gdk-pixbuf = "0.3.0"
|
||||
gio = "0.3.0"
|
||||
glib = "0.4.0"
|
||||
log = "0.3.8"
|
||||
loggerv = "0.5.1"
|
||||
open = "1.2.1"
|
||||
rayon = "0.9.0"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite"]
|
||||
git = "https://github.com/diesel-rs/diesel.git"
|
||||
|
||||
[dependencies.diesel_codegen]
|
||||
features = ["sqlite"]
|
||||
git = "https://github.com/diesel-rs/diesel.git"
|
||||
|
||||
[dependencies.gtk]
|
||||
features = ["v3_22"]
|
||||
version = "0.3.0"
|
||||
|
||||
[dependencies.hammond-data]
|
||||
path = "../hammond-data"
|
||||
|
||||
[dependencies.hammond-downloader]
|
||||
path = "../hammond-downloader"
|
||||
@ -1,9 +0,0 @@
|
||||
use std::process::Command;
|
||||
|
||||
/// Build script: compiles the GTK resource bundle (resources/resources.xml)
/// with `glib-compile-resources` before the crate is built.
fn main() {
    let status = Command::new("glib-compile-resources")
        .args(&["--generate", "resources.xml"])
        .current_dir("resources")
        .status()
        .expect("failed to spawn glib-compile-resources");

    // Previously only the spawn was checked (`status().unwrap()`), so a
    // non-zero exit code let the build continue with stale/missing resources.
    assert!(
        status.success(),
        "glib-compile-resources failed with {}",
        status
    );
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 943 B |
@ -1,63 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.20.1 -->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<object class="GtkBox" id="empty_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="hexpand">True</property>
|
||||
<property name="vexpand">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">12</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="pixel_size">128</property>
|
||||
<property name="icon_name">application-rss+xml-symbolic</property>
|
||||
<property name="use_fallback">True</property>
|
||||
<style>
|
||||
<class name="dim-label"/>
|
||||
</style>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label" translatable="yes">No Feed Subscription Found</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.4399999999999999"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label" translatable="yes">You can subscribe to feeds using the "+" button</property>
|
||||
<style>
|
||||
<class name="dim-label"/>
|
||||
</style>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,202 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.20.1 -->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<object class="GtkBox" id="episode_box">
|
||||
<property name="width_request">100</property>
|
||||
<property name="height_request">25</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="margin_top">5</property>
|
||||
<property name="margin_bottom">5</property>
|
||||
<property name="spacing">5</property>
|
||||
<child>
|
||||
<object class="GtkButton" id="play_button">
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="margin_top">5</property>
|
||||
<property name="margin_bottom">5</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-media-play</property>
|
||||
<property name="use_fallback">True</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="download_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="margin_top">5</property>
|
||||
<property name="margin_bottom">5</property>
|
||||
<property name="always_show_image">True</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-save</property>
|
||||
<property name="use_fallback">True</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="delete_button">
|
||||
<property name="name">delete_button</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-delete</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="mark_unplayed_button">
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="tooltip_text" translatable="yes">Mark episode as Unplayed.</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-undo</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="mark_played_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="tooltip_text" translatable="yes">Mark episode as played.</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-apply</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkLabel" id="title_label">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="wrap">True</property>
|
||||
<property name="ellipsize">end</property>
|
||||
<property name="lines">1</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkExpander" id="expand_desc">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="label_fill">True</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<property name="min_content_height">100</property>
|
||||
<property name="max_content_height">600</property>
|
||||
<property name="propagate_natural_width">True</property>
|
||||
<property name="propagate_natural_height">True</property>
|
||||
<child>
|
||||
<object class="GtkTextView" id="desc_text_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="margin_bottom">5</property>
|
||||
<property name="editable">False</property>
|
||||
<property name="wrap_mode">word-char</property>
|
||||
<property name="cursor_visible">False</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label">
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label" translatable="yes">Description:</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">3</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,204 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.20.1 -->
|
||||
<interface domain="gnome-music">
|
||||
<requires lib="gtk+" version="3.12"/>
|
||||
<object class="GtkBox" id="podcast_widget">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="margin_left">32</property>
|
||||
<property name="margin_right">32</property>
|
||||
<property name="margin_start">32</property>
|
||||
<property name="margin_end">32</property>
|
||||
<property name="margin_top">64</property>
|
||||
<property name="margin_bottom">32</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">15</property>
|
||||
<child>
|
||||
<object class="GtkImage" id="cover">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">start</property>
|
||||
<property name="margin_left">1</property>
|
||||
<property name="margin_right">1</property>
|
||||
<property name="margin_start">1</property>
|
||||
<property name="margin_end">1</property>
|
||||
<property name="stock">gtk-missing-image</property>
|
||||
<property name="use_fallback">True</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<child>
|
||||
<object class="GtkLabel" id="title_label">
|
||||
<property name="width_request">50</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="label" translatable="yes">Foobar</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="justify">center</property>
|
||||
<property name="wrap">True</property>
|
||||
<property name="max_width_chars">28</property>
|
||||
<property name="track_visited_links">False</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="unsub_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="tooltip_text" translatable="yes">Unsubscribe from this Podcast.
|
||||
Warning: This will delete downloaded content associated with this Podcast.</property>
|
||||
<property name="stock">gtk-delete</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="mark_all_played_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="tooltip_text" translatable="yes">Mark all episodes as Played.</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="stock">gtk-apply</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<property name="min_content_width">200</property>
|
||||
<property name="max_content_width">200</property>
|
||||
<property name="propagate_natural_width">True</property>
|
||||
<property name="propagate_natural_height">True</property>
|
||||
<child>
|
||||
<object class="GtkTextView" id="desc_text_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="editable">False</property>
|
||||
<property name="wrap_mode">word-char</property>
|
||||
<property name="cursor_visible">False</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="hexpand">True</property>
|
||||
<property name="vexpand">True</property>
|
||||
<property name="hscrollbar_policy">never</property>
|
||||
<child>
|
||||
<object class="GtkViewport" id="view">
|
||||
<property name="width_request">400</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="shadow_type">none</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,89 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.20.1 -->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<object class="GtkBox" id="fb_child">
|
||||
<property name="width_request">256</property>
|
||||
<property name="height_request">256</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkOverlay">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child type="overlay">
|
||||
<object class="GtkImage" id="pd_cover">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="stock">gtk-missing-image</property>
|
||||
<property name="use_fallback">True</property>
|
||||
<property name="icon_size">6</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="overlay">
|
||||
<object class="GtkImage" id="banner">
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">start</property>
|
||||
<property name="stock">gtk-missing-image</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="index">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child type="overlay">
|
||||
<object class="GtkLabel" id="banner_label">
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="halign">end</property>
|
||||
<property name="valign">start</property>
|
||||
<property name="margin_right">40</property>
|
||||
<property name="margin_top">38</property>
|
||||
<property name="label" translatable="yes">Num</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="justify">center</property>
|
||||
<property name="track_visited_links">False</property>
|
||||
<property name="xalign">1</property>
|
||||
<property name="yalign">0</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="index">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkLabel" id="pd_title">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="label" translatable="yes">label</property>
|
||||
<property name="use_markup">True</property>
|
||||
<property name="justify">center</property>
|
||||
<property name="ellipsize">end</property>
|
||||
<property name="single_line_mode">True</property>
|
||||
<property name="max_width_chars">33</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,41 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.20.1 -->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<object class="GtkBox" id="fb_parent">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkViewport">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<object class="GtkFlowBox" id="flowbox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">baseline</property>
|
||||
<property name="valign">start</property>
|
||||
<property name="homogeneous">True</property>
|
||||
<property name="column_spacing">5</property>
|
||||
<property name="row_spacing">2</property>
|
||||
<property name="max_children_per_line">20</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,23 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<component type="desktop">
|
||||
<id>org.gnome.Hammond</id>
|
||||
<name>Hammond</name>
|
||||
<project_license>GPL-3.0</project_license>
|
||||
<metadata_license>CC0-1.0</metadata_license>
|
||||
<developer_name>Daniel García Moreno</developer_name>
|
||||
<summary>Gtk+ Podcast client</summary>
|
||||
<url type="homepage">https://gitlab.gnome.org/alatiera/Hammond</url>
|
||||
<description>
|
||||
Hammond is a Fast, Safe and Reliable Gtk+ Podcast client written in Rust
|
||||
</description>
|
||||
<screenshots>
|
||||
<screenshot>
|
||||
<image type="source">https://gitlab.gnome.org/alatiera/Hammond/raw/master/assets/podcasts_view.png</image>
|
||||
<image type="source">https://gitlab.gnome.org/alatiera/Hammond/raw/master/assets/podcast_widget.png</image>
|
||||
</screenshot>
|
||||
</screenshots>
|
||||
<releases>
|
||||
<release version="0.1.1" date="2017-11-13"/>
|
||||
</releases>
|
||||
<update_contact>jordanpetridis@protonmail.com</update_contact>
|
||||
</component>
|
||||
@ -1,11 +0,0 @@
|
||||
[Desktop Entry]
|
||||
Name=Hammond
|
||||
GenericName=Podcast Client
|
||||
Comment=Play, Subscribe and Manage Podcast Feeds.
|
||||
Icon=multimedia-player
|
||||
Exec=hammond
|
||||
Terminal=false
|
||||
Type=Application
|
||||
StartupNotify=true
|
||||
Categories=AudioVideo;Audio;Video;
|
||||
Keywords=Podcast
|
||||
@ -1,12 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<gresources>
|
||||
<gresource prefix="/org/gnome/hammond/">
|
||||
<file>banner.png</file>
|
||||
<file preprocess="xml-stripblanks">gtk/episode_widget.ui</file>
|
||||
<file preprocess="xml-stripblanks">gtk/podcast_widget.ui</file>
|
||||
<file preprocess="xml-stripblanks">gtk/empty_view.ui</file>
|
||||
<file preprocess="xml-stripblanks">gtk/podcasts_view.ui</file>
|
||||
<file preprocess="xml-stripblanks">gtk/podcasts_child.ui</file>
|
||||
<file preprocess="xml-stripblanks">gtk/headerbar.ui</file>
|
||||
</gresource>
|
||||
</gresources>
|
||||
@ -1,67 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use hammond_data::Source;
|
||||
use hammond_data::utils::url_cleaner;
|
||||
|
||||
use podcasts_view::update_podcasts_view;
|
||||
use utils;
|
||||
|
||||
/// Build the application `HeaderBar` from its UI resource and wire up its
/// controls: the "add feed" popover, the home button and the refresh button.
///
/// `stack` is the main view stack; each signal handler clones it (via the
/// `clone!` macro) so it can switch views or trigger refreshes later.
pub fn get_headerbar(stack: &gtk::Stack) -> gtk::HeaderBar {
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/headerbar.ui");

    let header: gtk::HeaderBar = builder.get_object("headerbar1").unwrap();
    let home_button: gtk::Button = builder.get_object("homebutton").unwrap();
    let refresh_button: gtk::Button = builder.get_object("refbutton").unwrap();

    let add_toggle_button: gtk::MenuButton = builder.get_object("add-toggle-button").unwrap();
    let add_popover: gtk::Popover = builder.get_object("add-popover").unwrap();
    let new_url: gtk::Entry = builder.get_object("new-url").unwrap();
    let add_button: gtk::Button = builder.get_object("add-button").unwrap();
    // TODO: check if url exists in the db and lock the button
    // NOTE(review): debug print left in; consider removing or using a log macro.
    new_url.connect_changed(move |url| {
        println!("{:?}", url.get_text());
    });

    // Add-feed flow: clean the typed url, kick off the fetch, hide the popover.
    add_button.connect_clicked(clone!(stack, add_popover => move |_| {
        let url = new_url.get_text().unwrap_or_default();
        let url = url_cleaner(&url);
        on_add_bttn_clicked(&stack, &url);

        // TODO: lock the button instead of hiding and add notification of feed added.
        // TODO: map the spinner
        add_popover.hide();
    }));
    add_popover.hide();
    add_toggle_button.set_popover(&add_popover);

    // TODO: make it a back arrow button, that will hide when appropriate,
    // and add a StackSwitcher when more views are added.
    // Home button: switch back to the podcasts grid; the grid is rebuilt
    // unless the previously visible view was the podcast widget ("pdw").
    home_button.connect_clicked(clone!(stack => move |_| {
        let vis = stack.get_visible_child_name().unwrap();
        stack.set_visible_child_name("fb_parent");
        if vis != "pdw" {
            update_podcasts_view(&stack);
        }
    }));

    // FIXME: There appears to be a memory leak here.
    refresh_button.connect_clicked(clone!(stack => move |_| {
        utils::refresh_feed(&stack, None, None);
    }));

    header
}
|
||||
|
||||
fn on_add_bttn_clicked(stack: >k::Stack, url: &str) {
|
||||
let source = Source::from_url(url);
|
||||
|
||||
if let Ok(s) = source {
|
||||
info!("{:?} feed added", url);
|
||||
// update the db
|
||||
utils::refresh_feed(stack, Some(vec![s]), None);
|
||||
} else {
|
||||
error!("Feed probably already exists.");
|
||||
error!("Error: {:?}", source.unwrap_err());
|
||||
}
|
||||
}
|
||||
@ -1,125 +0,0 @@
|
||||
extern crate gdk;
|
||||
extern crate gdk_pixbuf;
|
||||
extern crate gio;
|
||||
extern crate glib;
|
||||
extern crate gtk;
|
||||
|
||||
extern crate diesel;
|
||||
extern crate dissolve;
|
||||
extern crate hammond_data;
|
||||
extern crate hammond_downloader;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
extern crate loggerv;
|
||||
extern crate open;
|
||||
// extern crate rayon;
|
||||
|
||||
// use rayon::prelude::*;
|
||||
use log::LogLevel;
|
||||
use hammond_data::utils::checkup;
|
||||
|
||||
use gtk::prelude::*;
|
||||
use gio::{ActionMapExt, ApplicationExt, MenuExt, SimpleActionExt};
|
||||
|
||||
// http://gtk-rs.org/tuto/closures
// Helper macro: clones the listed variables before moving them into a
// closure, so gtk signal handlers can own their captures instead of
// borrowing from the enclosing scope.
#[macro_export]
macro_rules! clone {
    // Internal rules that forward closure parameters unchanged.
    (@param _) => ( _ );
    (@param $x:ident) => ( $x );
    // `clone!(a, b => move || body)` — zero-argument closure form.
    ($($n:ident),+ => move || $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move || $body
        }
    );
    // `clone!(a, b => move |x, y| body)` — closure-with-parameters form.
    ($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
        {
            $( let $n = $n.clone(); )+
            move |$(clone!(@param $p),)+| $body
        }
    );
}
|
||||
|
||||
mod views;
|
||||
mod widgets;
|
||||
mod headerbar;
|
||||
|
||||
mod utils;
|
||||
mod static_resource;
|
||||
|
||||
use views::podcasts_view;
|
||||
|
||||
/*
|
||||
THIS IS STILL A PROTOTYPE.
|
||||
*/
|
||||
|
||||
/// Assemble the main application window: app menu, view stack, headerbar,
/// and the startup idle tasks (a deferred feed refresh and a db checkup).
fn build_ui(app: &gtk::Application) {
    // App menu with Quit and a manual database Checkup entry.
    let menu = gio::Menu::new();
    menu.append("Quit", "app.quit");
    menu.append("Checkup", "app.check");
    app.set_app_menu(&menu);

    // Get the main window
    let window = gtk::ApplicationWindow::new(app);
    window.set_default_size(1150, 650);
    // Setup the Stack that will manage the switch between podcasts_view and podcast_widget.
    let stack = podcasts_view::setup_stack();
    window.add(&stack);

    window.connect_delete_event(|w, _| {
        w.destroy();
        Inhibit(false)
    });

    // Setup quit in the app menu since default is overwritten.
    let quit = gio::SimpleAction::new("quit", None);
    let window2 = window.clone();
    quit.connect_activate(move |_, _| {
        window2.destroy();
    });
    app.add_action(&quit);

    // Setup the checkup in the app menu.
    let check = gio::SimpleAction::new("check", None);
    check.connect_activate(move |_, _| {
        let _ = checkup();
    });
    app.add_action(&check);

    // queue a db update 1 minute after the startup.
    gtk::idle_add(clone!(stack => move || {
        utils::refresh_feed(&stack, None, Some(60));
        glib::Continue(false)
    }));

    // One-off background database checkup; errors are intentionally ignored.
    gtk::idle_add(move || {
        let _ = checkup();
        glib::Continue(false)
    });

    // Get the headerbar
    let header = headerbar::get_headerbar(&stack);
    window.set_titlebar(&header);

    window.show_all();
    window.activate();
    // `activate` becomes a no-op; all setup happens here on `startup`.
    app.connect_activate(move |_| ());
}
|
||||
|
||||
// Entry point: initialize logging, load the embedded resources, then run
// the gtk application (UI is built in `build_ui` on the `startup` signal).
fn main() {
    use gio::ApplicationExtManual;

    // TODO: make the logger a cli -vv option
    loggerv::init_with_level(LogLevel::Info).unwrap();
    // UI building depends on the gresource bundle, so load it up front.
    static_resource::init().expect("Something went wrong with the resource file initialization.");

    let application = gtk::Application::new("org.gnome.Hammond", gio::ApplicationFlags::empty())
        .expect("Initialization failed...");

    application.connect_startup(move |app| {
        build_ui(app);
    });

    // application.run(&[]);
    ApplicationExtManual::run(&application, &[]);
}
|
||||
@ -1,16 +0,0 @@
|
||||
use gio::{resources_register, Error, Resource};
|
||||
use glib::Bytes;
|
||||
|
||||
pub fn init() -> Result<(), Error> {
|
||||
// load the gresource binary at build time and include/link it into the final binary.
|
||||
let res_bytes = include_bytes!("../resources/resources.gresource");
|
||||
|
||||
// Create Resource it will live as long the value lives.
|
||||
let gbytes = Bytes::from_static(res_bytes.as_ref());
|
||||
let resource = Resource::new_from_data(&gbytes)?;
|
||||
|
||||
// Register the resource so It wont be dropped and will continue to live in memory.
|
||||
resources_register(&resource);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@ -1,63 +0,0 @@
|
||||
use glib;
|
||||
use gtk;
|
||||
|
||||
use hammond_data::feed;
|
||||
use hammond_data::Source;
|
||||
|
||||
use std::{thread, time};
|
||||
use std::cell::RefCell;
|
||||
use std::sync::mpsc::{channel, Receiver};
|
||||
|
||||
use views::podcasts_view;
|
||||
|
||||
// Payload handed from `refresh_feed` to the glib idle callback: the stack
// to refresh plus the receiving end of the worker's completion channel.
type Foo = RefCell<Option<(gtk::Stack, Receiver<bool>)>>;

// Create a thread local storage that will store the arguments to be transferred.
thread_local!(static GLOBAL: Foo = RefCell::new(None));
|
||||
|
||||
/// Update the rss feed(s) originating from `Source`.
|
||||
/// If `source` is None, Fetches all the `Source` entries in the database and updates them.
|
||||
/// `delay` represents the desired time in seconds for the thread to sleep before executing.
|
||||
/// When It's done,it queues up a `podcast_view` refresh.
|
||||
pub fn refresh_feed(stack: >k::Stack, source: Option<Vec<Source>>, delay: Option<u64>) {
|
||||
// Create a async channel.
|
||||
let (sender, receiver) = channel();
|
||||
|
||||
// Pass the desired arguments into the Local Thread Storage.
|
||||
GLOBAL.with(clone!(stack => move |global| {
|
||||
*global.borrow_mut() = Some((stack, receiver));
|
||||
}));
|
||||
|
||||
thread::spawn(move || {
|
||||
if let Some(s) = delay {
|
||||
let t = time::Duration::from_secs(s);
|
||||
thread::sleep(t);
|
||||
}
|
||||
|
||||
let feeds = {
|
||||
if let Some(vec) = source {
|
||||
Ok(feed::fetch(vec))
|
||||
} else {
|
||||
feed::fetch_all()
|
||||
}
|
||||
};
|
||||
|
||||
if let Ok(x) = feeds {
|
||||
feed::index(x);
|
||||
|
||||
sender.send(true).expect("Couldn't send data to channel");;
|
||||
glib::idle_add(refresh_podcasts_view);
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
fn refresh_podcasts_view() -> glib::Continue {
|
||||
GLOBAL.with(|global| {
|
||||
if let Some((ref stack, ref reciever)) = *global.borrow() {
|
||||
if reciever.try_recv().is_ok() {
|
||||
podcasts_view::update_podcasts_view(stack);
|
||||
}
|
||||
}
|
||||
});
|
||||
glib::Continue(false)
|
||||
}
|
||||
@ -1 +0,0 @@
|
||||
pub mod podcasts_view;
|
||||
@ -1,152 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
use gdk_pixbuf::Pixbuf;
|
||||
use diesel::associations::Identifiable;
|
||||
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::Podcast;
|
||||
|
||||
use widgets::podcast::*;
|
||||
|
||||
fn setup_empty_view(stack: >k::Stack) {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/empty_view.ui");
|
||||
let view: gtk::Box = builder.get_object("empty_view").unwrap();
|
||||
stack.add_named(&view, "empty");
|
||||
}
|
||||
|
||||
fn show_empty_view(stack: >k::Stack) {
|
||||
stack.set_visible_child_name("empty");
|
||||
|
||||
info!("Empty view.");
|
||||
}
|
||||
|
||||
fn populate_flowbox(flowbox: >k::FlowBox) {
|
||||
let podcasts = dbqueries::get_podcasts();
|
||||
|
||||
if let Ok(pds) = podcasts {
|
||||
pds.iter().for_each(|parent| {
|
||||
let f = create_flowbox_child(parent);
|
||||
flowbox.add(&f);
|
||||
});
|
||||
flowbox.show_all();
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a `FlowBoxChild` representing one podcast: cover image, title and
/// the "new episodes" banner overlay.
fn create_flowbox_child(pd: &Podcast) -> gtk::FlowBoxChild {
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/podcasts_child.ui");

    // Copy of gnome-music AlbumWidget
    let box_: gtk::Box = builder.get_object("fb_child").unwrap();
    let pd_title: gtk::Label = builder.get_object("pd_title").unwrap();
    let pd_cover: gtk::Image = builder.get_object("pd_cover").unwrap();
    let banner: gtk::Image = builder.get_object("banner").unwrap();
    let banner_title: gtk::Label = builder.get_object("banner_label").unwrap();

    pd_title.set_text(pd.title());

    // Cover art may be missing; in that case the ui file's stock fallback stays.
    let cover = get_pixbuf_from_path(pd);
    if let Some(img) = cover {
        pd_cover.set_from_pixbuf(&img);
    };

    configure_banner(pd, &banner, &banner_title);

    let fbc = gtk::FlowBoxChild::new();
    // There's probably a better way to store the id somewhere.
    // The podcast's db id is stashed in the widget name so the activate
    // handler (init_flowbox) can look the podcast up again.
    // fbc.set_name(&pd.id().to_string());
    WidgetExt::set_name(&fbc, &pd.id().to_string());
    fbc.add(&box_);
    fbc
}
|
||||
|
||||
fn configure_banner(pd: &Podcast, banner: >k::Image, banner_title: >k::Label) {
|
||||
let bann = Pixbuf::new_from_resource_at_scale("/org/gnome/hammond/banner.png", 256, 256, true);
|
||||
if let Ok(b) = bann {
|
||||
banner.set_from_pixbuf(&b);
|
||||
|
||||
let new_episodes = dbqueries::get_pd_unplayed_episodes(pd);
|
||||
|
||||
if let Ok(n) = new_episodes {
|
||||
if !n.is_empty() {
|
||||
banner_title.set_text(&n.len().to_string());
|
||||
banner.show();
|
||||
banner_title.show();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Replace the stack's "pdw" entry with a freshly built widget for `parent`
/// and make it the visible view.
///
/// Panics if the stack has no child named "pdw" (it is registered at startup
/// by `setup_podcast_widget`).
fn on_flowbox_child_activate(stack: &gtk::Stack, parent: &Podcast) {
    let old = stack.get_child_by_name("pdw").unwrap();
    let pdw = podcast_widget(stack, parent);

    stack.remove(&old);
    stack.add_named(&pdw, "pdw");
    stack.set_visible_child(&pdw);

    // aggressive memory cleanup
    // probably not needed
    old.destroy();
}
|
||||
|
||||
/// Build the podcasts grid view, register it on the stack as "fb_parent",
/// and show either the grid or the empty placeholder depending on whether
/// any podcasts exist. Returns the populated flowbox.
fn setup_podcasts_flowbox(stack: &gtk::Stack) -> gtk::FlowBox {
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/podcasts_view.ui");
    let fb_parent: gtk::Box = builder.get_object("fb_parent").unwrap();
    let flowbox: gtk::FlowBox = builder.get_object("flowbox").unwrap();
    init_flowbox(stack, &flowbox);

    stack.add_named(&fb_parent, "fb_parent");

    // No children after init means the db has no podcasts -> placeholder.
    if flowbox.get_children().is_empty() {
        show_empty_view(stack);
    } else {
        stack.set_visible_child(&fb_parent);
    };

    flowbox
}
|
||||
|
||||
/// Create the application's main view stack and register every top-level
/// view on it: the empty placeholder, the podcast widget ("pdw") and the
/// podcasts grid ("fb_parent"). `setup_podcasts_flowbox` runs last so it
/// can decide which view starts out visible.
pub fn setup_stack() -> gtk::Stack {
    let stack = gtk::Stack::new();
    stack.set_transition_type(gtk::StackTransitionType::SlideLeftRight);
    setup_empty_view(&stack);
    setup_podcast_widget(&stack);
    setup_podcasts_flowbox(&stack);
    stack
}
|
||||
|
||||
/// Rebuild the podcasts grid ("fb_parent") from the database, then pick the
/// visible view: switch between the grid and the empty placeholder when the
/// podcast count crossed zero, otherwise keep whatever view was showing.
pub fn update_podcasts_view(stack: &gtk::Stack) {
    let vis = stack.get_visible_child_name().unwrap();
    let old = stack.get_child_by_name("fb_parent").unwrap();
    stack.remove(&old);

    // Re-creates and re-registers "fb_parent" with fresh db contents.
    let flowbox = setup_podcasts_flowbox(stack);

    if vis == "empty" && !flowbox.get_children().is_empty() {
        // First podcast(s) appeared: leave the placeholder for the grid.
        stack.set_visible_child_name("fb_parent");
    } else if vis == "fb_parent" && flowbox.get_children().is_empty() {
        // Last podcast disappeared: fall back to the placeholder.
        stack.set_visible_child_name("empty");
    } else {
        // preserve the visible widget
        stack.set_visible_child_name(&vis);
    };

    // aggressive memory cleanup
    // probably not needed
    old.destroy();
}
|
||||
|
||||
/// Hook up the child-activated handler (opens the podcast widget for the
/// clicked podcast) and fill the flowbox from the database.
fn init_flowbox(stack: &gtk::Stack, flowbox: &gtk::FlowBox) {
    use gtk::WidgetExt;

    // TODO: handle unwraps.
    flowbox.connect_child_activated(clone!(stack => move |_, child| {
        // This is such an ugly hack...
        // The podcast's db id was stored as the widget name in create_flowbox_child.
        // let id = child.get_name().unwrap().parse::<i32>().unwrap();
        let id = WidgetExt::get_name(child).unwrap().parse::<i32>().unwrap();
        let parent = dbqueries::get_podcast_from_id(id).unwrap();
        on_flowbox_child_activate(&stack, &parent);
    }));
    // Populate the flowbox with the Podcasts.
    populate_flowbox(flowbox);
}
|
||||
@ -1,204 +0,0 @@
|
||||
use open;
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::{Episode, Podcast};
|
||||
use hammond_downloader::downloader;
|
||||
use hammond_data::utils::*;
|
||||
use hammond_data::errors::*;
|
||||
|
||||
use dissolve::strip_html_tags;
|
||||
use diesel::associations::Identifiable;
|
||||
|
||||
use std::thread;
|
||||
use std::cell::RefCell;
|
||||
use std::sync::mpsc::{channel, Receiver};
|
||||
use std::path::Path;
|
||||
|
||||
use glib;
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
use gtk::{ContainerExt, TextBufferExt};
|
||||
|
||||
// Widgets (download, play, delete buttons) plus the channel the download
// thread reports completion on; stored thread-locally so the glib idle
// callback can flip button visibility when the download finishes.
type Foo = RefCell<Option<(gtk::Button, gtk::Button, gtk::Button, Receiver<bool>)>>;

thread_local!(static GLOBAL: Foo = RefCell::new(None));
|
||||
|
||||
/// Build the widget for a single episode and wire up its buttons
/// (download / play / delete / mark played / mark unplayed).
///
/// NOTE(review): "epidose" looks like a typo of "episode"; renaming would
/// touch callers outside this view, so the name is kept here.
fn epidose_widget(episode: &mut Episode, pd_title: &str) -> gtk::Box {
    // This is just a prototype and will be reworked probably.
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/episode_widget.ui");

    let ep: gtk::Box = builder.get_object("episode_box").unwrap();
    let download_button: gtk::Button = builder.get_object("download_button").unwrap();
    let play_button: gtk::Button = builder.get_object("play_button").unwrap();
    let delete_button: gtk::Button = builder.get_object("delete_button").unwrap();
    let played_button: gtk::Button = builder.get_object("mark_played_button").unwrap();
    let unplayed_button: gtk::Button = builder.get_object("mark_unplayed_button").unwrap();

    let title_label: gtk::Label = builder.get_object("title_label").unwrap();
    // let desc_label: gtk::Label = builder.get_object("desc_label").unwrap();
    let expander: gtk::Expander = builder.get_object("expand_desc").unwrap();
    let desc_text_view: gtk::TextView = builder.get_object("desc_text_view").unwrap();

    title_label.set_xalign(0.0);

    if let Some(t) = episode.title() {
        title_label.set_text(t);
    }

    // Fill the description text view when the expander is activated; the
    // raw feed html is stripped down to plain text first.
    if episode.description().is_some() {
        let d = episode.description().unwrap().to_owned();

        expander.connect_activate(move |_| {
            let plain_text = strip_html_tags(&d).join(" ");
            // TODO: handle unwrap
            let buff = desc_text_view.get_buffer().unwrap();
            buff.set_text(plain_text.trim());
        });
    }

    // A set `played` timestamp means the episode was already played.
    if episode.played().is_some() {
        unplayed_button.show();
        played_button.hide();
    }

    // Show or hide the play/delete/download buttons upon widget initialization.
    let local_uri = episode.local_uri();
    if local_uri.is_some() && Path::new(local_uri.unwrap()).exists() {
        download_button.hide();
        play_button.show();
        delete_button.show();
    }

    // Play: open the local file and mark the episode played.
    play_button.connect_clicked(clone!(episode, played_button, unplayed_button => move |_| {
        let mut episode = episode.clone();
        on_play_bttn_clicked(*episode.id());
        let _ = episode.set_played_now();
        played_button.hide();
        unplayed_button.show();
    }));

    // Delete: remove the downloaded file and flip buttons back to "download".
    delete_button.connect_clicked(clone!(episode, play_button, download_button => move |del| {
        on_delete_bttn_clicked(*episode.id());
        del.hide();
        play_button.hide();
        download_button.show();
    }));

    // Manually mark played / unplayed; each handler shows its counterpart.
    played_button.connect_clicked(clone!(episode, unplayed_button => move |played| {
        let mut episode = episode.clone();
        let _ = episode.set_played_now();
        played.hide();
        unplayed_button.show();
    }));

    unplayed_button.connect_clicked(clone!(episode, played_button => move |un| {
        let mut episode = episode.clone();
        episode.set_played(None);
        let _ = episode.save();
        un.hide();
        played_button.show();
    }));

    // Download runs on a worker thread; see on_download_clicked.
    let pd_title = pd_title.to_owned();
    download_button.connect_clicked(clone!(play_button, delete_button, episode => move |dl| {
        on_download_clicked(
            &pd_title,
            &mut episode.clone(),
            dl,
            &play_button,
            &delete_button,
        );
    }));

    ep
}
|
||||
|
||||
// TODO: show notification when dl is finished.
|
||||
fn on_download_clicked(
|
||||
pd_title: &str,
|
||||
ep: &mut Episode,
|
||||
download_bttn: >k::Button,
|
||||
play_bttn: >k::Button,
|
||||
del_bttn: >k::Button,
|
||||
) {
|
||||
// Create a async channel.
|
||||
let (sender, receiver) = channel();
|
||||
|
||||
// Pass the desired arguments into the Local Thread Storage.
|
||||
GLOBAL.with(clone!(download_bttn, play_bttn, del_bttn => move |global| {
|
||||
*global.borrow_mut() = Some((download_bttn, play_bttn, del_bttn, receiver));
|
||||
}));
|
||||
|
||||
let pd_title = pd_title.to_owned();
|
||||
let mut ep = ep.clone();
|
||||
thread::spawn(move || {
|
||||
let download_fold = downloader::get_download_folder(&pd_title).unwrap();
|
||||
let e = downloader::get_episode(&mut ep, download_fold.as_str());
|
||||
if let Err(err) = e {
|
||||
error!("Error while trying to download: {}", ep.uri());
|
||||
error!("Error: {}", err);
|
||||
};
|
||||
sender.send(true).expect("Couldn't send data to channel");;
|
||||
glib::idle_add(receive);
|
||||
});
|
||||
}
|
||||
|
||||
fn on_play_bttn_clicked(episode_id: i32) {
|
||||
let local_uri = dbqueries::get_episode_local_uri_from_id(episode_id).unwrap();
|
||||
|
||||
if let Some(uri) = local_uri {
|
||||
if Path::new(&uri).exists() {
|
||||
info!("Opening {}", uri);
|
||||
let e = open::that(&uri);
|
||||
if let Err(err) = e {
|
||||
error!("Error while trying to open file: {}", uri);
|
||||
error!("Error: {}", err);
|
||||
};
|
||||
}
|
||||
} else {
|
||||
error!(
|
||||
"Something went wrong evaluating the following path: {:?}",
|
||||
local_uri
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn on_delete_bttn_clicked(episode_id: i32) {
|
||||
let mut ep = dbqueries::get_episode_from_id(episode_id).unwrap();
|
||||
|
||||
let e = delete_local_content(&mut ep);
|
||||
if let Err(err) = e {
|
||||
error!("Error while trying to delete file: {:?}", ep.local_uri());
|
||||
error!("Error: {}", err);
|
||||
};
|
||||
}
|
||||
|
||||
fn receive() -> glib::Continue {
|
||||
GLOBAL.with(|global| {
|
||||
if let Some((ref download_bttn, ref play_bttn, ref del_bttn, ref reciever)) =
|
||||
*global.borrow()
|
||||
{
|
||||
if reciever.try_recv().is_ok() {
|
||||
download_bttn.hide();
|
||||
play_bttn.show();
|
||||
del_bttn.show();
|
||||
}
|
||||
}
|
||||
});
|
||||
glib::Continue(false)
|
||||
}
|
||||
|
||||
pub fn episodes_listbox(pd: &Podcast) -> Result<gtk::ListBox> {
|
||||
let episodes = dbqueries::get_pd_episodes(pd)?;
|
||||
|
||||
let list = gtk::ListBox::new();
|
||||
episodes.into_iter().for_each(|mut ep| {
|
||||
let w = epidose_widget(&mut ep, pd.title());
|
||||
list.add(&w)
|
||||
});
|
||||
|
||||
list.set_vexpand(false);
|
||||
list.set_hexpand(false);
|
||||
list.set_visible(true);
|
||||
list.set_selection_mode(gtk::SelectionMode::None);
|
||||
Ok(list)
|
||||
}
|
||||
@ -1,2 +0,0 @@
|
||||
pub mod podcast;
|
||||
pub mod episode;
|
||||
@ -1,144 +0,0 @@
|
||||
use gtk::prelude::*;
|
||||
use gtk;
|
||||
use gdk_pixbuf::Pixbuf;
|
||||
|
||||
use std::fs;
|
||||
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::Podcast;
|
||||
use hammond_downloader::downloader;
|
||||
|
||||
use widgets::episode::episodes_listbox;
|
||||
use podcasts_view::update_podcasts_view;
|
||||
|
||||
/// Build the podcast-detail `gtk::Box` for `pd`: cover, title, description,
/// episode list, and the unsubscribe / mark-all-played buttons.
///
/// `stack` is passed through to the button callbacks so they can navigate
/// back to the overview and refresh the views.
pub fn podcast_widget(stack: &gtk::Stack, pd: &Podcast) -> gtk::Box {
    // Adapted from gnome-music AlbumWidget
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/podcast_widget.ui");
    let pd_widget: gtk::Box = builder.get_object("podcast_widget").unwrap();

    let cover: gtk::Image = builder.get_object("cover").unwrap();
    let title_label: gtk::Label = builder.get_object("title_label").unwrap();
    let desc_text_view: gtk::TextView = builder.get_object("desc_text_view").unwrap();
    let view: gtk::Viewport = builder.get_object("view").unwrap();
    let unsub_button: gtk::Button = builder.get_object("unsub_button").unwrap();
    let played_button: gtk::Button = builder.get_object("mark_all_played_button").unwrap();

    // TODO: should spawn a thread to avoid locking the UI probably.
    unsub_button.connect_clicked(clone!(stack, pd => move |bttn| {
        on_unsub_button_clicked(&stack, &pd, bttn);
    }));

    title_label.set_text(pd.title());
    // If the episode query fails the detail page simply shows no episode list.
    let listbox = episodes_listbox(pd);
    if let Ok(l) = listbox {
        view.add(&l);
    }

    // Populate the description text view.
    {
        let buff = desc_text_view.get_buffer().unwrap();
        buff.set_text(pd.description());
    }

    // Cover art is optional; skip it when it can't be fetched or decoded.
    let img = get_pixbuf_from_path(pd);
    if let Some(i) = img {
        cover.set_from_pixbuf(&i);
    }

    played_button.connect_clicked(clone!(stack, pd => move |_| {
        on_played_button_clicked(&stack, &pd);
    }));

    // Only shown while unplayed episodes exist.
    show_played_button(pd, &played_button);

    pd_widget
}
|
||||
|
||||
/// Unsubscribe from `pd`: remove the feed from the database, wipe its
/// download folder, then navigate back to the podcasts overview.
fn on_unsub_button_clicked(stack: &gtk::Stack, pd: &Podcast, unsub_button: &gtk::Button) {
    let res = dbqueries::remove_feed(pd);
    if res.is_ok() {
        info!("{} was removed succesfully.", pd.title());
        // hack to get away without properly checking for none.
        // if pressed twice would panic.
        unsub_button.hide();

        // Best-effort cleanup of downloaded episode files; failures are ignored.
        let dl_fold = downloader::get_download_folder(pd.title());
        if let Ok(fold) = dl_fold {
            let res3 = fs::remove_dir_all(&fold);
            if res3.is_ok() {
                info!("All the content at, {} was removed succesfully", &fold);
            }
        };
    }
    // Navigate back to the overview even if removal failed, and rebuild it.
    stack.set_visible_child_name("fb_parent");
    update_podcasts_view(stack);
}
|
||||
|
||||
fn on_played_button_clicked(stack: >k::Stack, pd: &Podcast) {
|
||||
let _ = dbqueries::update_none_to_played_now(pd);
|
||||
|
||||
update_podcast_widget(stack, pd);
|
||||
}
|
||||
|
||||
fn show_played_button(pd: &Podcast, played_button: >k::Button) {
|
||||
let new_episodes = dbqueries::get_pd_unplayed_episodes(pd);
|
||||
|
||||
if let Ok(n) = new_episodes {
|
||||
if !n.is_empty() {
|
||||
played_button.show()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_pixbuf_from_path(pd: &Podcast) -> Option<Pixbuf> {
|
||||
let img_path = downloader::cache_image(pd);
|
||||
if let Some(i) = img_path {
|
||||
Pixbuf::new_from_file_at_scale(&i, 256, 256, true).ok()
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Register an (empty) podcast widget on `stack` under the name "pdw".
///
/// `update_podcast_widget` later swaps this placeholder for a populated one.
pub fn setup_podcast_widget(stack: &gtk::Stack) {
    let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/podcast_widget.ui");
    let pd_widget: gtk::Box = builder.get_object("podcast_widget").unwrap();

    stack.add_named(&pd_widget, "pdw");
}
|
||||
|
||||
/// Replace the "pdw" child of `stack` with a freshly built widget for `pd`,
/// preserving whichever stack page is currently visible.
pub fn update_podcast_widget(stack: &gtk::Stack, pd: &Podcast) {
    let old = stack.get_child_by_name("pdw").unwrap();
    let pdw = podcast_widget(stack, pd);
    // Remember the visible page: adding/removing children can change it.
    let vis = stack.get_visible_child_name().unwrap();

    // Order matters: the name "pdw" must be free before re-adding.
    stack.remove(&old);
    stack.add_named(&pdw, "pdw");
    stack.set_visible_child_name(&vis);
    // Explicitly destroy the detached widget so it doesn't leak.
    old.destroy();
}
|
||||
|
||||
// NOTE(review): `test_get_pixbuf_from_path` fetches a real feed over the
// network and writes to the shared database — it is an integration test,
// not a unit test, and will fail offline.
#[cfg(test)]
mod tests {
    use hammond_data::Source;
    use hammond_data::feed::index;
    use diesel::Identifiable;
    use super::*;

    #[test]
    fn test_get_pixbuf_from_path() {
        let url = "http://www.newrustacean.com/feed.xml";

        // Create and index a source
        let source = Source::from_url(url).unwrap();
        // Copy it's id
        let sid = source.id().clone();

        // Convert Source it into a Feed and index it
        let feed = source.into_feed().unwrap();
        index(vec![feed]);

        // Get the Podcast
        let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
        // Cover art should be cached and decodable for this feed.
        let pxbuf = get_pixbuf_from_path(&pd);
        assert!(pxbuf.is_some());
    }
}
|
||||
107
meson.build
107
meson.build
@ -1,37 +1,90 @@
|
||||
# Adatped from:
|
||||
# https://gitlab.gnome.org/danigm/fractal/blob/6e2911f9d2353c99a18a6c19fab7f903c4bbb431/meson.build
|
||||
|
||||
project(
|
||||
'hammond', 'rust',
|
||||
version: '0.2.0',
|
||||
'gnome-podcasts', 'rust',
|
||||
version: '0.4.7',
|
||||
license: 'GPLv3',
|
||||
)
|
||||
|
||||
hammond_version = meson.project_version()
|
||||
version_array = hammond_version.split('.')
|
||||
hammond_major_version = version_array[0].to_int()
|
||||
hammond_minor_version = version_array[1].to_int()
|
||||
hammond_version_micro = version_array[2].to_int()
|
||||
dependency('sqlite3', version: '>= 3.20')
|
||||
dependency('openssl', version: '>= 1.0')
|
||||
dependency('dbus-1')
|
||||
|
||||
hammond_prefix = get_option('prefix')
|
||||
hammond_bindir = join_paths(hammond_prefix, get_option('bindir'))
|
||||
dependency('glib-2.0', version: '>= 2.56')
|
||||
dependency('gio-2.0', version: '>= 2.56')
|
||||
dependency('gdk-pixbuf-2.0')
|
||||
dependency('gtk+-3.0', version: '>= 3.24.11')
|
||||
dependency('libhandy-0.0', version: '>= 0.0.13')
|
||||
|
||||
install_data('hammond-gtk/resources/org.gnome.Hammond.desktop', install_dir : get_option('datadir') + '/applications')
|
||||
install_data('hammond-gtk/resources/org.gnome.Hammond.appdata.xml', install_dir : get_option('datadir') + '/appdata')
|
||||
dependency('gstreamer-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-base-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-audio-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-video-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-player-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-plugins-base-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-plugins-bad-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-bad-audio-1.0', version: '>= 1.16')
|
||||
|
||||
cargo = find_program('cargo', required: true)
|
||||
gresource = find_program('glib-compile-resources', required: true)
|
||||
gschemas = find_program('glib-compile-schemas', required: true)
|
||||
|
||||
if get_option('profile') == 'development'
|
||||
profile = '.Devel'
|
||||
vcs_tag = run_command('git', 'rev-parse', '--short', 'HEAD').stdout().strip()
|
||||
if vcs_tag == ''
|
||||
version_suffix = '-devel'
|
||||
else
|
||||
version_suffix = '-@0@'.format (vcs_tag)
|
||||
endif
|
||||
else
|
||||
profile = ''
|
||||
version_suffix = ''
|
||||
endif
|
||||
|
||||
podcast_toml = files(
|
||||
'Cargo.toml',
|
||||
'Cargo.lock',
|
||||
'podcasts-data/Cargo.toml',
|
||||
'podcasts-downloader/Cargo.toml',
|
||||
'podcasts-gtk/Cargo.toml',
|
||||
)
|
||||
|
||||
application_id = 'org.gnome.Podcasts@0@'.format(profile)
|
||||
i18n = import('i18n')
|
||||
gnome = import('gnome')
|
||||
|
||||
subdir('podcasts-gtk/po')
|
||||
podir = join_paths (meson.source_root (), 'podcasts-gtk', 'po')
|
||||
|
||||
podcasts_version = meson.project_version()
|
||||
|
||||
podcasts_prefix = get_option('prefix')
|
||||
podcasts_bindir = join_paths(podcasts_prefix, get_option('bindir'))
|
||||
podcasts_localedir = join_paths(podcasts_prefix, get_option('localedir'))
|
||||
|
||||
podcasts_conf = configuration_data()
|
||||
podcasts_conf.set('appid', application_id)
|
||||
podcasts_conf.set('bindir', podcasts_bindir)
|
||||
|
||||
datadir = get_option('datadir')
|
||||
subdir('podcasts-gtk/resources')
|
||||
|
||||
cargo = find_program('cargo', required: false)
|
||||
gresource = find_program('glib-compile-resources', required: false)
|
||||
cargo_vendor = find_program('cargo-vendor', required: false)
|
||||
cargo_script = find_program('scripts/cargo.sh')
|
||||
test_script = find_program('scripts/test.sh')
|
||||
|
||||
cargo_release = custom_target('cargo-build',
|
||||
build_by_default: true,
|
||||
output: ['hammond'],
|
||||
install: true,
|
||||
install_dir: hammond_bindir,
|
||||
command: [cargo_script, '@CURRENT_SOURCE_DIR@', '@OUTPUT@'])
|
||||
subdir('podcasts-data/src')
|
||||
subdir('podcasts-downloader/src')
|
||||
subdir('podcasts-gtk/src')
|
||||
|
||||
run_target('release', command: ['scripts/release.sh',
|
||||
meson.project_name() + '-' + hammond_version
|
||||
],
|
||||
depends: [cargo_release])
|
||||
meson.add_dist_script(
|
||||
'scripts/dist-vendor.sh',
|
||||
meson.source_root(),
|
||||
join_paths(meson.build_root(), 'meson-dist', meson.project_name() + '-' + podcasts_version)
|
||||
)
|
||||
|
||||
test(
|
||||
'cargo-test',
|
||||
test_script,
|
||||
args: meson.build_root(),
|
||||
workdir: meson.source_root(),
|
||||
timeout: 3000
|
||||
)
|
||||
|
||||
9
meson_options.txt
Normal file
9
meson_options.txt
Normal file
@ -0,0 +1,9 @@
|
||||
option (
|
||||
'profile',
|
||||
type: 'combo',
|
||||
choices: [
|
||||
'default',
|
||||
'development'
|
||||
],
|
||||
value: 'default'
|
||||
)
|
||||
@ -1,38 +0,0 @@
|
||||
{
|
||||
"app-id": "org.gnome.Hammond",
|
||||
"runtime": "org.gnome.Platform",
|
||||
"runtime-version": "3.26",
|
||||
"sdk": "org.gnome.Sdk",
|
||||
"sdk-extensions": [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command": "hammond",
|
||||
"finish-args": [
|
||||
"--share=network",
|
||||
"--socket=x11",
|
||||
"--socket=wayland",
|
||||
"--talk-name=org.freedesktop.Notifications"
|
||||
],
|
||||
"build-options": {
|
||||
"env": {
|
||||
"CARGO_HOME": "/run/build/Hammond/cargo"
|
||||
},
|
||||
"build-args": [ "--share=network" ]
|
||||
},
|
||||
"modules": [
|
||||
{
|
||||
"name": "Hammond",
|
||||
"buildsystem": "simple",
|
||||
"build-commands": [
|
||||
"source /usr/lib/sdk/rust-stable/enable.sh && ./configure --prefix=/app && make && make install"
|
||||
],
|
||||
"sources": [
|
||||
{
|
||||
"type": "git",
|
||||
"url": "https://gitlab.gnome.org/alatiera/Hammond.git",
|
||||
"branch": "master"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
72
org.gnome.Podcasts.Devel.json
Normal file
72
org.gnome.Podcasts.Devel.json
Normal file
@ -0,0 +1,72 @@
|
||||
{
|
||||
"app-id" : "org.gnome.Podcasts.Devel",
|
||||
"runtime" : "org.gnome.Platform",
|
||||
"runtime-version" : "3.36",
|
||||
"sdk" : "org.gnome.Sdk",
|
||||
"sdk-extensions" : [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command" : "gnome-podcasts",
|
||||
"tags" : [
|
||||
"nightly"
|
||||
],
|
||||
"finish-args" : [
|
||||
"--share=network",
|
||||
"--share=ipc",
|
||||
"--socket=x11",
|
||||
"--socket=fallback-x11",
|
||||
"--socket=wayland",
|
||||
"--socket=pulseaudio",
|
||||
"--env=USE_PLAYBIN3=1"
|
||||
],
|
||||
"build-options" : {
|
||||
"append-path" : "/usr/lib/sdk/rust-stable/bin",
|
||||
"build-args" : [
|
||||
"--share=network"
|
||||
],
|
||||
"env" : {
|
||||
"CARGO_HOME" : "/run/build/Podcasts/cargo",
|
||||
"RUSTFLAGS" : "",
|
||||
"RUST_BACKTRACE" : "1"
|
||||
}
|
||||
},
|
||||
"modules" : [
|
||||
{
|
||||
"name" : "libhandy",
|
||||
"buildsystem" : "meson",
|
||||
"config-opts" : [
|
||||
"-Dintrospection=disabled",
|
||||
"-Dgtk_doc=false",
|
||||
"-Dtests=false",
|
||||
"-Dexamples=false",
|
||||
"-Dvapi=false",
|
||||
"-Dglade_catalog=disabled"
|
||||
],
|
||||
"cleanup" : [
|
||||
"/include",
|
||||
"/lib/pkgconfig"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://source.puri.sm/Librem5/libhandy.git",
|
||||
"tag" : "v0.0.13"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name" : "gnome-podcasts",
|
||||
"buildsystem" : "meson",
|
||||
"builddir" : "true",
|
||||
"config-opts" : [
|
||||
"-Dprofile=development"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://gitlab.gnome.org/World/podcasts.git"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
69
org.gnome.Podcasts.json
Normal file
69
org.gnome.Podcasts.json
Normal file
@ -0,0 +1,69 @@
|
||||
{
|
||||
"app-id" : "org.gnome.Podcasts",
|
||||
"runtime" : "org.gnome.Platform",
|
||||
"runtime-version" : "3.36",
|
||||
"sdk" : "org.gnome.Sdk",
|
||||
"sdk-extensions" : [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command" : "gnome-podcasts",
|
||||
"tags" : [
|
||||
"nightly"
|
||||
],
|
||||
"desktop-file-name-suffix" : " ☢️",
|
||||
"finish-args" : [
|
||||
"--share=network",
|
||||
"--share=ipc",
|
||||
"--socket=x11",
|
||||
"--socket=fallback-x11",
|
||||
"--socket=wayland",
|
||||
"--socket=pulseaudio",
|
||||
"--env=USE_PLAYBIN3=1"
|
||||
],
|
||||
"build-options" : {
|
||||
"append-path" : "/usr/lib/sdk/rust-stable/bin",
|
||||
"build-args" : [
|
||||
"--share=network"
|
||||
],
|
||||
"env" : {
|
||||
"CARGO_HOME" : "/run/build/Podcasts/cargo",
|
||||
"RUST_BACKTRACE" : "1"
|
||||
}
|
||||
},
|
||||
"modules" : [
|
||||
{
|
||||
"name" : "libhandy",
|
||||
"buildsystem" : "meson",
|
||||
"config-opts" : [
|
||||
"-Dintrospection=disabled",
|
||||
"-Dgtk_doc=false",
|
||||
"-Dtests=false",
|
||||
"-Dexamples=false",
|
||||
"-Dvapi=false",
|
||||
"-Dglade_catalog=disabled"
|
||||
],
|
||||
"cleanup" : [
|
||||
"/include",
|
||||
"/lib/pkgconfig"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://source.puri.sm/Librem5/libhandy.git",
|
||||
"tag" : "v0.0.13"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name" : "gnome-podcasts",
|
||||
"builddir" : "true",
|
||||
"buildsystem" : "meson",
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://gitlab.gnome.org/World/podcasts.git"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
42
podcasts-data/Cargo.toml
Normal file
42
podcasts-data/Cargo.toml
Normal file
@ -0,0 +1,42 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jpetridis@gnome.org>"]
|
||||
name = "podcasts-data"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
ammonia = "3.1.0"
|
||||
chrono = "0.4.11"
|
||||
derive_builder = "0.9.0"
|
||||
lazy_static = "1.4.0"
|
||||
log = "0.4.8"
|
||||
rayon = "1.3.1"
|
||||
rfc822_sanitizer = "0.3.3"
|
||||
rss = "1.9.0"
|
||||
url = "2.1.1"
|
||||
xdg = "2.2.0"
|
||||
xml-rs = "0.8.3"
|
||||
futures = "0.1.29"
|
||||
hyper = "0.12.35"
|
||||
http = "0.1.19"
|
||||
tokio = "0.1.22"
|
||||
hyper-tls = "0.3.2"
|
||||
native-tls = "0.2.3"
|
||||
num_cpus = "1.13.0"
|
||||
failure = "0.1.8"
|
||||
failure_derive = "0.1.8"
|
||||
base64 = "0.12.2"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite", "r2d2"]
|
||||
version = "1.4.5"
|
||||
|
||||
[dependencies.diesel_migrations]
|
||||
features = ["sqlite"]
|
||||
version = "1.4.0"
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.7.2"
|
||||
tempdir = "0.3.7"
|
||||
pretty_assertions = "0.6.1"
|
||||
maplit = "1.0.2"
|
||||
6
podcasts-data/diesel.toml
Normal file
6
podcasts-data/diesel.toml
Normal file
@ -0,0 +1,6 @@
|
||||
# For documentation on how to configure this file,
|
||||
# see diesel.rs/guides/configuring-diesel-cli
|
||||
|
||||
[print_schema]
|
||||
file = "src/schema.rs"
|
||||
patch_file = "src/schema.patch"
|
||||
@ -1,8 +1,3 @@
|
||||
-- Till version 0.2 is released the plan is to edited directly and dont expect
|
||||
-- any kind of non-braking changes.
|
||||
-- After there is a stable prototype, Only diesel migrations will be used
|
||||
-- in order to change the db schema.
|
||||
|
||||
CREATE TABLE `source` (
|
||||
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
`uri` TEXT NOT NULL UNIQUE,
|
||||
@ -0,0 +1,23 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
title TEXT,
|
||||
uri TEXT NOT NULL UNIQUE,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
published_date TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
favorite INTEGER NOT NULL DEFAULT 0,
|
||||
archive INTEGER NOT NULL DEFAULT 0,
|
||||
podcast_id INTEGER NOT NULL
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,22 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
published_date TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,22 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
published_date TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,23 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
published_date TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, published_date, epoch, length, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,24 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
published_date TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,23 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, favorite, archive, podcast_id)
|
||||
SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, favorite, archive, podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,53 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, favorite, archive)
|
||||
SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, 0, 0
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
|
||||
ALTER TABLE podcast RENAME TO old_table;
|
||||
CREATE TABLE `podcast` (
|
||||
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
`title` TEXT NOT NULL,
|
||||
`link` TEXT NOT NULL,
|
||||
`description` TEXT NOT NULL,
|
||||
`image_uri` TEXT,
|
||||
`source_id` INTEGER NOT NULL UNIQUE,
|
||||
`favorite` INTEGER NOT NULL DEFAULT 0,
|
||||
`archive` INTEGER NOT NULL DEFAULT 0,
|
||||
`always_dl` INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
INSERT INTO podcast (
|
||||
id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
) SELECT id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,66 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
|
||||
ALTER TABLE podcast RENAME TO old_table;
|
||||
CREATE TABLE `podcast` (
|
||||
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
`title` TEXT NOT NULL,
|
||||
`link` TEXT NOT NULL,
|
||||
`description` TEXT NOT NULL,
|
||||
`image_uri` TEXT,
|
||||
`source_id` INTEGER NOT NULL UNIQUE
|
||||
);
|
||||
|
||||
INSERT INTO podcast (
|
||||
id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
) SELECT id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,40 @@
|
||||
ALTER TABLE episodes RENAME TO old_table;
|
||||
ALTER TABLE shows RENAME TO podcast;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
show_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,40 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
ALTER TABLE podcast RENAME TO shows;
|
||||
|
||||
CREATE TABLE episodes (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
show_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, show_id)
|
||||
);
|
||||
|
||||
INSERT INTO episodes (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
show_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
94
podcasts-data/src/database.rs
Normal file
94
podcasts-data/src/database.rs
Normal file
@ -0,0 +1,94 @@
|
||||
// database.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Database Setup. This is only public to help with some unit tests.
|
||||
// Diesel embed_migrations! triggers the lint
|
||||
#![allow(unused_imports)]
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel::r2d2;
|
||||
use diesel::r2d2::ConnectionManager;
|
||||
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::errors::DataError;
|
||||
|
||||
#[cfg(not(test))]
|
||||
use crate::xdg_dirs;
|
||||
|
||||
/// Convenience alias for the r2d2 connection-pool type used throughout
/// this module.
type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;

// Compile the SQL migrations under `migrations/` into the binary so they
// can be applied at runtime without shipping the migration files.
embed_migrations!("migrations/");

lazy_static! {
    // Global connection pool, created on first access and shared by
    // every caller of `connection()`.
    static ref POOL: Pool = init_pool(DB_PATH.to_str().unwrap());
}

#[cfg(not(test))]
lazy_static! {
    // Production database location: placed in the XDG data directory,
    // e.g. ~/.local/share/gnome-podcasts/podcasts.db.
    static ref DB_PATH: PathBuf = xdg_dirs::PODCASTS_XDG
        .place_data_file("podcasts.db")
        .unwrap();
}

#[cfg(test)]
lazy_static! {
    // Unit tests share one temporary directory (and therefore one db
    // file) for the lifetime of the test binary.
    pub(crate) static ref TEMPDIR: tempdir::TempDir =
        { tempdir::TempDir::new("podcasts_unit_test").unwrap() };
    static ref DB_PATH: PathBuf = TEMPDIR.path().join("podcasts.db");
}
|
||||
|
||||
/// Get an r2d2 `SqliteConnection`.
|
||||
pub(crate) fn connection() -> Pool {
|
||||
POOL.clone()
|
||||
}
|
||||
|
||||
fn init_pool(db_path: &str) -> Pool {
|
||||
let manager = ConnectionManager::<SqliteConnection>::new(db_path);
|
||||
let pool = r2d2::Pool::builder()
|
||||
.max_size(1)
|
||||
.build(manager)
|
||||
.expect("Failed to create pool.");
|
||||
|
||||
{
|
||||
let db = pool.get().expect("Failed to initialize pool.");
|
||||
run_migration_on(&*db).expect("Failed to run migrations during init.");
|
||||
}
|
||||
info!("Database pool initialized.");
|
||||
pool
|
||||
}
|
||||
|
||||
fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
|
||||
info!("Running DB Migrations...");
|
||||
// embedded_migrations::run(connection)?;
|
||||
embedded_migrations::run_with_output(connection, &mut io::stdout()).map_err(From::from)
|
||||
}
|
||||
|
||||
/// Reset the database into a clean state.
///
/// Tests share one temp-file db, so each test truncates it first.
#[cfg(test)]
pub fn truncate_db() -> Result<(), DataError> {
    let pool = connection();
    let con = pool.get()?;
    for table in &["episodes", "shows", "source"] {
        con.execute(&format!("DELETE FROM {}", table))?;
    }
    Ok(())
}
|
||||
492
podcasts-data/src/dbqueries.rs
Normal file
492
podcasts-data/src/dbqueries.rs
Normal file
@ -0,0 +1,492 @@
|
||||
// dbqueries.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Random CRUD helper functions.
|
||||
|
||||
use chrono::prelude::*;
|
||||
use diesel::prelude::*;
|
||||
|
||||
use diesel;
|
||||
use diesel::dsl::exists;
|
||||
use diesel::select;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::*;
|
||||
|
||||
pub fn get_sources() -> Result<Vec<Source>, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.order((http_etag.asc(), last_modified.asc()))
|
||||
.load::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcasts() -> Result<Vec<Show>, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.order(title.asc())
|
||||
.load::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Show>, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.order(title.asc())
|
||||
.filter(id.ne_all(filter_ids))
|
||||
.load::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(local_uri.is_not_null())
|
||||
.load::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
// use schema::episodes::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
// episodes
|
||||
// .filter(played.is_not_null())
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(played.is_not_null())
|
||||
.load::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(rowid.eq(ep_id))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetModel, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
))
|
||||
.filter(rowid.eq(ep_id))
|
||||
.get_result::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(rowid.eq(ep_id))
|
||||
.select(local_uri)
|
||||
.get_result::<Option<String>>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes_widgets_filter_limit(
|
||||
filter_ids: &[i32],
|
||||
limit: u32,
|
||||
) -> Result<Vec<EpisodeWidgetModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
let columns = (
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
);
|
||||
|
||||
episodes
|
||||
.select(columns)
|
||||
.order(epoch.desc())
|
||||
.filter(show_id.ne_all(filter_ids))
|
||||
.limit(i64::from(limit))
|
||||
.load::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_from_id(pid: i32) -> Result<Show, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_cover_from_id(pid: i32) -> Result<ShowCoverModel, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.select((id, title, image_uri))
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<ShowCoverModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodes_count(parent: &Show) -> Result<i64, DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.count()
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodeswidgets(parent: &Show) -> Result<Vec<EpisodeWidgetModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
let columns = (
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
);
|
||||
|
||||
episodes
|
||||
.select(columns)
|
||||
.filter(show_id.eq(parent.id()))
|
||||
.order(epoch.desc())
|
||||
.load::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_unplayed_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.filter(played.is_null())
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_pd_episodes_limit(parent: &Show, limit: u32) ->
|
||||
// Result<Vec<Episode>, DataError> { use schema::episodes::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
|
||||
// Episode::belonging_to(parent)
|
||||
// .order(epoch.desc())
|
||||
// .limit(i64::from(limit))
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub fn get_source_from_uri(uri_: &str) -> Result<Source, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(uri.eq(uri_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(id.eq(id_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_from_source_id(sid: i32) -> Result<Show, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.filter(source_id.eq(sid))
|
||||
.get_result::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_episode_minimal_from_pk(
|
||||
title_: &str,
|
||||
pid: i32,
|
||||
) -> Result<EpisodeMinimal, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, title, uri, epoch, length, duration, guid, show_id))
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<EpisodeMinimal>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn get_episode_cleaner_from_pk(
|
||||
title_: &str,
|
||||
pid: i32,
|
||||
) -> Result<EpisodeCleanerModel, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn remove_feed(pd: &Show) -> Result<(), DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
con.transaction(|| {
|
||||
delete_source(&con, pd.source_id())?;
|
||||
delete_podcast(&con, pd.id())?;
|
||||
delete_podcast_episodes(&con, pd.id())?;
|
||||
info!("Feed removed from the Database.");
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
diesel::delete(source.filter(id.eq(source_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast(con: &SqliteConnection, show_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
diesel::delete(shows.filter(id.eq(show_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
diesel::delete(episodes.filter(show_id.eq(parent_id))).execute(con)
|
||||
}
|
||||
|
||||
pub fn source_exists(url: &str) -> Result<bool, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(source.filter(uri.eq(url))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(shows.filter(source_id.eq(source_id_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[cfg_attr(rustfmt, rustfmt_skip)]
|
||||
pub(crate) fn episode_exists(title_: &str, show_id_: i32) -> Result<bool, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(episodes.filter(show_id.eq(show_id_)).filter(title.eq(title_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
/// Check if the `episodes table contains any rows
|
||||
///
|
||||
/// Return true if `episodes` table is populated.
|
||||
pub fn is_episodes_populated(filter_show_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(episodes.filter(show_id.ne_all(filter_show_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
/// Check if the `shows` table contains any rows
|
||||
///
|
||||
/// Return true if `shows` table is populated.
|
||||
pub fn is_podcasts_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(shows.filter(id.ne_all(filter_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
/// Check if the `source` table contains any rows
|
||||
///
|
||||
/// Return true if `source` table is populated.
|
||||
pub fn is_source_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(source.filter(id.ne_all(filter_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
diesel::insert_into(episodes)
|
||||
.values(eps)
|
||||
.execute(&*con)
|
||||
.map_err(From::from)
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
pub fn update_none_to_played_now(parent: &Show) -> Result<usize, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
let epoch_now = Utc::now().timestamp() as i32;
|
||||
con.transaction(|| {
|
||||
diesel::update(Episode::belonging_to(parent).filter(played.is_null()))
|
||||
.set(played.eq(Some(epoch_now)))
|
||||
.execute(&con)
|
||||
.map_err(From::from)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::*;
    use crate::pipeline;
    use failure::Error;

    // NOTE(review): this test fetches a feed from web.archive.org, so it
    // needs network access and may be slow or flaky offline.
    #[test]
    fn test_update_none_to_played_now() -> Result<(), Error> {
        // Start from an empty database; tests share one temp-file db.
        truncate_db()?;

        let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                   com/InterceptedWithJeremyScahill";
        let source = Source::from_url(url)?;
        let id = source.id();
        pipeline::run(vec![source])?;
        let pd = get_podcast_from_source_id(id)?;

        // A freshly indexed feed must contain unplayed episodes.
        let eps_num = get_pd_unplayed_episodes(&pd)?.len();
        assert_ne!(eps_num, 0);

        // After marking everything played, none should remain unplayed.
        update_none_to_played_now(&pd)?;
        let eps_num2 = get_pd_unplayed_episodes(&pd)?.len();
        assert_eq!(eps_num2, 0);
        Ok(())
    }
}
|
||||
125
podcasts-data/src/errors.rs
Normal file
125
podcasts-data/src/errors.rs
Normal file
@ -0,0 +1,125 @@
|
||||
// errors.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel;
|
||||
use diesel::r2d2;
|
||||
use diesel_migrations::RunMigrationsError;
|
||||
use http;
|
||||
use hyper;
|
||||
use native_tls;
|
||||
use rss;
|
||||
use url;
|
||||
use xml;
|
||||
|
||||
use std::io;
|
||||
|
||||
use crate::models::Source;
|
||||
|
||||
#[fail(
|
||||
display = "Request to {} returned {}. Context: {}",
|
||||
url, status_code, context
|
||||
)]
|
||||
#[derive(Fail, Debug)]
|
||||
pub struct HttpStatusError {
|
||||
url: String,
|
||||
status_code: hyper::StatusCode,
|
||||
context: String,
|
||||
}
|
||||
|
||||
impl HttpStatusError {
|
||||
pub fn new(url: String, code: hyper::StatusCode, context: String) -> Self {
|
||||
HttpStatusError {
|
||||
url,
|
||||
status_code: code,
|
||||
context,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Every error this crate can produce; the `From` conversions for the
/// wrapped-cause variants are generated by `easy_from_impl!` below.
#[derive(Fail, Debug)]
pub enum DataError {
    #[fail(display = "SQL Query failed: {}", _0)]
    DieselResultError(#[cause] diesel::result::Error),
    #[fail(display = "Database Migration error: {}", _0)]
    DieselMigrationError(#[cause] RunMigrationsError),
    #[fail(display = "R2D2 error: {}", _0)]
    R2D2Error(#[cause] r2d2::Error),
    #[fail(display = "R2D2 Pool error: {}", _0)]
    R2D2PoolError(#[cause] r2d2::PoolError),
    #[fail(display = "Hyper Error: {}", _0)]
    HyperError(#[cause] hyper::Error),
    #[fail(display = "ToStr Error: {}", _0)]
    HttpToStr(#[cause] http::header::ToStrError),
    #[fail(display = "Failed to parse a url: {}", _0)]
    UrlError(#[cause] url::ParseError),
    #[fail(display = "TLS Error: {}", _0)]
    TLSError(#[cause] native_tls::Error),
    #[fail(display = "IO Error: {}", _0)]
    IOError(#[cause] io::Error),
    #[fail(display = "RSS Error: {}", _0)]
    RssError(#[cause] rss::Error),
    #[fail(display = "XML Reader Error: {}", _0)]
    XmlReaderError(#[cause] xml::reader::Error),
    // Free-form error message, converted from a plain String.
    #[fail(display = "Error: {}", _0)]
    Bail(String),
    #[fail(display = "{}", _0)]
    HttpStatusGeneral(HttpStatusError),
    // The following variants carry a `Source` so the pipeline can react
    // (follow the redirect / skip the up-to-date feed) rather than fail.
    #[fail(display = "Source redirects to a new url")]
    FeedRedirect(Source),
    #[fail(display = "Feed is up to date")]
    FeedNotModified(Source),
    #[fail(
        display = "Error occurred while Parsing an Episode. Reason: {}",
        reason
    )]
    ParseEpisodeError { reason: String, parent_id: i32 },
    #[fail(display = "Episode was not changed and thus skipped.")]
    EpisodeNotChanged,
}
|
||||
|
||||
// Maps a type to a variant of the DataError enum.
//
// `easy_from_impl!(Outer, A => Outer::VarA, B => Outer::VarB, ...)`
// expands to one `impl From<X> for Outer` per pair; the second rule
// peels off the head pair and recurses over the comma-separated tail.
macro_rules! easy_from_impl {
    ($outer_type:ty, $from:ty => $to:expr) => (
        impl From<$from> for $outer_type {
            fn from(err: $from) -> Self {
                $to(err)
            }
        }
    );
    ($outer_type:ty, $from:ty => $to:expr, $($f:ty => $t:expr),+) => (
        easy_from_impl!($outer_type, $from => $to);
        easy_from_impl!($outer_type, $($f => $t),+);
    );
}

// Generate the `From` conversions that power `?` throughout the crate.
easy_from_impl!(
    DataError,
    RunMigrationsError => DataError::DieselMigrationError,
    diesel::result::Error => DataError::DieselResultError,
    r2d2::Error => DataError::R2D2Error,
    r2d2::PoolError => DataError::R2D2PoolError,
    hyper::Error => DataError::HyperError,
    http::header::ToStrError => DataError::HttpToStr,
    url::ParseError => DataError::UrlError,
    native_tls::Error => DataError::TLSError,
    io::Error => DataError::IOError,
    rss::Error => DataError::RssError,
    xml::reader::Error => DataError::XmlReaderError,
    String => DataError::Bail
);
|
||||
240
podcasts-data/src/feed.rs
Normal file
240
podcasts-data/src/feed.rs
Normal file
@ -0,0 +1,240 @@
|
||||
// feed.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
|
||||
#![allow(clippy::unit_arg)]
|
||||
//! Index Feeds.
|
||||
|
||||
use futures::future::*;
|
||||
use futures::prelude::*;
|
||||
use futures::stream;
|
||||
use rss;
|
||||
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Index, IndexState, Update};
|
||||
use crate::models::{NewEpisode, NewEpisodeMinimal, NewShow, Show};
|
||||
|
||||
/// Wrapper struct that holds a `Source` id and the `rss::Channel`
/// that corresponds to the `Source.uri` field.
#[derive(Debug, Clone, Builder, PartialEq)]
#[builder(derive(Debug))]
#[builder(setter(into))]
pub struct Feed {
    /// The `rss::Channel` parsed from the `Source` uri.
    channel: rss::Channel,
    /// The `Source` id where the xml `rss::Channel` came from.
    source_id: i32,
}

impl Feed {
    /// Index the contents of the RSS `Feed` into the database.
    ///
    /// Parses the channel metadata into a `NewShow`, persists it, then
    /// indexes every channel item as an episode of that show.
    pub fn index(self) -> impl Future<Item = (), Error = DataError> + Send {
        ok(self.parse_podcast())
            .and_then(|pd| pd.to_podcast())
            .and_then(move |pd| self.index_channel_items(pd))
    }

    /// Build the insertable show row from the channel metadata.
    fn parse_podcast(&self) -> NewShow {
        NewShow::new(&self.channel, self.source_id)
    }

    /// Parse, deduplicate and index the channel's items as episodes of `pd`.
    fn index_channel_items(self, pd: Show) -> impl Future<Item = (), Error = DataError> + Send {
        let stream = stream::iter_ok::<_, DataError>(self.channel.into_items());

        // Parse the episodes; parse failures are logged and dropped so a
        // single bad item does not abort the whole feed.
        let episodes = stream.filter_map(move |item| {
            NewEpisodeMinimal::new(&item, pd.id())
                .and_then(move |ep| determine_ep_state(ep, &item))
                .map_err(|err| error!("Failed to parse an episode: {}", err))
                .ok()
        });

        // Filter errors, Index updatable episodes, return insertables.
        filter_episodes(episodes)
            // Batch index insertable episodes.
            .and_then(|eps| ok(batch_insert_episodes(&eps)))
    }
}
|
||||
|
||||
fn determine_ep_state(
|
||||
ep: NewEpisodeMinimal,
|
||||
item: &rss::Item,
|
||||
) -> Result<IndexState<NewEpisode>, DataError> {
|
||||
// Check if feed exists
|
||||
let exists = dbqueries::episode_exists(ep.title(), ep.show_id())?;
|
||||
|
||||
if !exists {
|
||||
Ok(IndexState::Index(ep.into_new_episode(item)))
|
||||
} else {
|
||||
let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.show_id())?;
|
||||
let rowid = old.rowid();
|
||||
|
||||
if ep != old {
|
||||
Ok(IndexState::Update((ep.into_new_episode(item), rowid)))
|
||||
} else {
|
||||
Ok(IndexState::NotChanged)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Drain a stream of `IndexState`s: `Update` entries are written to the
/// db immediately (errors logged and swallowed), `NotChanged` entries
/// are dropped, and the remaining `Index` entries are collected for a
/// batch insert.
fn filter_episodes<'a, S>(
    stream: S,
) -> impl Future<Item = Vec<NewEpisode>, Error = DataError> + Send + 'a
where
    S: Stream<Item = IndexState<NewEpisode>, Error = DataError> + Send + 'a,
{
    stream
        .filter_map(|state| match state {
            IndexState::NotChanged => None,
            // Update individual rows, and filter them
            IndexState::Update((ref ep, rowid)) => {
                ep.update(rowid)
                    .map_err(|err| error!("{}", err))
                    .map_err(|_| error!("Failed to index episode: {:?}.", ep.title()))
                    .ok();

                None
            }
            IndexState::Index(s) => Some(s),
        })
        // only Index is left, collect them for batch index
        .collect()
}
|
||||
|
||||
fn batch_insert_episodes(episodes: &[NewEpisode]) {
|
||||
if episodes.is_empty() {
|
||||
return;
|
||||
};
|
||||
|
||||
info!("Indexing {} episodes.", episodes.len());
|
||||
dbqueries::index_new_episodes(episodes)
|
||||
.map_err(|err| {
|
||||
error!("Failed batch indexng: {}", err);
|
||||
info!("Fallign back to individual indexing.");
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
episodes.iter().for_each(|ep| {
|
||||
ep.index()
|
||||
.map_err(|err| error!("Error: {}.", err))
|
||||
.map_err(|_| error!("Failed to index episode: {:?}.", ep.title()))
|
||||
.ok();
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use failure::Error;
    use rss::Channel;
    use tokio::{self, prelude::*};

    use crate::database::truncate_db;
    use crate::dbqueries;
    use crate::utils::get_feed;
    use crate::Source;

    use std::fs;
    use std::io::BufReader;

    use super::*;

    // (path, url) tuples.
    // Each url is a wayback-machine snapshot paired with the on-disk
    // fixture that was captured from it.
    const URLS: &[(&str, &str)] = {
        &[
            (
                "tests/feeds/2018-01-20-Intercepted.xml",
                "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                 com/InterceptedWithJeremyScahill",
            ),
            (
                "tests/feeds/2018-01-20-LinuxUnplugged.xml",
                "https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
                 com/linuxunplugged",
            ),
            (
                "tests/feeds/2018-01-20-TheTipOff.xml",
                "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
            ),
            (
                "tests/feeds/2018-01-20-StealTheStars.xml",
                "https://web.archive.org/web/20180120104957if_/https://rss.art19.\
                 com/steal-the-stars",
            ),
            (
                "tests/feeds/2018-01-20-GreaterThanCode.xml",
                "https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
                 com/feed/podcast",
            ),
        ]
    };

    #[test]
    fn test_complete_index() -> Result<(), Error> {
        truncate_db()?;

        // Build one Feed per fixture, reading the xml from disk.
        let feeds: Vec<_> = URLS
            .iter()
            .map(|&(path, url)| {
                // Create and insert a Source into db
                let s = Source::from_url(url).unwrap();
                get_feed(path, s.id())
            })
            .collect();

        // Index the channels
        let stream_ = stream::iter_ok(feeds).for_each(|x| x.index());
        tokio::run(stream_.map_err(|_| ()));

        // Assert the index rows equal the controlled results
        assert_eq!(dbqueries::get_sources()?.len(), 5);
        assert_eq!(dbqueries::get_podcasts()?.len(), 5);
        assert_eq!(dbqueries::get_episodes()?.len(), 354);
        Ok(())
    }

    #[test]
    fn test_feed_parse_podcast() -> Result<(), Error> {
        truncate_db()?;

        let path = "tests/feeds/2018-01-20-Intercepted.xml";
        let feed = get_feed(path, 42);

        // Parse the same fixture by hand and compare against Feed's view.
        let file = fs::File::open(path)?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(feed.parse_podcast(), pd);
        Ok(())
    }

    #[test]
    fn test_feed_index_channel_items() -> Result<(), Error> {
        truncate_db()?;

        let path = "tests/feeds/2018-01-20-Intercepted.xml";
        let feed = get_feed(path, 42);
        let pd = feed.parse_podcast().to_podcast()?;

        // Indexing the fixture must yield exactly one show, 43 episodes.
        feed.index_channel_items(pd).wait()?;
        assert_eq!(dbqueries::get_podcasts()?.len(), 1);
        assert_eq!(dbqueries::get_episodes()?.len(), 43);
        Ok(())
    }
}
|
||||
145
podcasts-data/src/lib.rs
Normal file
145
podcasts-data/src/lib.rs
Normal file
@ -0,0 +1,145 @@
|
||||
// lib.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(unknown_lints)]
|
||||
#![cfg_attr(
|
||||
all(test, feature = "clippy"),
|
||||
allow(option_unwrap_used, result_unwrap_used)
|
||||
)]
|
||||
#![cfg_attr(
|
||||
feature = "clippy",
|
||||
warn(
|
||||
option_unwrap_used,
|
||||
result_unwrap_used,
|
||||
print_stdout,
|
||||
wrong_pub_self_convention,
|
||||
mut_mut,
|
||||
non_ascii_literal,
|
||||
similar_names,
|
||||
unicode_not_nfc,
|
||||
enum_glob_use,
|
||||
if_not_else,
|
||||
items_after_statements,
|
||||
used_underscore_binding
|
||||
)
|
||||
)]
|
||||
// Enable lint group collections
|
||||
#![warn(nonstandard_style, bad_style, unused)]
|
||||
#![warn(edition_2018, rust_2018_idioms)]
|
||||
// standalone lints
|
||||
#![warn(
|
||||
const_err,
|
||||
improper_ctypes,
|
||||
non_shorthand_field_patterns,
|
||||
no_mangle_generic_items,
|
||||
overflowing_literals,
|
||||
plugin_as_library,
|
||||
unconditional_recursion,
|
||||
unions_with_drop_fields,
|
||||
while_true,
|
||||
missing_debug_implementations,
|
||||
missing_docs,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
elided_lifetime_in_paths,
|
||||
missing_copy_implementations
|
||||
)]
|
||||
#![allow(proc_macro_derive_resolution_fallback)]
|
||||
|
||||
//! FIXME: Docs
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate pretty_assertions;
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate maplit;
|
||||
|
||||
#[macro_use]
|
||||
extern crate derive_builder;
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate diesel_migrations;
|
||||
// #[macro_use]
|
||||
// extern crate failure;
|
||||
#[macro_use]
|
||||
extern crate failure_derive;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
pub mod database;
|
||||
#[allow(missing_docs)]
|
||||
pub mod dbqueries;
|
||||
#[allow(missing_docs)]
|
||||
pub mod errors;
|
||||
mod feed;
|
||||
pub(crate) mod models;
|
||||
pub mod opml;
|
||||
mod parser;
|
||||
pub mod pipeline;
|
||||
mod schema;
|
||||
pub mod utils;
|
||||
|
||||
pub use crate::feed::{Feed, FeedBuilder};
|
||||
pub use crate::models::Save;
|
||||
pub use crate::models::{Episode, EpisodeWidgetModel, Show, ShowCoverModel, Source};
|
||||
|
||||
// Set the user agent, See #53 for more
// Keep this in sync with Tor-browser releases
/// The user-agent to be used for all the requests.
/// It originates from the Tor-browser UA.
// NOTE(review): matching the Tor-browser UA makes requests blend in with a
// large anonymity set — confirm this is still the desired policy when bumping.
pub const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0";
|
||||
|
||||
/// [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
#[allow(missing_debug_implementations)]
pub mod xdg_dirs {
    use std::path::PathBuf;
    use xdg;

    lazy_static! {
        // Base-directory handle scoped under the "gnome-podcasts" prefix.
        // NOTE(review): the `unwrap()`s below panic at first access if the
        // XDG environment is broken; these statics are required for the app
        // to run at all, so failing fast is presumably intended — confirm.
        pub(crate) static ref PODCASTS_XDG: xdg::BaseDirectories = {
            xdg::BaseDirectories::with_prefix("gnome-podcasts").unwrap()
        };

        /// XDG_DATA Directory `Pathbuf`.
        pub static ref PODCASTS_DATA: PathBuf = {
            PODCASTS_XDG.create_data_directory(PODCASTS_XDG.get_data_home()).unwrap()
        };

        /// XDG_CONFIG Directory `Pathbuf`.
        pub static ref PODCASTS_CONFIG: PathBuf = {
            PODCASTS_XDG.create_config_directory(PODCASTS_XDG.get_config_home()).unwrap()
        };

        /// XDG_CACHE Directory `Pathbuf`.
        pub static ref PODCASTS_CACHE: PathBuf = {
            PODCASTS_XDG.create_cache_directory(PODCASTS_XDG.get_cache_home()).unwrap()
        };

        /// GNOME Podcasts Download Directory `PathBuf`.
        // Created relative to the data home ("Downloads" is a relative path).
        pub static ref DL_DIR: PathBuf = {
            PODCASTS_XDG.create_data_directory("Downloads").unwrap()
        };
    }
}
|
||||
19
podcasts-data/src/meson.build
Normal file
19
podcasts-data/src/meson.build
Normal file
@ -0,0 +1,19 @@
|
||||
# Rust sources of the podcasts-data crate.
# NOTE(review): presumably listed so meson treats them as dependencies of the
# cargo build target and rebuilds on change — confirm against the crate's
# meson.build that consumes `data_sources`.
data_sources = files(
    'models/episode.rs',
    'models/mod.rs',
    'models/new_episode.rs',
    'models/new_show.rs',
    'models/new_source.rs',
    'models/show.rs',
    'models/source.rs',
    'database.rs',
    'dbqueries.rs',
    'errors.rs',
    'feed.rs',
    'lib.rs',
    'opml.rs',
    'parser.rs',
    'pipeline.rs',
    'schema.rs',
    'utils.rs',
)
|
||||
423
podcasts-data/src/models/episode.rs
Normal file
423
podcasts-data/src/models/episode.rs
Normal file
@ -0,0 +1,423 @@
|
||||
// episode.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use chrono::prelude::*;
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use diesel::SaveChangesDsl;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Save, Show};
|
||||
use crate::schema::episodes;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[table_name = "episodes"]
// Treat `None` as SQL NULL so clearing a field really nulls the column when
// the changeset is applied.
#[changeset_options(treat_none_as_null = "true")]
// Composite primary key: an episode is identified by its title within a show.
#[primary_key(title, show_id)]
#[belongs_to(Show, foreign_key = "show_id")]
#[derive(Debug, Clone)]
/// Diesel Model of the episode table.
pub struct Episode {
    // Sqlite's ROWID of the row.
    rowid: i32,
    title: String,
    // Remote url the media file is located at (see `uri()`).
    uri: Option<String>,
    // Local (usually filesystem) uri of the downloaded media file.
    local_uri: Option<String>,
    description: Option<String>,
    // Publish date as a unix timestamp, Utc whenever possible (see `epoch()`).
    epoch: i32,
    // Size of the media file in bytes (see `length()`).
    length: Option<i32>,
    // Duration of the episode in seconds (see `duration()`).
    duration: Option<i32>,
    guid: Option<String>,
    // Epoch of the last playback; `None` means unplayed (see `played()`).
    played: Option<i32>,
    show_id: i32,
}

impl Save<Episode> for Episode {
    type Error = DataError;

    /// Helper method to easily save/"sync" current state of self to the
    /// Database.
    fn save(&self) -> Result<Episode, Self::Error> {
        let db = connection();
        let tempdb = db.get()?;

        // `save_changes` uses the (title, show_id) primary key to target the row.
        self.save_changes::<Episode>(&*tempdb).map_err(From::from)
    }
}

impl Episode {
    /// Get the value of the sqlite's `ROW_ID`
    pub fn rowid(&self) -> i32 {
        self.rowid
    }

    /// Get the value of the `title` field.
    pub fn title(&self) -> &str {
        &self.title
    }

    /// Get the value of the `uri`.
    ///
    /// Represents the url(usually) that the media file will be located at.
    pub fn uri(&self) -> Option<&str> {
        self.uri.as_ref().map(|s| s.as_str())
    }

    /// Get the value of the `local_uri`.
    ///
    /// Represents the local uri,usually filesystem path,
    /// that the media file will be located at.
    pub fn local_uri(&self) -> Option<&str> {
        self.local_uri.as_ref().map(|s| s.as_str())
    }

    /// Get the `description`.
    pub fn description(&self) -> Option<&str> {
        self.description.as_ref().map(|s| s.as_str())
    }

    /// Get the Episode's `guid`.
    pub fn guid(&self) -> Option<&str> {
        self.guid.as_ref().map(|s| s.as_str())
    }

    /// Get the `epoch` value.
    ///
    /// Retrieved from the rss Item publish date.
    /// Value is set to Utc whenever possible.
    pub fn epoch(&self) -> i32 {
        self.epoch
    }

    /// Get the `length`.
    ///
    /// The number represents the size of the file in bytes.
    pub fn length(&self) -> Option<i32> {
        self.length
    }

    /// Get the `duration` value.
    ///
    /// The number represents the duration of the item/episode in seconds.
    pub fn duration(&self) -> Option<i32> {
        self.duration
    }

    /// Epoch representation of the last time the episode was played.
    ///
    /// None/Null for unplayed.
    pub fn played(&self) -> Option<i32> {
        self.played
    }

    /// `Show` table foreign key.
    pub fn show_id(&self) -> i32 {
        self.show_id
    }
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used for constructing `EpisodeWidgets`.
// Subset of `Episode`: `description` and `guid` are not needed by the widget.
pub struct EpisodeWidgetModel {
    rowid: i32,
    title: String,
    uri: Option<String>,
    local_uri: Option<String>,
    epoch: i32,
    length: Option<i32>,
    duration: Option<i32>,
    played: Option<i32>,
    show_id: i32,
}

// Conversion from the full row; `description` and `guid` are dropped since
// the widget model has no counterpart fields.
impl From<Episode> for EpisodeWidgetModel {
    fn from(e: Episode) -> EpisodeWidgetModel {
        EpisodeWidgetModel {
            rowid: e.rowid,
            title: e.title,
            uri: e.uri,
            local_uri: e.local_uri,
            epoch: e.epoch,
            length: e.length,
            duration: e.duration,
            played: e.played,
            show_id: e.show_id,
        }
    }
}

impl Save<usize> for EpisodeWidgetModel {
    type Error = DataError;

    /// Helper method to easily save/"sync" current state of self to the
    /// Database.
    // Returns the number of rows updated (0 or 1, targeted by rowid).
    fn save(&self) -> Result<usize, Self::Error> {
        use crate::schema::episodes::dsl::*;

        let db = connection();
        let tempdb = db.get()?;

        diesel::update(episodes.filter(rowid.eq(self.rowid)))
            .set(self)
            .execute(&*tempdb)
            .map_err(From::from)
    }
}

impl EpisodeWidgetModel {
    /// Get the value of the sqlite's `ROW_ID`
    pub fn rowid(&self) -> i32 {
        self.rowid
    }

    /// Get the value of the `title` field.
    pub fn title(&self) -> &str {
        &self.title
    }

    /// Get the value of the `uri`.
    ///
    /// Represents the url(usually) that the media file will be located at.
    pub fn uri(&self) -> Option<&str> {
        self.uri.as_ref().map(|s| s.as_str())
    }

    /// Get the value of the `local_uri`.
    ///
    /// Represents the local uri,usually filesystem path,
    /// that the media file will be located at.
    pub fn local_uri(&self) -> Option<&str> {
        self.local_uri.as_ref().map(|s| s.as_str())
    }

    /// Set the `local_uri`.
    pub fn set_local_uri(&mut self, value: Option<&str>) {
        self.local_uri = value.map(|x| x.to_string());
    }

    /// Get the `epoch` value.
    ///
    /// Retrieved from the rss Item publish date.
    /// Value is set to Utc whenever possible.
    pub fn epoch(&self) -> i32 {
        self.epoch
    }

    /// Get the `length`.
    ///
    /// The number represents the size of the file in bytes.
    pub fn length(&self) -> Option<i32> {
        self.length
    }

    /// Set the `length`.
    pub fn set_length(&mut self, value: Option<i32>) {
        self.length = value;
    }

    /// Get the `duration` value.
    ///
    /// The number represents the duration of the item/episode in seconds.
    pub fn duration(&self) -> Option<i32> {
        self.duration
    }

    /// Epoch representation of the last time the episode was played.
    ///
    /// None/Null for unplayed.
    pub fn played(&self) -> Option<i32> {
        self.played
    }

    /// Set the `played` value.
    // Private: external callers go through `set_played_now()`.
    fn set_played(&mut self, value: Option<i32>) {
        self.played = value;
    }

    /// `Show` table foreign key.
    pub fn show_id(&self) -> i32 {
        self.show_id
    }

    /// Sets the `played` value with the current `epoch` timestamp and save it.
    pub fn set_played_now(&mut self) -> Result<(), DataError> {
        // NOTE(review): `as i32` truncates after 2038 — matches the column type.
        let epoch = Utc::now().timestamp() as i32;
        self.set_played(Some(epoch));
        // Discard the affected-row count.
        self.save().map(|_| ())
    }
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used internal with the `utils::checkup` function.
// Carries only the fields the cleaner needs: where the downloaded file
// lives and whether/when the episode was played.
pub struct EpisodeCleanerModel {
    rowid: i32,
    local_uri: Option<String>,
    played: Option<i32>,
}

impl Save<usize> for EpisodeCleanerModel {
    type Error = DataError;

    /// Helper method to easily save/"sync" current state of self to the
    /// Database.
    // Returns the number of rows updated (0 or 1, targeted by rowid).
    fn save(&self) -> Result<usize, Self::Error> {
        use crate::schema::episodes::dsl::*;

        let db = connection();
        let tempdb = db.get()?;

        diesel::update(episodes.filter(rowid.eq(self.rowid)))
            .set(self)
            .execute(&*tempdb)
            .map_err(From::from)
    }
}

// Conversion from the full row; everything except `rowid`, `local_uri`
// and `played` is dropped.
impl From<Episode> for EpisodeCleanerModel {
    fn from(e: Episode) -> EpisodeCleanerModel {
        EpisodeCleanerModel {
            rowid: e.rowid(),
            local_uri: e.local_uri,
            played: e.played,
        }
    }
}

impl EpisodeCleanerModel {
    /// Get the value of the sqlite's `ROW_ID`
    pub fn rowid(&self) -> i32 {
        self.rowid
    }

    /// Get the value of the `local_uri`.
    ///
    /// Represents the local uri,usually filesystem path,
    /// that the media file will be located at.
    pub fn local_uri(&self) -> Option<&str> {
        self.local_uri.as_ref().map(|s| s.as_str())
    }

    /// Set the `local_uri`.
    pub fn set_local_uri(&mut self, value: Option<&str>) {
        self.local_uri = value.map(|x| x.to_string());
    }

    /// Epoch representation of the last time the episode was played.
    ///
    /// None/Null for unplayed.
    pub fn played(&self) -> Option<i32> {
        self.played
    }

    /// Set the `played` value.
    pub fn set_played(&mut self, value: Option<i32>) {
        self.played = value;
    }
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
#[table_name = "episodes"]
#[changeset_options(treat_none_as_null = "true")]
#[primary_key(title, show_id)]
#[derive(Debug, Clone)]
/// Diesel Model to be used for FIXME.
// NOTE(review): queried by `dbqueries::get_episode_minimal_from_pk` and
// compared against `NewEpisode`/`NewEpisodeMinimal` during indexing —
// presumably a lighter row used to decide insert-vs-update; confirm.
pub struct EpisodeMinimal {
    rowid: i32,
    title: String,
    uri: Option<String>,
    epoch: i32,
    length: Option<i32>,
    duration: Option<i32>,
    guid: Option<String>,
    show_id: i32,
}

// Conversion from the full row; `description`, `local_uri` and `played`
// have no counterpart fields here and are dropped.
impl From<Episode> for EpisodeMinimal {
    fn from(e: Episode) -> Self {
        EpisodeMinimal {
            rowid: e.rowid,
            title: e.title,
            uri: e.uri,
            length: e.length,
            guid: e.guid,
            epoch: e.epoch,
            duration: e.duration,
            show_id: e.show_id,
        }
    }
}

impl EpisodeMinimal {
    /// Get the value of the sqlite's `ROW_ID`
    pub fn rowid(&self) -> i32 {
        self.rowid
    }

    /// Get the value of the `title` field.
    pub fn title(&self) -> &str {
        &self.title
    }

    /// Get the value of the `uri`.
    ///
    /// Represents the url(usually) that the media file will be located at.
    pub fn uri(&self) -> Option<&str> {
        self.uri.as_ref().map(|s| s.as_str())
    }

    /// Get the Episode's `guid`.
    pub fn guid(&self) -> Option<&str> {
        self.guid.as_ref().map(|s| s.as_str())
    }

    /// Get the `epoch` value.
    ///
    /// Retrieved from the rss Item publish date.
    /// Value is set to Utc whenever possible.
    pub fn epoch(&self) -> i32 {
        self.epoch
    }

    /// Get the `length`.
    ///
    /// The number represents the size of the file in bytes.
    pub fn length(&self) -> Option<i32> {
        self.length
    }

    /// Get the `duration` value.
    ///
    /// The number represents the duration of the item/episode in seconds.
    pub fn duration(&self) -> Option<i32> {
        self.duration
    }

    /// `Show` table foreign key.
    pub fn show_id(&self) -> i32 {
        self.show_id
    }
}
|
||||
78
podcasts-data/src/models/mod.rs
Normal file
78
podcasts-data/src/models/mod.rs
Normal file
@ -0,0 +1,78 @@
|
||||
// mod.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
mod new_episode;
|
||||
mod new_show;
|
||||
mod new_source;
|
||||
|
||||
mod episode;
|
||||
mod show;
|
||||
mod source;
|
||||
|
||||
// use futures::prelude::*;
|
||||
// use futures::future::*;
|
||||
|
||||
pub(crate) use self::episode::EpisodeCleanerModel;
|
||||
pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
|
||||
pub(crate) use self::new_show::NewShow;
|
||||
pub(crate) use self::new_source::NewSource;
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_episode::NewEpisodeBuilder;
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_show::NewShowBuilder;
|
||||
|
||||
pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetModel};
|
||||
pub use self::show::{Show, ShowCoverModel};
|
||||
pub use self::source::Source;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
/// State of an item relative to what is already stored in the database.
pub enum IndexState<T> {
    /// Not stored yet; should be inserted.
    Index(T),
    /// Differs from the stored row; carries the payload and an `i32` —
    /// presumably the existing rowid to update (cf. `Update::update`);
    /// TODO confirm against the producers of this variant.
    Update((T, i32)),
    /// Matches the stored row; nothing to do.
    NotChanged,
}
|
||||
|
||||
/// Insert a new database row representing `self`.
pub trait Insert<T> {
    /// The Error type to be returned.
    type Error;

    /// Execute the insert; `T` is whatever the implementor yields on success.
    fn insert(&self) -> Result<T, Self::Error>;
}
|
||||
|
||||
/// Update an existing database row with the values of `self`.
pub trait Update<T> {
    /// The Error type to be returned.
    type Error;

    /// Update the row identified by the given rowid (cf. the
    /// `NewEpisode` implementation, which filters on `rowid`).
    fn update(&self, _: i32) -> Result<T, Self::Error>;
}
|
||||
|
||||
// This might need to change in the future
|
||||
pub trait Index<T>: Insert<T> + Update<T> {
|
||||
type Error;
|
||||
|
||||
fn index(&self) -> Result<T, <Self as Index<T>>::Error>;
|
||||
}
|
||||
|
||||
/// Persist the current in-memory state of a diesel model to the database.
pub trait Save<T> {
    /// The Error type to be returned.
    type Error;
    /// Helper method to easily save/"sync" current state of a diesel model to
    /// the Database.
    fn save(&self) -> Result<T, Self::Error>;
}
|
||||
689
podcasts-data/src/models/new_episode.rs
Normal file
689
podcasts-data/src/models/new_episode.rs
Normal file
@ -0,0 +1,689 @@
|
||||
// new_episode.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use ammonia;
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
|
||||
use rss;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Episode, EpisodeMinimal, Index, Insert, Update};
|
||||
use crate::parser;
|
||||
use crate::schema::episodes;
|
||||
use crate::utils::url_cleaner;
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
#[table_name = "episodes"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
// Every builder field is optional and falls back to `Default`.
#[builder(default)]
#[builder(derive(Debug))]
#[builder(setter(into))]
/// Insertable/updatable Diesel model of a freshly parsed episode.
// No `rowid`: the row may not exist in the database yet.
pub(crate) struct NewEpisode {
    title: String,
    uri: Option<String>,
    // Html description sanitized in `NewEpisodeMinimal::into_new_episode`.
    description: Option<String>,
    length: Option<i32>,
    duration: Option<i32>,
    guid: Option<String>,
    epoch: i32,
    show_id: i32,
}
|
||||
|
||||
impl From<NewEpisodeMinimal> for NewEpisode {
|
||||
fn from(e: NewEpisodeMinimal) -> Self {
|
||||
NewEpisodeBuilder::default()
|
||||
.title(e.title)
|
||||
.uri(e.uri)
|
||||
.duration(e.duration)
|
||||
.epoch(e.epoch)
|
||||
.show_id(e.show_id)
|
||||
.guid(e.guid)
|
||||
.build()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl Insert<()> for NewEpisode {
    type Error = DataError;

    /// Insert `self` as a new row in the `episodes` table.
    fn insert(&self) -> Result<(), DataError> {
        use crate::schema::episodes::dsl::*;
        let db = connection();
        let con = db.get()?;

        info!("Inserting {:?}", self.title);
        diesel::insert_into(episodes)
            .values(self)
            .execute(&con)
            .map_err(From::from)
            // Discard the affected-row count; callers only care about success.
            .map(|_| ())
    }
}
|
||||
|
||||
impl Update<()> for NewEpisode {
    type Error = DataError;

    /// Overwrite the row identified by `episode_id` (a sqlite rowid) with
    /// the values of `self`.
    fn update(&self, episode_id: i32) -> Result<(), DataError> {
        use crate::schema::episodes::dsl::*;
        let db = connection();
        let con = db.get()?;

        info!("Updating {:?}", self.title);
        diesel::update(episodes.filter(rowid.eq(episode_id)))
            .set(self)
            .execute(&con)
            .map_err(From::from)
            // Discard the affected-row count; callers only care about success.
            .map(|_| ())
    }
}
|
||||
|
||||
impl Index<()> for NewEpisode {
|
||||
type Error = DataError;
|
||||
|
||||
// Does not update the episode description if it's the only thing that has
|
||||
// changed.
|
||||
fn index(&self) -> Result<(), DataError> {
|
||||
let exists = dbqueries::episode_exists(self.title(), self.show_id())?;
|
||||
|
||||
if exists {
|
||||
let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.show_id())?;
|
||||
|
||||
if self != &other {
|
||||
self.update(other.rowid())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
self.insert()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Comparison against the stored minimal row, used by `index()` to decide
// whether an update is needed. `description` is not compared (the minimal
// model has none). NOTE(review): `length` exists on both types yet is not
// compared either, so length-only changes go undetected — confirm intended.
impl PartialEq<EpisodeMinimal> for NewEpisode {
    fn eq(&self, other: &EpisodeMinimal) -> bool {
        (self.title() == other.title())
            && (self.uri() == other.uri())
            && (self.duration() == other.duration())
            && (self.epoch() == other.epoch())
            && (self.guid() == other.guid())
            && (self.show_id() == other.show_id())
    }
}
|
||||
|
||||
// Full-field comparison against the stored `Episode` row: every field a
// `NewEpisode` carries is checked (`Episode`'s `rowid`, `local_uri` and
// `played` have no counterpart here).
impl PartialEq<Episode> for NewEpisode {
    fn eq(&self, other: &Episode) -> bool {
        (self.title() == other.title())
            && (self.uri() == other.uri())
            && (self.duration() == other.duration())
            && (self.epoch() == other.epoch())
            && (self.guid() == other.guid())
            && (self.show_id() == other.show_id())
            && (self.description() == other.description())
            && (self.length() == other.length())
    }
}
|
||||
|
||||
impl NewEpisode {
    /// Parses an `rss::Item` into a `NewEpisode` Struct.
    #[allow(dead_code)]
    pub(crate) fn new(item: &rss::Item, show_id: i32) -> Result<Self, DataError> {
        // Two-step parse: minimal fields first, then the sanitized description.
        NewEpisodeMinimal::new(item, show_id).map(|ep| ep.into_new_episode(item))
    }

    /// Index `self` (insert-or-update) and read the resulting `Episode`
    /// row back from the database by its (title, show_id) primary key.
    #[allow(dead_code)]
    pub(crate) fn to_episode(&self) -> Result<Episode, DataError> {
        self.index()?;
        dbqueries::get_episode_from_pk(&self.title, self.show_id).map_err(From::from)
    }
}
|
||||
|
||||
// Ignore the following getters. They are used in unit tests mainly.
|
||||
impl NewEpisode {
|
||||
pub(crate) fn title(&self) -> &str {
|
||||
self.title.as_ref()
|
||||
}
|
||||
|
||||
pub(crate) fn uri(&self) -> Option<&str> {
|
||||
self.uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn description(&self) -> Option<&str> {
|
||||
self.description.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn guid(&self) -> Option<&str> {
|
||||
self.guid.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn epoch(&self) -> i32 {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
pub(crate) fn duration(&self) -> Option<i32> {
|
||||
self.duration
|
||||
}
|
||||
|
||||
pub(crate) fn length(&self) -> Option<i32> {
|
||||
self.length
|
||||
}
|
||||
|
||||
pub(crate) fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
#[table_name = "episodes"]
#[derive(Debug, Clone, Builder, PartialEq)]
#[builder(derive(Debug))]
#[builder(setter(into))]
/// Subset of `NewEpisode` parsed straight from an `rss::Item`.
// Lacks `description`, which `into_new_episode` adds after sanitizing
// the item's html with ammonia.
pub(crate) struct NewEpisodeMinimal {
    title: String,
    uri: Option<String>,
    length: Option<i32>,
    duration: Option<i32>,
    epoch: i32,
    guid: Option<String>,
    show_id: i32,
}
|
||||
|
||||
// Same field set as `PartialEq<EpisodeMinimal> for NewEpisode`.
// NOTE(review): `length` exists on both types yet is not compared —
// confirm this is intended.
impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
    fn eq(&self, other: &EpisodeMinimal) -> bool {
        (self.title() == other.title())
            && (self.uri() == other.uri())
            && (self.duration() == other.duration())
            && (self.epoch() == other.epoch())
            && (self.guid() == other.guid())
            && (self.show_id() == other.show_id())
    }
}
|
||||
|
||||
impl NewEpisodeMinimal {
|
||||
pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self, DataError> {
|
||||
if item.title().is_none() {
|
||||
let err = DataError::ParseEpisodeError {
|
||||
reason: "No title specified for this Episode.".into(),
|
||||
parent_id,
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
|
||||
let title = item.title().unwrap().trim().to_owned();
|
||||
let guid = item.guid().map(|s| s.value().trim().to_owned());
|
||||
|
||||
// Get the mime type, the `http` url and the length from the enclosure
|
||||
// http://www.rssboard.org/rss-specification#ltenclosuregtSubelementOfLtitemgt
|
||||
let enc = item.enclosure();
|
||||
|
||||
// Get the url
|
||||
let uri = enc
|
||||
.map(|s| url_cleaner(s.url().trim()))
|
||||
// Fallback to Rss.Item.link if enclosure is None.
|
||||
.or_else(|| item.link().map(|s| url_cleaner(s.trim())));
|
||||
|
||||
// Get the size of the content, it should be in bytes
|
||||
let length = enc.and_then(|x| x.length().parse().ok());
|
||||
|
||||
// If url is still None return an Error as this behaviour is not
|
||||
// compliant with the RSS Spec.
|
||||
if uri.is_none() {
|
||||
let err = DataError::ParseEpisodeError {
|
||||
reason: "No url specified for the item.".into(),
|
||||
parent_id,
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
};
|
||||
|
||||
// Default to rfc2822 representation of epoch 0.
|
||||
let date = parse_rfc822(item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"));
|
||||
// Should treat information from the rss feeds as invalid by default.
|
||||
// Case: "Thu, 05 Aug 2016 06:00:00 -0400" <-- Actually that was friday.
|
||||
let epoch = date.map(|x| x.timestamp() as i32).unwrap_or(0);
|
||||
|
||||
let duration = parser::parse_itunes_duration(item.itunes_ext());
|
||||
|
||||
NewEpisodeMinimalBuilder::default()
|
||||
.title(title)
|
||||
.uri(uri)
|
||||
.length(length)
|
||||
.duration(duration)
|
||||
.epoch(epoch)
|
||||
.guid(guid)
|
||||
.show_id(parent_id)
|
||||
.build()
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// TODO: TryInto is stabilizing in rustc v1.26!
|
||||
// ^ Jokes on you past self!
|
||||
pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode {
|
||||
let description = item.description().and_then(|s| {
|
||||
let sanitized_html = ammonia::Builder::new()
|
||||
// Remove `rel` attributes from `<a>` tags
|
||||
.link_rel(None)
|
||||
.clean(s.trim())
|
||||
.to_string();
|
||||
Some(sanitized_html)
|
||||
});
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title(self.title)
|
||||
.uri(self.uri)
|
||||
.duration(self.duration)
|
||||
.epoch(self.epoch)
|
||||
.show_id(self.show_id)
|
||||
.guid(self.guid)
|
||||
.length(self.length)
|
||||
.description(description)
|
||||
.build()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore the following getters. They are used in unit tests mainly.
|
||||
impl NewEpisodeMinimal {
|
||||
pub(crate) fn title(&self) -> &str {
|
||||
self.title.as_ref()
|
||||
}
|
||||
|
||||
pub(crate) fn uri(&self) -> Option<&str> {
|
||||
self.uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn guid(&self) -> Option<&str> {
|
||||
self.guid.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
pub(crate) fn duration(&self) -> Option<i32> {
|
||||
self.duration
|
||||
}
|
||||
|
||||
pub(crate) fn epoch(&self) -> i32 {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
pub(crate) fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::database::truncate_db;
|
||||
use crate::dbqueries;
|
||||
use crate::models::new_episode::{NewEpisodeMinimal, NewEpisodeMinimalBuilder};
|
||||
use crate::models::*;
|
||||
use failure::Error;
|
||||
|
||||
use rss::Channel;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
|
||||
// TODO: Add tests for other feeds too.
|
||||
// Especially if you find an *interesting* generated feed.
|
||||
|
||||
    // Known prebuilt expected objects.
    //
    // These mirror specific <item> entries in the fixture feeds under
    // `tests/feeds/` (Intercepted items 14/15, LinuxUnplugged items 18/19),
    // so the tests below can compare parsed output against them.
    lazy_static! {
        // Minimal form of Intercepted item 14 (no description).
        static ref EXPECTED_MINIMAL_INTERCEPTED_1: NewEpisodeMinimal = {
            NewEpisodeMinimalBuilder::default()
                .title("The Super Bowl of Racism")
                .uri(Some(String::from(
                    "http://traffic.megaphone.fm/PPY6458293736.mp3",
                )))
                .guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
                .epoch(1505296800)
                .length(Some(66738886))
                .duration(Some(4171))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Minimal form of Intercepted item 15.
        static ref EXPECTED_MINIMAL_INTERCEPTED_2: NewEpisodeMinimal = {
            NewEpisodeMinimalBuilder::default()
                .title("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
                .uri(Some(String::from(
                    "http://traffic.megaphone.fm/FL5331443769.mp3",
                )))
                .guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d")))
                .epoch(1502272800)
                .length(Some(67527575))
                .duration(Some(4415))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Full form of Intercepted item 14, including its description.
        static ref EXPECTED_INTERCEPTED_1: NewEpisode = {
            let descr = "NSA whistleblower Edward Snowden discusses the massive Equifax data \
                         breach and allegations of Russian interference in the US election. \
                         Commentator Shaun King explains his call for a boycott of the NFL and \
                         talks about his campaign to bring violent neo-Nazis to justice. Rapper \
                         Open Mike Eagle performs.";

            NewEpisodeBuilder::default()
                .title("The Super Bowl of Racism")
                .uri(Some(String::from(
                    "http://traffic.megaphone.fm/PPY6458293736.mp3",
                )))
                .description(Some(String::from(descr)))
                .guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
                .length(Some(66738886))
                .epoch(1505296800)
                .duration(Some(4171))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Full form of Intercepted item 15.
        static ref EXPECTED_INTERCEPTED_2: NewEpisode = {
            let descr = "This week on Intercepted: Jeremy gives an update on the aftermath of \
                         Blackwater’s 2007 massacre of Iraqi civilians. Intercept reporter Lee \
                         Fang lays out how a network of libertarian think tanks called the Atlas \
                         Network is insidiously shaping political infrastructure in Latin \
                         America. We speak with attorney and former Hugo Chavez adviser Eva \
                         Golinger about the Venezuela\'s political turmoil.And we hear Claudia \
                         Lizardo of the Caracas-based band, La Pequeña Revancha, talk about her \
                         music and hopes for Venezuela.";

            NewEpisodeBuilder::default()
                .title("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
                .uri(Some(String::from(
                    "http://traffic.megaphone.fm/FL5331443769.mp3",
                )))
                .description(Some(String::from(descr)))
                .guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d")))
                .length(Some(67527575))
                .epoch(1502272800)
                .duration(Some(4415))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Same episode as EXPECTED_INTERCEPTED_1 but with a changed description
        // and duration, used to exercise the update/index paths.
        static ref UPDATED_DURATION_INTERCEPTED_1: NewEpisode = {
            NewEpisodeBuilder::default()
                .title("The Super Bowl of Racism")
                .uri(Some(String::from(
                    "http://traffic.megaphone.fm/PPY6458293736.mp3",
                )))
                .description(Some(String::from("New description")))
                .guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
                .length(Some(66738886))
                .epoch(1505296800)
                .duration(Some(424242))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Minimal form of LinuxUnplugged item 18.
        static ref EXPECTED_MINIMAL_LUP_1: NewEpisodeMinimal = {
            NewEpisodeMinimalBuilder::default()
                .title("Hacking Devices with Kali Linux | LUP 214")
                .uri(Some(String::from(
                    "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3",
                )))
                .guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D")))
                .length(Some(46479789))
                .epoch(1505280282)
                .duration(Some(5733))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Minimal form of LinuxUnplugged item 19.
        static ref EXPECTED_MINIMAL_LUP_2: NewEpisodeMinimal = {
            NewEpisodeMinimalBuilder::default()
                .title("Gnome Does it Again | LUP 213")
                .uri(Some(String::from(
                    "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3",
                )))
                .guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B")))
                .epoch(1504670247)
                .length(Some(36544272))
                .duration(Some(4491))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Full form of LinuxUnplugged item 18.
        static ref EXPECTED_LUP_1: NewEpisode = {
            let descr = "Audit your network with a couple of easy commands on Kali Linux. Chris \
                         decides to blow off a little steam by attacking his IoT devices, Wes has \
                         the scope on Equifax blaming open source & the Beard just saved the \
                         show. It’s a really packed episode!";

            NewEpisodeBuilder::default()
                .title("Hacking Devices with Kali Linux | LUP 214")
                .uri(Some(String::from(
                    "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3",
                )))
                .description(Some(String::from(descr)))
                .guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D")))
                .length(Some(46479789))
                .epoch(1505280282)
                .duration(Some(5733))
                .show_id(42)
                .build()
                .unwrap()
        };
        // Full form of LinuxUnplugged item 19. Its description keeps raw HTML,
        // matching what the feed actually ships.
        static ref EXPECTED_LUP_2: NewEpisode = {
            let descr =
                "<p>The Gnome project is about to solve one of our audience's biggest Wayland’s \
                 concerns. But as the project takes on a new level of relevance, decisions for \
                 the next version of Gnome have us worried about the future.</p>\n\n<p>Plus we \
                 chat with Wimpy about the Ubuntu Rally in NYC, Microsoft’s sneaky move to turn \
                 Windows 10 into the “ULTIMATE LINUX RUNTIME”, community news & more!</p>";

            NewEpisodeBuilder::default()
                .title("Gnome Does it Again | LUP 213")
                .uri(Some(String::from(
                    "http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0213.mp3",
                )))
                .description(Some(String::from(descr)))
                .guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B")))
                .length(Some(36544272))
                .epoch(1504670247)
                .duration(Some(4491))
                .show_id(42)
                .build()
                .unwrap()
        };
    }
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_minimal_intercepted() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_1);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_intercepted() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_minimal_lup() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(18).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_1);
|
||||
|
||||
let episode = channel.items().iter().nth(19).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_lup() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(18).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_LUP_1);
|
||||
|
||||
let episode = channel.items().iter().nth(19).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_LUP_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_minimal_into_new_episode() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let item = channel.items().iter().nth(14).unwrap();
|
||||
let ep = EXPECTED_MINIMAL_INTERCEPTED_1
|
||||
.clone()
|
||||
.into_new_episode(&item);
|
||||
println!(
|
||||
"EPISODE: {:#?}\nEXPECTED: {:#?}",
|
||||
ep, *EXPECTED_INTERCEPTED_1
|
||||
);
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
|
||||
|
||||
let item = channel.items().iter().nth(15).unwrap();
|
||||
let ep = EXPECTED_MINIMAL_INTERCEPTED_2
|
||||
.clone()
|
||||
.into_new_episode(&item);
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_insert() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42)?;
|
||||
new_ep.insert()?;
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
|
||||
|
||||
assert_eq!(new_ep, ep);
|
||||
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED_1, &ep);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42)?;
|
||||
new_ep.insert()?;
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
|
||||
|
||||
assert_eq!(new_ep, ep);
|
||||
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED_2, &ep);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    #[test]
    /// `update()` must change the row contents but leave rowid/show_id intact.
    fn test_new_episode_update() -> Result<(), Error> {
        truncate_db()?;
        // Index the baseline episode and grab its stored form.
        let old = EXPECTED_INTERCEPTED_1.clone().to_episode()?;

        let updated = &*UPDATED_DURATION_INTERCEPTED_1;
        updated.update(old.rowid())?;
        let new = dbqueries::get_episode_from_pk(old.title(), old.show_id())?;

        // Assert that updating does not change the rowid and show_id
        assert_ne!(old, new);
        assert_eq!(old.rowid(), new.rowid());
        assert_eq!(old.show_id(), new.show_id());

        // The stored row should now match the updated struct, not the old one.
        assert_eq!(updated, &new);
        assert_ne!(updated, &old);
        Ok(())
    }
|
||||
|
||||
    #[test]
    /// `index()` inserts on first call, early-returns on identical input,
    /// and updates in place when the episode differs.
    fn test_new_episode_index() -> Result<(), Error> {
        truncate_db()?;
        let expected = &*EXPECTED_INTERCEPTED_1;

        // First insert
        assert!(expected.index().is_ok());
        // Second identical, This should take the early return path
        assert!(expected.index().is_ok());
        // Get the episode
        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
        // Assert that the NewEpisode is equal to the Indexed one
        assert_eq!(*expected, old);

        let updated = &*UPDATED_DURATION_INTERCEPTED_1;

        // Update the episode
        assert!(updated.index().is_ok());
        // Get the new row
        let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
        // Assert it's diff from the old one, but keeps its identity columns.
        assert_ne!(new, old);
        assert_eq!(*updated, new);
        assert_eq!(new.rowid(), old.rowid());
        assert_eq!(new.show_id(), old.show_id());
        Ok(())
    }
|
||||
|
||||
    #[test]
    /// `to_episode()` must be equivalent to `insert()` + fetch, regardless of
    /// which happens first.
    fn test_new_episode_to_episode() -> Result<(), Error> {
        let expected = &*EXPECTED_INTERCEPTED_1;

        // Assert insert() produces the same result that you would get with to_episode()
        truncate_db()?;
        expected.insert()?;
        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
        let ep = expected.to_episode()?;
        assert_eq!(old, ep);

        // Same as above, diff order
        truncate_db()?;
        let ep = expected.to_episode()?;
        // This should error as a unique constraint violation
        assert!(expected.insert().is_err());
        let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
        assert_eq!(old, ep);
        Ok(())
    }
|
||||
}
|
||||
453
podcasts-data/src/models/new_show.rs
Normal file
453
podcasts-data/src/models/new_show.rs
Normal file
@ -0,0 +1,453 @@
|
||||
// new_show.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use ammonia;
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use rss;
|
||||
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Show;
|
||||
use crate::models::{Index, Insert, Update};
|
||||
use crate::schema::shows;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
use crate::utils::url_cleaner;
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
#[table_name = "shows"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
#[builder(default)]
#[builder(derive(Debug))]
#[builder(setter(into))]
/// Not-yet-persisted form of a show row, built from a parsed `rss::Channel`.
/// Doubles as the diesel changeset for updating an existing `shows` row.
pub(crate) struct NewShow {
    // Feed/show title.
    title: String,
    // Website/homepage of the content creator, already passed through `url_cleaner`.
    link: String,
    // Sanitized (ammonia-cleaned) channel description.
    description: String,
    // Cover image url; itunes image takes priority over the rss `channel.image`.
    image_uri: Option<String>,
    // Foreign key into the `source` table.
    source_id: i32,
}
|
||||
|
||||
impl Insert<()> for NewShow {
    type Error = DataError;

    /// Insert `self` as a new row in the `shows` table.
    fn insert(&self) -> Result<(), Self::Error> {
        use crate::schema::shows::dsl::*;
        let db = connection();
        let con = db.get()?;

        diesel::insert_into(shows)
            .values(self)
            .execute(&con)
            // Discard the affected-row count; callers only care about success.
            .map(|_| ())
            .map_err(From::from)
    }
}
|
||||
|
||||
impl Update<()> for NewShow {
    type Error = DataError;

    /// Overwrite the `shows` row with primary key `show_id` with `self`'s fields.
    fn update(&self, show_id: i32) -> Result<(), Self::Error> {
        use crate::schema::shows::dsl::*;
        let db = connection();
        let con = db.get()?;

        info!("Updating {}", self.title);
        diesel::update(shows.filter(id.eq(show_id)))
            .set(self)
            .execute(&con)
            // Discard the affected-row count; callers only care about success.
            .map(|_| ())
            .map_err(From::from)
    }
}
|
||||
|
||||
// TODO: Maybe return an Enum<Action(Result)> instead.
// It would make unit testing better too.
impl Index<()> for NewShow {
    type Error = DataError;

    /// Upsert-like entry point: insert the show if its source is unknown,
    /// update it if the stored row differs, and do nothing if it is identical.
    fn index(&self) -> Result<(), DataError> {
        let exists = dbqueries::podcast_exists(self.source_id)?;

        if exists {
            let other = dbqueries::get_podcast_from_source_id(self.source_id)?;

            // Early return path: skip the write when nothing changed.
            if self != &other {
                self.update(other.id())
            } else {
                Ok(())
            }
        } else {
            self.insert()
        }
    }
}
|
||||
|
||||
impl PartialEq<Show> for NewShow {
    /// Field-wise comparison between a candidate `NewShow` and a stored `Show`,
    /// used by `index()` to decide whether an update is needed.
    fn eq(&self, other: &Show) -> bool {
        self.title() == other.title()
            && self.link() == other.link()
            && self.description() == other.description()
            && self.image_uri() == other.image_uri()
            && self.source_id() == other.source_id()
    }
}
|
||||
|
||||
impl NewShow {
    /// Parses a `rss::Channel` into a `NewShow` Struct.
    pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewShow {
        let title = chan.title().trim();
        let link = url_cleaner(chan.link().trim());

        // Sanitize the channel description before storing it.
        let description = ammonia::Builder::new()
            // Remove `rel` attributes from `<a>` tags
            .link_rel(None)
            .clean(chan.description().trim())
            .to_string();

        // Try to get the itunes img first
        let itunes_img = chan
            .itunes_ext()
            .and_then(|s| s.image().map(|url| url.trim()))
            .map(|s| s.to_owned());
        // If itunes is None, try to get the channel.image from the rss spec
        let image_uri = itunes_img.or_else(|| chan.image().map(|s| s.url().trim().to_owned()));

        NewShowBuilder::default()
            .title(title)
            .description(description)
            .link(link)
            .image_uri(image_uri)
            .source_id(source_id)
            .build()
            // The builder defaults every field, so build() cannot fail here.
            .unwrap()
    }

    // Look out for when tryinto lands into stable.
    /// Index `self` and fetch back the stored `Show` row for its source.
    pub(crate) fn to_podcast(&self) -> Result<Show, DataError> {
        self.index()?;
        dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
    }
}
|
||||
|
||||
// Ignore the following geters. They are used in unit tests mainly.
|
||||
impl NewShow {
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
|
||||
pub(crate) fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
pub(crate) fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
pub(crate) fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
pub(crate) fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
// use tokio_core::reactor::Core;
|
||||
|
||||
use failure::Error;
|
||||
use rss::Channel;
|
||||
|
||||
use crate::database::truncate_db;
|
||||
use crate::models::NewShowBuilder;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
|
||||
    // Pre-built expected NewShow structs.
    //
    // Each matches the channel metadata of one fixture feed under `tests/feeds/`.
    lazy_static! {
        static ref EXPECTED_INTERCEPTED: NewShow = {
            let descr = "The people behind The Intercept’s fearless reporting and incisive \
                         commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and \
                         others—discuss the crucial issues of our time: national security, civil \
                         liberties, foreign policy, and criminal justice. Plus interviews with \
                         artists, thinkers, and newsmakers who challenge our preconceptions about \
                         the world we live in.";

            NewShowBuilder::default()
                .title("Intercepted with Jeremy Scahill")
                .link("https://theintercept.com/podcasts")
                .description(descr)
                .image_uri(Some(String::from(
                    "http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
                     uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
                     2FIntercepted_COVER%2B_281_29.png")
                ))
                .source_id(42)
                .build()
                .unwrap()
        };
        static ref EXPECTED_LUP: NewShow = {
            let descr = "An open show powered by community LINUX Unplugged takes the best \
                         attributes of open collaboration and focuses them into a weekly \
                         lifestyle show about Linux.";

            NewShowBuilder::default()
                .title("LINUX Unplugged Podcast")
                .link("http://www.jupiterbroadcasting.com/")
                .description(descr)
                .image_uri(Some(String::from(
                    "http://www.jupiterbroadcasting.com/images/LASUN-Badge1400.jpg",
                )))
                .source_id(42)
                .build()
                .unwrap()
        };
        // Descriptions are stored post-sanitization, so safe HTML survives.
        static ref EXPECTED_TIPOFF: NewShow = {
            let desc = "<p>Welcome to The Tip Off- the podcast where we take you behind the \
                        scenes of some of the best investigative journalism from recent years. \
                        Each episode we’ll be digging into an investigative scoop- hearing from \
                        the journalists behind the work as they tell us about the leads, the \
                        dead-ends and of course, the tip offs. There’ll be car chases, slammed \
                        doors, terrorist cells, meetings in dimly lit bars and cafes, wrangling \
                        with despotic regimes and much more. So if you’re curious about the fun, \
                        complicated detective work that goes into doing great investigative \
                        journalism- then this is the podcast for you.</p>";

            NewShowBuilder::default()
                .title("The Tip Off")
                .link("http://www.acast.com/thetipoff")
                .description(desc)
                .image_uri(Some(String::from(
                    "https://imagecdn.acast.com/image?h=1500&w=1500&source=http%3A%2F%2Fi1.sndcdn.\
                     com%2Favatars-000317856075-a2coqz-original.jpg",
                )))
                .source_id(42)
                .build()
                .unwrap()
        };
        static ref EXPECTED_STARS: NewShow = {
            let descr = "<p>The first audio drama from Tor Labs and Gideon Media, Steal the Stars \
                         is a gripping noir science fiction thriller in 14 episodes: Forbidden \
                         love, a crashed UFO, an alien body, and an impossible heist unlike any \
                         ever attempted - scripted by Mac Rogers, the award-winning playwright \
                         and writer of the multi-million download The Message and LifeAfter.</p>";
            let img = "https://dfkfj8j276wwv.cloudfront.net/images/2c/5f/a0/1a/2c5fa01a-ae78-4a8c-\
                       b183-7311d2e436c3/b3a4aa57a576bb662191f2a6bc2a436c8c4ae256ecffaff5c4c54fd42e\
                       923914941c264d01efb1833234b52c9530e67d28a8cebbe3d11a4bc0fbbdf13ecdf1c3.jpeg";

            NewShowBuilder::default()
                .title("Steal the Stars")
                .link("http://tor-labs.com/")
                .description(descr)
                .image_uri(Some(String::from(img)))
                .source_id(42)
                .build()
                .unwrap()
        };
        static ref EXPECTED_CODE: NewShow = {
            let descr = "A podcast about humans and technology. Panelists: Coraline Ada Ehmke, \
                         David Brady, Jessica Kerr, Jay Bobo, Astrid Countee and Sam \
                         Livingston-Gray. Brought to you by @therubyrep.";

            NewShowBuilder::default()
                .title("Greater Than Code")
                .link("https://www.greaterthancode.com/")
                .description(descr)
                .image_uri(Some(String::from(
                    "http://www.greaterthancode.com/wp-content/uploads/2016/10/code1400-4.jpg",
                )))
                .source_id(42)
                .build()
                .unwrap()
        };
        // Non-ASCII (Greek) feed, exercising unicode handling.
        static ref EXPECTED_ELLINOFRENEIA: NewShow = {
            NewShowBuilder::default()
                .title("Ελληνοφρένεια")
                .link("https://ellinofreneia.sealabs.net/feed.rss")
                .description("Ανεπίσημο feed της Ελληνοφρένειας")
                .image_uri(Some("https://ellinofreneia.sealabs.net/logo.png".into()))
                .source_id(42)
                .build()
                .unwrap()
        };
        // Same show as EXPECTED_INTERCEPTED but with a changed description,
        // used to exercise the update/index paths.
        static ref UPDATED_DESC_INTERCEPTED: NewShow = {
            NewShowBuilder::default()
                .title("Intercepted with Jeremy Scahill")
                .link("https://theintercept.com/podcasts")
                .description("New Description")
                .image_uri(Some(String::from(
                    "http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
                     uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
                     2FIntercepted_COVER%2B_281_29.png")
                ))
                .source_id(42)
                .build()
                .unwrap()
        };
    }
|
||||
|
||||
    // Each of the following tests parses one fixture feed with NewShow::new()
    // and compares the result against the matching prebuilt expected struct.

    #[test]
    fn test_new_podcast_intercepted() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_INTERCEPTED, pd);
        Ok(())
    }

    #[test]
    fn test_new_podcast_lup() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_LUP, pd);
        Ok(())
    }

    #[test]
    fn test_new_podcast_thetipoff() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_TIPOFF, pd);
        Ok(())
    }

    #[test]
    fn test_new_podcast_steal_the_stars() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_STARS, pd);
        Ok(())
    }

    #[test]
    fn test_new_podcast_greater_than_code() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_CODE, pd);
        Ok(())
    }

    #[test]
    fn test_new_podcast_ellinofreneia() -> Result<(), Error> {
        let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let pd = NewShow::new(&channel, 42);
        assert_eq!(*EXPECTED_ELLINOFRENEIA, pd);
        Ok(())
    }
|
||||
|
||||
    #[test]
    // This maybe could be a doc test on insert.
    /// Insert a parsed show and verify the stored row round-trips back equal
    /// to both the inserted struct and the prebuilt expected value.
    fn test_new_podcast_insert() -> Result<(), Error> {
        truncate_db()?;
        let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
        let channel = Channel::read_from(BufReader::new(file))?;

        let npd = NewShow::new(&channel, 42);
        npd.insert()?;
        let pd = dbqueries::get_podcast_from_source_id(42)?;

        assert_eq!(npd, pd);
        assert_eq!(*EXPECTED_INTERCEPTED, npd);
        assert_eq!(&*EXPECTED_INTERCEPTED, &pd);
        Ok(())
    }
|
||||
|
||||
    #[test]
    // TODO: Add more test/checks
    // Currently there's a test that only checks new description or title.
    // If you have time and want to help, implement the test for the other fields
    // too.
    /// `update()` must change the row contents but leave id/source_id intact.
    fn test_new_podcast_update() -> Result<(), Error> {
        truncate_db()?;
        let old = EXPECTED_INTERCEPTED.to_podcast()?;

        let updated = &*UPDATED_DESC_INTERCEPTED;
        updated.update(old.id())?;
        let new = dbqueries::get_podcast_from_source_id(42)?;

        // Identity columns survive the update; the content differs.
        assert_ne!(old, new);
        assert_eq!(old.id(), new.id());
        assert_eq!(old.source_id(), new.source_id());
        assert_eq!(updated, &new);
        assert_ne!(updated, &old);
        Ok(())
    }
|
||||
|
||||
    #[test]
    /// `index()` inserts on first call, early-returns on identical input,
    /// and updates in place when the show differs.
    fn test_new_podcast_index() -> Result<(), Error> {
        truncate_db()?;

        // First insert
        assert!(EXPECTED_INTERCEPTED.index().is_ok());
        // Second identical, This should take the early return path
        assert!(EXPECTED_INTERCEPTED.index().is_ok());
        // Get the podcast
        let old = dbqueries::get_podcast_from_source_id(42)?;
        // Assert that NewShow is equal to the Indexed one
        assert_eq!(&*EXPECTED_INTERCEPTED, &old);

        let updated = &*UPDATED_DESC_INTERCEPTED;

        // Update the podcast
        assert!(updated.index().is_ok());
        // Get the new Show
        let new = dbqueries::get_podcast_from_source_id(42)?;
        // Assert it's diff from the old one, but keeps its identity columns.
        assert_ne!(new, old);
        assert_eq!(new.id(), old.id());
        assert_eq!(new.source_id(), old.source_id());
        Ok(())
    }
|
||||
|
||||
    #[test]
    /// `to_podcast()` must be equivalent to `insert()` + fetch, regardless of
    /// which happens first.
    fn test_to_podcast() -> Result<(), Error> {
        // Assert insert() produces the same result that you would get with to_podcast()
        truncate_db()?;
        EXPECTED_INTERCEPTED.insert()?;
        let old = dbqueries::get_podcast_from_source_id(42)?;
        let pd = EXPECTED_INTERCEPTED.to_podcast()?;
        assert_eq!(old, pd);

        // Same as above, diff order
        truncate_db()?;
        let pd = EXPECTED_INTERCEPTED.to_podcast()?;
        // This should error as a unique constraint violation
        assert!(EXPECTED_INTERCEPTED.insert().is_err());
        let old = dbqueries::get_podcast_from_source_id(42)?;
        assert_eq!(old, pd);
        Ok(())
    }
|
||||
}
|
||||
69
podcasts-data/src/models/new_source.rs
Normal file
69
podcasts-data/src/models/new_source.rs
Normal file
@ -0,0 +1,69 @@
|
||||
// new_source.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use url::Url;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
// use models::{Insert, Update};
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Source;
|
||||
use crate::schema::source;
|
||||
|
||||
#[derive(Insertable)]
#[table_name = "source"]
#[derive(Debug, Clone, Default, Builder, PartialEq)]
#[builder(default)]
#[builder(derive(Debug))]
#[builder(setter(into))]
/// Not-yet-persisted form of a feed source row.
pub(crate) struct NewSource {
    // Feed url, stored as a string.
    uri: String,
    // Cached `Last-Modified` HTTP header value, when the server sent one.
    last_modified: Option<String>,
    // Cached `ETag` HTTP header value, when the server sent one.
    http_etag: Option<String>,
}
|
||||
|
||||
impl NewSource {
    /// Construct a `NewSource` from a feed `Url`, with no cached HTTP state yet.
    pub(crate) fn new(uri: &Url) -> NewSource {
        NewSource {
            uri: uri.to_string(),
            last_modified: None,
            http_etag: None,
        }
    }

    /// Insert the source, silently doing nothing if the row already exists.
    pub(crate) fn insert_or_ignore(&self) -> Result<(), DataError> {
        use crate::schema::source::dsl::*;
        let db = connection();
        let con = db.get()?;

        diesel::insert_or_ignore_into(source)
            .values(self)
            .execute(&con)
            // Discard the affected-row count; callers only care about success.
            .map(|_| ())
            .map_err(From::from)
    }

    // Look out for when tryinto lands into stable.
    /// Insert (or ignore) and then fetch the stored `Source` row back.
    pub(crate) fn to_source(&self) -> Result<Source, DataError> {
        self.insert_or_ignore()?;
        dbqueries::get_source_from_uri(&self.uri).map_err(From::from)
    }
}
|
||||
110
podcasts-data/src/models/show.rs
Normal file
110
podcasts-data/src/models/show.rs
Normal file
@ -0,0 +1,110 @@
|
||||
// show.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use crate::models::Source;
|
||||
use crate::schema::shows;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
#[belongs_to(Source, foreign_key = "source_id")]
#[changeset_options(treat_none_as_null = "true")]
#[table_name = "shows"]
#[derive(Debug, Clone)]
/// Diesel Model of the shows table.
pub struct Show {
    // Primary key.
    id: i32,
    // Feed/show title.
    title: String,
    // Website/homepage of the content creator.
    link: String,
    // Sanitized channel description.
    description: String,
    // Cover image uri, if the feed provided one.
    image_uri: Option<String>,
    // Foreign key into the `source` table.
    source_id: i32,
}
|
||||
|
||||
impl Show {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the Feed `link`.
|
||||
///
|
||||
/// Usually the website/homepage of the content creator.
|
||||
pub fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri(url usually) that the Feed cover image is located at.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// `Source` table foreign key.
|
||||
pub fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, Debug, Clone)]
/// Diesel Model of the Show cover query.
/// Used for fetching information about a Show's cover.
pub struct ShowCoverModel {
    /// Primary key, shared with the corresponding `Show` row.
    id: i32,
    /// Title of the show.
    title: String,
    /// Uri (usually a url) of the cover image, if any.
    image_uri: Option<String>,
}
|
||||
|
||||
impl From<Show> for ShowCoverModel {
|
||||
fn from(p: Show) -> ShowCoverModel {
|
||||
ShowCoverModel {
|
||||
id: p.id(),
|
||||
title: p.title,
|
||||
image_uri: p.image_uri,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShowCoverModel {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri(url usually) that the Feed cover image is located at.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
|
||||
358
podcasts-data/src/models/source.rs
Normal file
358
podcasts-data/src/models/source.rs
Normal file
@ -0,0 +1,358 @@
|
||||
// source.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel::SaveChangesDsl;
|
||||
// use failure::ResultExt;
|
||||
use rss::Channel;
|
||||
use url::Url;
|
||||
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::{Body, Client};
|
||||
use hyper_tls::HttpsConnector;
|
||||
|
||||
use http::header::{
|
||||
HeaderValue, AUTHORIZATION, ETAG, IF_MODIFIED_SINCE, IF_NONE_MATCH, LAST_MODIFIED, LOCATION,
|
||||
USER_AGENT as USER_AGENT_HEADER,
|
||||
};
|
||||
use http::{Request, Response, StatusCode, Uri};
|
||||
// use futures::future::ok;
|
||||
use futures::future::{loop_fn, Future, Loop};
|
||||
use futures::prelude::*;
|
||||
|
||||
use base64::{encode_config, URL_SAFE};
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::errors::*;
|
||||
use crate::feed::{Feed, FeedBuilder};
|
||||
use crate::models::{NewSource, Save};
|
||||
use crate::schema::source;
|
||||
use crate::USER_AGENT;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
#[table_name = "source"]
#[changeset_options(treat_none_as_null = "true")]
#[derive(Debug, Clone)]
/// Diesel Model of the source table.
pub struct Source {
    /// Primary key of the `source` table.
    id: i32,
    /// Location (usually a url) of the feed's xml document.
    uri: String,
    /// Cached HTTP `Last-Modified` validator header from the last fetch.
    last_modified: Option<String>,
    /// Cached HTTP `ETag` validator header from the last fetch.
    http_etag: Option<String>,
}
|
||||
|
||||
impl Save<Source> for Source {
    type Error = DataError;

    /// Helper method to easily save/"sync" current state of self to the
    /// Database.
    fn save(&self) -> Result<Source, Self::Error> {
        // Grab a connection from the global pool; `get()` fails when the
        // pool is exhausted or the database is unreachable.
        let db = connection();
        let con = db.get()?;

        // Diesel's `SaveChangesDsl` writes the changeset and returns the
        // freshly-read row.
        self.save_changes::<Source>(&*con).map_err(From::from)
    }
}
|
||||
|
||||
impl Source {
    /// Get the source `id` column.
    pub fn id(&self) -> i32 {
        self.id
    }

    /// Represents the location(usually url) of the Feed xml file.
    pub fn uri(&self) -> &str {
        &self.uri
    }

    /// Set the `uri` field value.
    pub fn set_uri(&mut self, uri: String) {
        self.uri = uri;
    }

    /// Represents the Http Last-Modified Header field.
    ///
    /// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
    pub fn last_modified(&self) -> Option<&str> {
        self.last_modified.as_ref().map(|s| s.as_str())
    }

    /// Set `last_modified` value.
    pub fn set_last_modified(&mut self, value: Option<String>) {
        self.last_modified = value;
    }

    /// Represents the Http Etag Header field.
    ///
    /// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
    pub fn http_etag(&self) -> Option<&str> {
        self.http_etag.as_ref().map(|s| s.as_str())
    }

    /// Set `http_etag` value.
    pub fn set_http_etag(&mut self, value: Option<&str>) {
        self.http_etag = value.map(|x| x.to_string());
    }

    /// Extract the `ETag` and `Last-Modified` headers from `res`, and update
    /// self and the corresponding db row.
    fn update_etag(mut self, res: &Response<Body>) -> Result<Self, DataError> {
        let headers = res.headers();

        // Header values that are not valid utf-8 are treated as absent.
        let etag = headers
            .get(ETAG)
            .and_then(|h| h.to_str().ok())
            .map(From::from);
        let lmod = headers
            .get(LAST_MODIFIED)
            .and_then(|h| h.to_str().ok())
            .map(From::from);

        // Only touch the database when one of the validators actually changed.
        if (self.http_etag() != etag) || (self.last_modified != lmod) {
            self.set_http_etag(etag);
            self.set_last_modified(lmod);
            self = self.save()?;
        }

        Ok(self)
    }

    /// Clear the `HTTP` `Etag` and `Last-modified` headers.
    /// This method does not sync the state of self in the database, call
    /// .save() method explicitly
    fn clear_etags(&mut self) {
        debug!("Source etags before clear: {:#?}", &self);
        self.http_etag = None;
        self.last_modified = None;
    }

    /// Build an `HttpStatusGeneral` error for the given status `code`,
    /// consuming `self` (the uri is moved into the error).
    fn make_err(self, context: &str, code: StatusCode) -> DataError {
        DataError::HttpStatusGeneral(HttpStatusError::new(self.uri, code, context.into()))
    }

    // TODO match on more stuff
    // 301: Moved Permanently
    // 304: Up to date Feed, checked with the Etag
    // 307: Temporary redirect of the url
    // 308: Permanent redirect of the url
    // 401: Unauthorized
    // 403: Forbidden
    // 408: Timeout
    // 410: Feed deleted
    // TODO: Rethink this api,
    fn match_status(mut self, res: Response<Body>) -> Result<Response<Body>, DataError> {
        let code = res.status();

        // First pass: keep the cached validators in sync with the response.
        if code.is_success() {
            // If the request is successful save the etag
            self = self.update_etag(&res)?
        } else {
            match code.as_u16() {
                // Save etags if it returns NotModified
                304 => self = self.update_etag(&res)?,
                // Clear the Etag/lmod else
                _ => {
                    self.clear_etags();
                    self = self.save()?;
                }
            };
        };

        // Second pass: map interesting status codes to domain errors.
        // `FeedNotModified`/`FeedRedirect` carry `self` so the caller can
        // keep driving the retry loop (see `into_feed`).
        match code.as_u16() {
            304 => {
                info!("304: Source, (id: {}), is up to date", self.id());
                return Err(DataError::FeedNotModified(self));
            }
            301 | 302 | 308 => {
                warn!("Feed was moved permanently.");
                self = self.update_url(&res)?;
                return Err(DataError::FeedRedirect(self));
            }
            307 => {
                warn!("307: Temporary Redirect.");
                // FIXME: How is it actually handling the redirect?
                return Err(DataError::FeedRedirect(self));
            }
            401 => return Err(self.make_err("401: Unauthorized.", code)),
            403 => return Err(self.make_err("403: Forbidden.", code)),
            404 => return Err(self.make_err("404: Not found.", code)),
            408 => return Err(self.make_err("408: Request Timeout.", code)),
            410 => return Err(self.make_err("410: Feed was deleted..", code)),
            _ => info!("HTTP StatusCode: {}", code),
        };

        Ok(res)
    }

    /// Follow an HTTP redirect: replace `uri` with the `Location` header
    /// (when present), drop the now-stale validators and persist the row.
    fn update_url(mut self, res: &Response<Body>) -> Result<Self, DataError> {
        let code = res.status();
        let headers = res.headers();
        info!("HTTP StatusCode: {}", code);
        debug!("Headers {:#?}", headers);

        if let Some(url) = headers.get(LOCATION) {
            debug!("Previous Source: {:#?}", &self);

            self.set_uri(url.to_str()?.into());
            // The old validators belong to the old url.
            self.clear_etags();
            self = self.save()?;

            debug!("Updated Source: {:#?}", &self);
            info!(
                "Feed url of Source {}, was updated successfully.",
                self.id()
            );
        }

        Ok(self)
    }

    /// Construct a new `Source` with the given `uri` and index it.
    ///
    /// This only indexes the `Source` struct, not the Podcast Feed.
    pub fn from_url(uri: &str) -> Result<Source, DataError> {
        let url = Url::parse(uri)?;

        NewSource::new(&url).to_source()
    }

    /// `Feed` constructor.
    ///
    /// Fetches the latest xml Feed.
    ///
    /// Updates the validator Http Headers.
    ///
    /// Consumes `self` and Returns the corresponding `Feed` Object.
    // Refactor into TryInto once it lands on stable.
    pub fn into_feed(
        self,
        client: Client<HttpsConnector<HttpConnector>>,
    ) -> impl Future<Item = Feed, Error = DataError> {
        let id = self.id();
        // Keep re-issuing the request while the server answers with a
        // redirect; `FeedRedirect` carries the Source with its updated url.
        let response = loop_fn(self, move |source| {
            source
                .request_constructor(&client.clone())
                .then(|res| match res {
                    Ok(response) => Ok(Loop::Break(response)),
                    Err(err) => match err {
                        DataError::FeedRedirect(s) => {
                            info!("Following redirect...");
                            Ok(Loop::Continue(s))
                        }
                        e => Err(e),
                    },
                })
        });

        // Parse the body as an rss Channel and wrap it into a Feed.
        response
            .and_then(response_to_channel)
            .and_then(move |chan| {
                FeedBuilder::default()
                    .channel(chan)
                    .source_id(id)
                    .build()
                    .map_err(From::from)
            })
    }

    /// Issue the (conditional) GET request for this feed.
    fn request_constructor(
        self,
        client: &Client<HttpsConnector<HttpConnector>>,
    ) -> impl Future<Item = Response<Body>, Error = DataError> {
        // FIXME: remove unwrap somehow
        let uri = Uri::from_str(self.uri()).unwrap();
        let mut req = Request::get(uri).body(Body::empty()).unwrap();

        // Credentials embedded in the url are forwarded as HTTP Basic auth.
        if let Ok(url) = Url::parse(self.uri()) {
            if let Some(password) = url.password() {
                let mut auth = "Basic ".to_owned();
                auth.push_str(&encode_config(
                    &format!("{}:{}", url.username(), password),
                    URL_SAFE,
                ));
                req.headers_mut()
                    .insert(AUTHORIZATION, HeaderValue::from_str(&auth).unwrap());
            }
        }

        // Set the UserAgent cause ppl still seem to check it for some reason...
        req.headers_mut()
            .insert(USER_AGENT_HEADER, HeaderValue::from_static(USER_AGENT));

        // Conditional-request validators: ask the server to answer 304
        // instead of re-sending an unchanged feed.
        if let Some(etag) = self.http_etag() {
            req.headers_mut()
                .insert(IF_NONE_MATCH, HeaderValue::from_str(etag).unwrap());
        }

        if let Some(lmod) = self.last_modified() {
            req.headers_mut()
                .insert(IF_MODIFIED_SINCE, HeaderValue::from_str(lmod).unwrap());
        }

        client
            .request(req)
            .map_err(From::from)
            .and_then(move |res| self.match_status(res))
    }
}
|
||||
|
||||
/// Drain a hyper `Response` body and parse it as an rss `Channel`.
fn response_to_channel(
    res: Response<Body>,
) -> impl Future<Item = Channel, Error = DataError> + Send {
    res.into_body()
        // Buffer the whole body into a single chunk.
        .concat2()
        .map(|x| x.into_iter())
        .map_err(From::from)
        .map(|iter| iter.collect::<Vec<u8>>())
        // Feeds are not always valid utf-8; replace invalid sequences
        // instead of failing the whole fetch.
        .map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
        .and_then(|buf| Channel::from_str(&buf).map_err(From::from))
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use failure::Error;
    use num_cpus;
    use tokio;

    use crate::database::truncate_db;
    use crate::utils::get_feed;

    // Network test: fetches an archived snapshot of a real feed and
    // compares the parsed result against a local fixture file.
    #[test]
    fn test_into_feed() -> Result<(), Error> {
        truncate_db()?;

        let mut rt = tokio::runtime::Runtime::new()?;
        let https = HttpsConnector::new(num_cpus::get())?;
        let client = Client::builder().build::<_, Body>(https);

        let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                   com/InterceptedWithJeremyScahill";
        let source = Source::from_url(url)?;
        let id = source.id();
        let feed = source.into_feed(client);
        let feed = rt.block_on(feed)?;

        let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
        assert_eq!(expected, feed);
        Ok(())
    }
}
|
||||
355
podcasts-data/src/opml.rs
Normal file
355
podcasts-data/src/opml.rs
Normal file
@ -0,0 +1,355 @@
|
||||
// opml.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! FIXME: Docs
|
||||
|
||||
// #![allow(unused)]
|
||||
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Source;
|
||||
use xml::{
|
||||
common::XmlVersion,
|
||||
reader,
|
||||
writer::{events::XmlEvent, EmitterConfig},
|
||||
};
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use std::fs::File;
|
||||
// use std::io::BufReader;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
// FIXME: Make it a Diesel model
/// Represents an `outline` xml element as per the `OPML` [specification][spec]
/// not `RSS` related sub-elements are omitted.
///
/// [spec]: http://dev.opml.org/spec2.html
pub struct Opml {
    /// The outline's `title` attribute.
    title: String,
    /// The outline's `description` attribute.
    description: String,
    /// The outline's `xmlUrl` attribute (location of the feed).
    url: String,
}
|
||||
|
||||
/// Import feed url's from a `R` into the `Source` table.
|
||||
// TODO: Write test
|
||||
pub fn import_to_db<R: Read>(reader: R) -> Result<Vec<Source>, reader::Error> {
|
||||
let feeds = extract_sources(reader)?
|
||||
.iter()
|
||||
.map(|opml| Source::from_url(&opml.url))
|
||||
.filter_map(|s| {
|
||||
if let Err(ref err) = s {
|
||||
let txt = "If you think this might be a bug please consider filling a report over \
|
||||
at https://gitlab.gnome.org/World/podcasts/issues/new";
|
||||
|
||||
error!("Failed to import a Show: {}", err);
|
||||
error!("{}", txt);
|
||||
}
|
||||
|
||||
s.ok()
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(feeds)
|
||||
}
|
||||
|
||||
/// Open a File from `P`, try to parse the OPML then insert the Feeds in the database and
|
||||
/// return the new `Source`s
|
||||
// TODO: Write test
|
||||
pub fn import_from_file<P: AsRef<Path>>(path: P) -> Result<Vec<Source>, DataError> {
|
||||
let content = fs::read(path)?;
|
||||
import_to_db(content.as_slice()).map_err(From::from)
|
||||
}
|
||||
|
||||
/// Export a file to `P`, taking the feeds from the database and outputting
|
||||
/// them in opml format.
|
||||
pub fn export_from_db<P: AsRef<Path>>(path: P, export_title: &str) -> Result<(), Error> {
|
||||
let file = File::create(path)?;
|
||||
export_to_file(&file, export_title)
|
||||
}
|
||||
|
||||
/// Export from `Source`s and `Show`s into `F` in OPML format
pub fn export_to_file<F: Write>(file: F, export_title: &str) -> Result<(), Error> {
    let config = EmitterConfig::new().perform_indent(true);

    let mut writer = config.create_writer(file);

    // The document prologue events are queued first, then written in one pass.
    let mut events: Vec<XmlEvent<'_>> = Vec::new();

    // Set up headers
    let doc = XmlEvent::StartDocument {
        version: XmlVersion::Version10,
        encoding: Some("UTF-8"),
        standalone: Some(false),
    };
    events.push(doc);

    let opml: XmlEvent<'_> = XmlEvent::start_element("opml")
        .attr("version", "2.0")
        .into();
    events.push(opml);

    let head: XmlEvent<'_> = XmlEvent::start_element("head").into();
    events.push(head);

    let title_ev: XmlEvent<'_> = XmlEvent::start_element("title").into();
    events.push(title_ev);

    let title_chars: XmlEvent<'_> = XmlEvent::characters(export_title).into();
    events.push(title_chars);

    // Close <title> & <head>
    events.push(XmlEvent::end_element().into());
    events.push(XmlEvent::end_element().into());

    let body: XmlEvent<'_> = XmlEvent::start_element("body").into();
    events.push(body);

    for event in events {
        writer.write(event)?;
    }

    // FIXME: Make this a model of a joined query (http://docs.diesel.rs/diesel/macro.joinable.html)
    // Pair every subscribed show with the source row it came from.
    let shows = dbqueries::get_podcasts()?.into_iter().map(|show| {
        let source = dbqueries::get_source_from_id(show.source_id()).unwrap();
        (source, show)
    });

    // Emit one self-closing <outline> element per show.
    for (ref source, ref show) in shows {
        let title = show.title();
        let link = show.link();
        let xml_url = source.uri();

        let s_ev: XmlEvent<'_> = XmlEvent::start_element("outline")
            .attr("text", title)
            .attr("title", title)
            .attr("type", "rss")
            .attr("xmlUrl", xml_url)
            .attr("htmlUrl", link)
            .into();

        let end_ev: XmlEvent<'_> = XmlEvent::end_element().into();
        writer.write(s_ev)?;
        writer.write(end_ev)?;
    }

    // Close <body> and <opml>
    let end_bod: XmlEvent<'_> = XmlEvent::end_element().into();
    writer.write(end_bod)?;
    let end_opml: XmlEvent<'_> = XmlEvent::end_element().into();
    writer.write(end_opml)?;

    Ok(())
}
|
||||
|
||||
/// Extracts the `outline` elements from a reader `R` and returns a `HashSet` of `Opml` structs.
pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Error> {
    let mut list = HashSet::new();
    let parser = reader::EventReader::new(reader);

    // Walk every xml event; the `collect` short-circuits on the first parser
    // error. The Ok values are discarded — the real output is accumulated
    // into `list` as a side effect of the closure.
    parser
        .into_iter()
        .map(|e| match e {
            Ok(reader::XmlEvent::StartElement {
                name, attributes, ..
            }) => {
                if name.local_name == "outline" {
                    let mut title = String::new();
                    let mut url = String::new();
                    let mut description = String::new();

                    // Keep only the RSS-related attributes; anything else
                    // (e.g. `type`, `htmlUrl`) is ignored.
                    attributes.into_iter().for_each(|attribute| {
                        match attribute.name.local_name.as_str() {
                            "title" => title = attribute.value,
                            "xmlUrl" => url = attribute.value,
                            "description" => description = attribute.value,
                            _ => {}
                        }
                    });

                    let feed = Opml {
                        title,
                        description,
                        url,
                    };
                    // HashSet deduplicates identical outlines for free.
                    list.insert(feed);
                }
                Ok(())
            }
            Err(err) => Err(err),
            _ => Ok(()),
        })
        .collect::<Result<Vec<_>, reader::Error>>()?;

    Ok(list)
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Local;
    use failure::Error;
    use futures::Future;

    use crate::database::{truncate_db, TEMPDIR};
    use crate::utils::get_feed;

    // (local fixture path, archived feed url) pairs used to seed the db.
    const URLS: &[(&str, &str)] = {
        &[
            (
                "tests/feeds/2018-01-20-Intercepted.xml",
                "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
                 com/InterceptedWithJeremyScahill",
            ),
            (
                "tests/feeds/2018-01-20-LinuxUnplugged.xml",
                "https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
                 com/linuxunplugged",
            ),
            (
                "tests/feeds/2018-01-20-TheTipOff.xml",
                "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
            ),
            (
                "tests/feeds/2018-01-20-StealTheStars.xml",
                "https://web.archive.org/web/20180120104957if_/https://rss.art19.\
                 com/steal-the-stars",
            ),
            (
                "tests/feeds/2018-01-20-GreaterThanCode.xml",
                "https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
                 com/feed/podcast",
            ),
            (
                "tests/feeds/2019-01-27-ACC.xml",
                "https://web.archive.org/web/20190127005213if_/https://anticapitalistchronicles.libsyn.com/rss"
            ),
        ]
    };

    // Parses a hand-built opml document and checks that exactly the two
    // outline elements are extracted with their attributes intact.
    #[test]
    fn test_extract() -> Result<(), Error> {
        let int_title = String::from("Intercepted with Jeremy Scahill");
        let int_url = String::from("https://feeds.feedburner.com/InterceptedWithJeremyScahill");
        let int_desc = String::from(
            "The people behind The Intercept’s fearless reporting and incisive \
             commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and others—discuss the \
             crucial issues of our time: national security, civil liberties, foreign policy, \
             and criminal justice. Plus interviews with artists, thinkers, and newsmakers \
             who challenge our preconceptions about the world we live in.",
        );

        let dec_title = String::from("Deconstructed with Mehdi Hasan");
        let dec_url = String::from("https://rss.prod.firstlook.media/deconstructed/podcast.rss");
        let dec_desc = String::from(
            "Journalist Mehdi Hasan is known around the world for his televised takedowns of \
             presidents and prime ministers. In this new podcast from The Intercept, Mehdi \
             unpacks a game-changing news event of the week while challenging the conventional \
             wisdom. As a Brit, a Muslim and an immigrant based in Donald Trump's Washington \
             D.C., Mehdi gives a refreshingly provocative perspective on the ups and downs of \
             American—and global—politics.",
        );

        #[cfg_attr(rustfmt, rustfmt_skip)]
        let sample1 = format!(
            "<?xml version=\"1.0\" encoding=\"UTF-8\"?> \
             <opml version=\"2.0\"> \
             <head> \
             <title>Test OPML File</title> \
             <dateCreated>{}</dateCreated> \
             <docs>http://www.opml.org/spec2</docs> \
             </head> \
             <body> \
             <outline type=\"rss\" title=\"{}\" description=\"{}\" xmlUrl=\"{}\"/> \
             <outline type=\"rss\" title=\"{}\" description=\"{}\" xmlUrl=\"{}\"/> \
             </body> \
             </opml>",
            Local::now().format("%a, %d %b %Y %T %Z"),
            int_title,
            int_desc,
            int_url,
            dec_title,
            dec_desc,
            dec_url,
        );

        let map = hashset![
            Opml {
                title: int_title,
                description: int_desc,
                url: int_url
            },
            Opml {
                title: dec_title,
                description: dec_desc,
                url: dec_url
            },
        ];
        assert_eq!(extract_sources(sample1.as_bytes())?, map);
        Ok(())
    }

    // Round-trip test: index fixture feeds, export them, and check both
    // the extracted set and the exact file contents.
    #[test]
    fn text_export() -> Result<(), Error> {
        truncate_db()?;

        URLS.iter().for_each(|&(path, url)| {
            // Create and insert a Source into db
            let s = Source::from_url(url).unwrap();
            let feed = get_feed(path, s.id());
            feed.index().wait().unwrap();
        });

        // Build the expected Opml set straight from the database.
        let mut map: HashSet<Opml> = HashSet::new();
        let shows = dbqueries::get_podcasts()?.into_iter().map(|show| {
            let source = dbqueries::get_source_from_id(show.source_id()).unwrap();
            (source, show)
        });

        for (ref source, ref show) in shows {
            let title = show.title().to_string();
            // description is an optional field that we don't export
            let description = String::new();
            let url = source.uri().to_string();

            map.insert(Opml {
                title,
                description,
                url,
            });
        }

        let opml_path = TEMPDIR.path().join("podcasts.opml");
        export_from_db(opml_path.as_path(), "GNOME Podcasts Subscriptions")?;
        let opml_file = File::open(opml_path.as_path())?;
        assert_eq!(extract_sources(&opml_file)?, map);

        // extract_sources drains the reader its passed
        let mut opml_file = File::open(opml_path.as_path())?;
        let mut opml_str = String::new();
        opml_file.read_to_string(&mut opml_str)?;
        assert_eq!(opml_str, include_str!("../tests/export_test.opml"));
        Ok(())
    }
}
|
||||
99
podcasts-data/src/parser.rs
Normal file
99
podcasts-data/src/parser.rs
Normal file
@ -0,0 +1,99 @@
|
||||
// parser.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use rss::extension::itunes::ITunesItemExtension;
|
||||
|
||||
/// Parses an Item Itunes extension and returns it's duration value in seconds.
|
||||
// FIXME: Rafactor
|
||||
#[allow(non_snake_case)]
|
||||
pub(crate) fn parse_itunes_duration(item: Option<&ITunesItemExtension>) -> Option<i32> {
|
||||
let duration = item.map(|s| s.duration())??;
|
||||
|
||||
// FOR SOME FUCKING REASON, IN THE APPLE EXTENSION SPEC
|
||||
// THE DURATION CAN BE EITHER AN INT OF SECONDS OR
|
||||
// A STRING OF THE FOLLOWING FORMATS:
|
||||
// HH:MM:SS, H:MM:SS, MM:SS, M:SS
|
||||
// LIKE WHO THE FUCK THOUGH THAT WOULD BE A GOOD IDEA.
|
||||
if let Ok(NO_FUCKING_LOGIC) = duration.parse::<i32>() {
|
||||
return Some(NO_FUCKING_LOGIC);
|
||||
};
|
||||
|
||||
let mut seconds = 0;
|
||||
let fk_apple = duration.split(':').collect::<Vec<_>>();
|
||||
if fk_apple.len() == 3 {
|
||||
seconds += fk_apple[0].parse::<i32>().unwrap_or(0) * 3600;
|
||||
seconds += fk_apple[1].parse::<i32>().unwrap_or(0) * 60;
|
||||
seconds += fk_apple[2].parse::<i32>().unwrap_or(0);
|
||||
} else if fk_apple.len() == 2 {
|
||||
seconds += fk_apple[0].parse::<i32>().unwrap_or(0) * 60;
|
||||
seconds += fk_apple[1].parse::<i32>().unwrap_or(0);
|
||||
}
|
||||
|
||||
Some(seconds)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use rss::extension::itunes::ITunesItemExtensionBuilder;

    use super::*;

    // Covers every accepted duration format: plain seconds and all four
    // clock shapes.
    #[test]
    fn test_itunes_duration() {
        // Input is a String<Int>
        let extension = ITunesItemExtensionBuilder::default()
            .duration(Some("3370".into()))
            .build()
            .unwrap();
        let item = Some(&extension);
        assert_eq!(parse_itunes_duration(item), Some(3370));

        // Input is a String<M:SS>
        let extension = ITunesItemExtensionBuilder::default()
            .duration(Some("6:10".into()))
            .build()
            .unwrap();
        let item = Some(&extension);
        assert_eq!(parse_itunes_duration(item), Some(370));

        // Input is a String<MM:SS>
        let extension = ITunesItemExtensionBuilder::default()
            .duration(Some("56:10".into()))
            .build()
            .unwrap();
        let item = Some(&extension);
        assert_eq!(parse_itunes_duration(item), Some(3370));

        // Input is a String<H:MM:SS>
        let extension = ITunesItemExtensionBuilder::default()
            .duration(Some("1:56:10".into()))
            .build()
            .unwrap();
        let item = Some(&extension);
        assert_eq!(parse_itunes_duration(item), Some(6970));

        // Input is a String<HH:MM:SS>
        let extension = ITunesItemExtensionBuilder::default()
            .duration(Some("01:56:10".into()))
            .build()
            .unwrap();
        let item = Some(&extension);
        assert_eq!(parse_itunes_duration(item), Some(6970));
    }
}
|
||||
133
podcasts-data/src/pipeline.rs
Normal file
133
podcasts-data/src/pipeline.rs
Normal file
@ -0,0 +1,133 @@
|
||||
// pipeline.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
// FIXME:
|
||||
//! Docs.
|
||||
|
||||
use futures::{future::ok, lazy, prelude::*, stream::FuturesUnordered};
|
||||
use tokio;
|
||||
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::{Body, Client};
|
||||
use hyper_tls::HttpsConnector;
|
||||
|
||||
use num_cpus;
|
||||
|
||||
use crate::errors::DataError;
|
||||
use crate::Source;
|
||||
|
||||
use std::iter::FromIterator;
|
||||
|
||||
type HttpsClient = Client<HttpsConnector<HttpConnector>>;
|
||||
|
||||
/// The pipeline to be run for indexing and updating a Podcast feed that originates from
/// `Source.uri`.
///
/// Messy temp diagram:
/// Source -> GET Request -> Update Etags -> Check Status -> Parse `xml/Rss` ->
/// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes.
pub fn pipeline<'a, S>(sources: S, client: HttpsClient) -> impl Future<Item = (), Error = ()> + 'a
where
    S: Stream<Item = Source, Error = DataError> + Send + 'a,
{
    sources
        .and_then(move |s| s.into_feed(client.clone()))
        .map_err(|err| {
            match err {
                // Avoid spamming the stderr when it's not an actual error
                DataError::FeedNotModified(_) => (),
                _ => error!("Error: {}", err),
            }
        })
        .and_then(move |feed| {
            // Index each feed on its own tokio task so a slow feed does
            // not serialize the rest of the pipeline.
            let fut = lazy(|| feed.index().map_err(|err| error!("Error: {}", err)));
            tokio::spawn(fut);
            Ok(())
        })
        // For each terminates the stream at the first error so we make sure
        // we pass good values regardless
        .then(move |_| ok(()))
        // Convert the stream into a Future to later execute as a tokio task
        .for_each(move |_| ok(()))
}
|
||||
|
||||
/// Creates a tokio `reactor::Core`, and a `hyper::Client` and
/// runs the pipeline to completion. The `reactor::Core` is dropped afterwards.
pub fn run<S>(sources: S) -> Result<(), DataError>
where
    S: IntoIterator<Item = Source>,
{
    // One https connector DNS worker thread per logical cpu.
    let https = HttpsConnector::new(num_cpus::get())?;
    let client = Client::builder().build::<_, Body>(https);

    // Wrap every Source into an already-resolved future and poll them all
    // concurrently as an unordered stream.
    let foo = sources.into_iter().map(ok::<_, _>);
    let stream = FuturesUnordered::from_iter(foo);
    let p = pipeline(stream, client);
    // Blocks the current thread until the whole pipeline completes.
    tokio::run(p);

    Ok(())
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::database::truncate_db;
    use crate::dbqueries;
    use crate::Source;
    use failure::Error;

    // Feed urls pinned to web.archive.org snapshots so the fetched content
    // (and therefore the row counts asserted below) stays stable.
    const URLS: &[&str] = &[
        "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
         com/InterceptedWithJeremyScahill",
        "https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.com/linuxunplugged",
        "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
        "https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars",
        "https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
         com/feed/podcast",
    ];

    #[test]
    /// Insert feeds and update/index them.
    fn test_pipeline() -> Result<(), Error> {
        truncate_db()?;
        // An atom (non-rss) feed: parsing it fails, which exercises the
        // pipeline's requirement to keep going when one stream item errors.
        let bad_url = "https://gitlab.gnome.org/World/podcasts.atom";
        // If a stream returns error/None it stops,
        // but we want to parse all feeds regardless if one fails.
        Source::from_url(bad_url)?;

        URLS.iter().for_each(|url| {
            // Index the urls into the source table.
            Source::from_url(url).unwrap();
        });

        let sources = dbqueries::get_sources()?;
        run(sources)?;

        let sources = dbqueries::get_sources()?;
        // Run again to cover Unique constraint errors.
        run(sources)?;

        // Assert the index rows equal the controlled results
        assert_eq!(dbqueries::get_sources()?.len(), 6);
        assert_eq!(dbqueries::get_podcasts()?.len(), 5);
        assert_eq!(dbqueries::get_episodes()?.len(), 354);
        Ok(())
    }
}
|
||||
29
podcasts-data/src/schema.patch
Normal file
29
podcasts-data/src/schema.patch
Normal file
@ -0,0 +1,29 @@
|
||||
diff --git a/podcasts-data/src/schema.rs b/podcasts-data/src/schema.rs
|
||||
index 03cbed0..88f1622 100644
|
||||
--- a/podcasts-data/src/schema.rs
|
||||
+++ b/podcasts-data/src/schema.rs
|
||||
@@ -1,8 +1,11 @@
|
||||
+#![allow(warnings)]
|
||||
+
|
||||
table! {
|
||||
episodes (title, show_id) {
|
||||
+ rowid -> Integer,
|
||||
title -> Text,
|
||||
uri -> Nullable<Text>,
|
||||
local_uri -> Nullable<Text>,
|
||||
description -> Nullable<Text>,
|
||||
epoch -> Integer,
|
||||
length -> Nullable<Integer>,
|
||||
@@ -30,11 +33,7 @@ table! {
|
||||
uri -> Text,
|
||||
last_modified -> Nullable<Text>,
|
||||
http_etag -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
-allow_tables_to_appear_in_same_query!(
|
||||
- episodes,
|
||||
- shows,
|
||||
- source,
|
||||
-);
|
||||
+allow_tables_to_appear_in_same_query!(episodes, shows, source);
|
||||
@ -1,31 +1,28 @@
|
||||
#![allow(warnings)]
|
||||
|
||||
table! {
|
||||
episode (id) {
|
||||
id -> Integer,
|
||||
title -> Nullable<Text>,
|
||||
uri -> Text,
|
||||
episodes (title, show_id) {
|
||||
rowid -> Integer,
|
||||
title -> Text,
|
||||
uri -> Nullable<Text>,
|
||||
local_uri -> Nullable<Text>,
|
||||
description -> Nullable<Text>,
|
||||
published_date -> Nullable<Text>,
|
||||
epoch -> Integer,
|
||||
length -> Nullable<Integer>,
|
||||
duration -> Nullable<Integer>,
|
||||
guid -> Nullable<Text>,
|
||||
played -> Nullable<Integer>,
|
||||
favorite -> Bool,
|
||||
archive -> Bool,
|
||||
podcast_id -> Integer,
|
||||
show_id -> Integer,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
|
||||
podcast (id) {
|
||||
shows (id) {
|
||||
id -> Integer,
|
||||
title -> Text,
|
||||
link -> Text,
|
||||
description -> Text,
|
||||
image_uri -> Nullable<Text>,
|
||||
favorite -> Bool,
|
||||
archive -> Bool,
|
||||
always_dl -> Bool,
|
||||
source_id -> Integer,
|
||||
}
|
||||
}
|
||||
@ -38,3 +35,5 @@ table! {
|
||||
http_etag -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
allow_tables_to_appear_in_same_query!(episodes, shows, source);
|
||||
310
podcasts-data/src/utils.rs
Normal file
310
podcasts-data/src/utils.rs
Normal file
@ -0,0 +1,310 @@
|
||||
// utils.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Helper utilities for accomplishing various tasks.
|
||||
|
||||
use chrono::prelude::*;
|
||||
use rayon::prelude::*;
|
||||
|
||||
use url::{Position, Url};
|
||||
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{EpisodeCleanerModel, Save, Show};
|
||||
use crate::xdg_dirs::DL_DIR;
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Scan downloaded `episode` entries that might have broken `local_uri`s and
|
||||
/// set them to `None`.
|
||||
fn download_checker() -> Result<(), DataError> {
|
||||
let mut episodes = dbqueries::get_downloaded_episodes()?;
|
||||
|
||||
episodes
|
||||
.par_iter_mut()
|
||||
.filter_map(|ep| {
|
||||
if !Path::new(ep.local_uri()?).exists() {
|
||||
return Some(ep);
|
||||
}
|
||||
None
|
||||
})
|
||||
.for_each(|ep| {
|
||||
ep.set_local_uri(None);
|
||||
ep.save()
|
||||
.map_err(|err| error!("{}", err))
|
||||
.map_err(|_| error!("Error while trying to update episode: {:#?}", ep))
|
||||
.ok();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Delete watched `episodes` that have exceeded their lifetime after played.
|
||||
fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
|
||||
let mut episodes = dbqueries::get_played_cleaner_episodes()?;
|
||||
let now_utc = cleanup_date.timestamp() as i32;
|
||||
|
||||
episodes
|
||||
.par_iter_mut()
|
||||
.filter(|ep| ep.local_uri().is_some() && ep.played().is_some())
|
||||
.for_each(|ep| {
|
||||
let limit = ep.played().unwrap();
|
||||
if now_utc > limit {
|
||||
delete_local_content(ep)
|
||||
.map(|_| info!("Episode {:?} was deleted successfully.", ep.local_uri()))
|
||||
.map_err(|err| error!("Error: {}", err))
|
||||
.map_err(|_| error!("Failed to delete file: {:?}", ep.local_uri()))
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check `ep.local_uri` field and delete the file it points to.
|
||||
fn delete_local_content(ep: &mut EpisodeCleanerModel) -> Result<(), DataError> {
|
||||
if ep.local_uri().is_some() {
|
||||
let uri = ep.local_uri().unwrap().to_owned();
|
||||
if Path::new(&uri).exists() {
|
||||
let res = fs::remove_file(&uri);
|
||||
if res.is_ok() {
|
||||
ep.set_local_uri(None);
|
||||
ep.save()?;
|
||||
} else {
|
||||
error!("Error while trying to delete file: {}", uri);
|
||||
error!("{}", res.unwrap_err());
|
||||
};
|
||||
}
|
||||
} else {
|
||||
error!(
|
||||
"Something went wrong evaluating the following path: {:?}",
|
||||
ep.local_uri(),
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Database cleaning tasks.
|
||||
///
|
||||
/// Runs a download checker which looks for `Episode.local_uri` entries that
|
||||
/// doesn't exist and sets them to None
|
||||
///
|
||||
/// Runs a cleaner for played Episode's that are pass the lifetime limit and
|
||||
/// scheduled for removal.
|
||||
pub fn checkup(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
|
||||
info!("Running database checks.");
|
||||
download_checker()?;
|
||||
played_cleaner(cleanup_date)?;
|
||||
info!("Checks completed.");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove fragment identifiers and query pairs from a URL
|
||||
/// If url parsing fails, return's a trimmed version of the original input.
|
||||
pub fn url_cleaner(s: &str) -> String {
|
||||
// Copied from the cookbook.
|
||||
// https://rust-lang-nursery.github.io/rust-cookbook/net.html
|
||||
// #remove-fragment-identifiers-and-query-pairs-from-a-url
|
||||
match Url::parse(s) {
|
||||
Ok(parsed) => parsed[..Position::AfterQuery].to_owned(),
|
||||
_ => s.trim().to_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the URI of a Show Downloads given it's title.
|
||||
pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
|
||||
// It might be better to make it a hash of the title or the Show rowid
|
||||
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
|
||||
|
||||
// Create the folder
|
||||
fs::DirBuilder::new()
|
||||
.recursive(true)
|
||||
.create(&download_fold)?;
|
||||
Ok(download_fold)
|
||||
}
|
||||
|
||||
/// Removes all the entries associated with the given show from the database,
|
||||
/// and deletes all of the downloaded content.
|
||||
// TODO: Write Tests
|
||||
pub fn delete_show(pd: &Show) -> Result<(), DataError> {
|
||||
dbqueries::remove_feed(pd)?;
|
||||
info!("{} was removed successfully.", pd.title());
|
||||
|
||||
let fold = get_download_folder(pd.title())?;
|
||||
fs::remove_dir_all(&fold)?;
|
||||
info!("All the content at, {} was removed successfully", &fold);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
use crate::Feed;
|
||||
|
||||
#[cfg(test)]
/// Helper function that opens a local file, parses the `rss::Channel` and gives
/// back a `Feed` object. Alternative `Feed` constructor to be used for tests.
pub fn get_feed(file_path: &str, id: i32) -> Feed {
    use crate::feed::FeedBuilder;
    use rss::Channel;
    use std::io::BufReader;

    // Parse the rss channel out of the local xml file.
    let reader = BufReader::new(fs::File::open(file_path).unwrap());
    let channel = Channel::read_from(reader).unwrap();

    // Tie the channel to the given `Source` id.
    FeedBuilder::default()
        .channel(channel)
        .source_id(id)
        .build()
        .unwrap()
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::Duration;
    use failure::Error;
    use tempdir::TempDir;

    use crate::database::truncate_db;
    use crate::models::NewEpisodeBuilder;

    use std::fs::File;
    use std::io::Write;

    /// Reset the db and insert two episodes: "foo_bar" pointing to a real tmp
    /// file and "bar_baz" pointing to a path that does not exist. The returned
    /// `TempDir` must be kept alive or the valid file is deleted with it.
    fn helper_db() -> Result<TempDir, Error> {
        // Clean the db
        truncate_db()?;
        // Setup tmp file stuff
        let tmp_dir = TempDir::new("podcasts_test")?;
        let valid_path = tmp_dir.path().join("virtual_dl.mp3");
        let bad_path = tmp_dir.path().join("invalid_thing.mp3");
        let mut tmp_file = File::create(&valid_path)?;
        writeln!(tmp_file, "Foooo")?;

        // Setup episodes
        let n1 = NewEpisodeBuilder::default()
            .title("foo_bar".to_string())
            .show_id(0)
            .build()
            .unwrap()
            .to_episode()?;

        let n2 = NewEpisodeBuilder::default()
            .title("bar_baz".to_string())
            .show_id(1)
            .build()
            .unwrap()
            .to_episode()?;

        let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id())?;
        let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id())?;
        ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
        ep2.set_local_uri(Some(bad_path.to_str().unwrap()));

        ep1.save()?;
        ep2.save()?;

        Ok(tmp_dir)
    }

    #[test]
    fn test_download_checker() -> Result<(), Error> {
        // Keep `tmp_dir` alive so the valid file still exists on disk.
        let tmp_dir = helper_db()?;
        download_checker()?;
        let episodes = dbqueries::get_downloaded_episodes()?;
        let valid_path = tmp_dir.path().join("virtual_dl.mp3");

        // Only the episode whose file exists should still count as downloaded.
        assert_eq!(episodes.len(), 1);
        assert_eq!(
            Some(valid_path.to_str().unwrap()),
            episodes.first().unwrap().local_uri()
        );

        let _tmp_dir = helper_db()?;
        download_checker()?;
        // The missing-file episode should have had its local_uri cleared.
        let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1)?;
        assert!(episode.local_uri().is_none());
        Ok(())
    }

    #[test]
    fn test_download_cleaner() -> Result<(), Error> {
        let _tmp_dir = helper_db()?;
        let mut episode: EpisodeCleanerModel =
            dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?.into();

        let valid_path = episode.local_uri().unwrap().to_owned();
        delete_local_content(&mut episode)?;
        // The on-disk file must be gone after deletion.
        assert_eq!(Path::new(&valid_path).exists(), false);
        Ok(())
    }

    #[test]
    fn test_played_cleaner_expired() -> Result<(), Error> {
        let _tmp_dir = helper_db()?;
        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
        let cleanup_date = Utc::now() - Duration::seconds(1000);
        // Played one second BEFORE the cutoff => expired.
        let epoch = cleanup_date.timestamp() as i32 - 1;
        episode.set_played(Some(epoch));
        episode.save()?;
        let valid_path = episode.local_uri().unwrap().to_owned();

        // This should delete the file
        played_cleaner(cleanup_date)?;
        assert_eq!(Path::new(&valid_path).exists(), false);
        Ok(())
    }

    #[test]
    fn test_played_cleaner_none() -> Result<(), Error> {
        let _tmp_dir = helper_db()?;
        let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
        let cleanup_date = Utc::now() - Duration::seconds(1000);
        // Played one second AFTER the cutoff => still within its lifetime.
        let epoch = cleanup_date.timestamp() as i32 + 1;
        episode.set_played(Some(epoch));
        episode.save()?;
        let valid_path = episode.local_uri().unwrap().to_owned();

        // This should not delete the file
        played_cleaner(cleanup_date)?;
        assert_eq!(Path::new(&valid_path).exists(), true);
        Ok(())
    }

    #[test]
    fn test_url_cleaner() -> Result<(), Error> {
        let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
        let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184#foobar";

        // Fragments are stripped, queries kept, surrounding whitespace trimmed.
        assert_eq!(url_cleaner(bad_url), good_url);
        assert_eq!(url_cleaner(good_url), good_url);
        assert_eq!(url_cleaner(&format!(" {}\t\n", bad_url)), good_url);
        Ok(())
    }

    #[test]
    // This test needs access to local system so we ignore it by default.
    #[ignore]
    fn test_get_dl_folder() -> Result<(), Error> {
        let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
        assert_eq!(get_download_folder("foo")?, foo_);
        let _ = fs::remove_dir_all(foo_);
        Ok(())
    }
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user