Compare commits
1081 Commits
.gitignore (vendored, 22 changed lines)

@@ -1,13 +1,23 @@
target/
**/*.rs.bk
Cargo.lock
.vscode
*.ui~
*.gresource
_build
resources.gresource
_build/
build/
vendor/
.criterion/
org.gnome.*.json~
podcasts-gtk/po/gnome-podcasts.pot

# scripts/test.sh
target_*/

# flatpak-builder stuff
.flatpak-builder/
flatpak-build/
app/
repo/
Makefile
.criterion

# Files configured by meson
podcasts-gtk/src/config.rs
podcasts-gtk/src/static_resource.rs
@@ -1,61 +1,30 @@
stages:
# meson uses cargo to do the build
# so it's ok to have the tests first.
- test
# - build
- lint
include:
- project: 'gnome/citemplates'
file: 'flatpak/flatpak-ci-initiative-sdk-extensions.yml'
# ref: ''

before_script:
- apt-get update -yqq
- apt-get install -yqq --no-install-recommends build-essential
- apt-get install -yqq --no-install-recommends libgtk-3-dev
# - apt-get install -yqq --no-install-recommends meson
flatpak:
image: 'registry.gitlab.gnome.org/gnome/gnome-runtime-images/rust_bundle:3.36'
variables:
MANIFEST_PATH: "org.gnome.Podcasts.Devel.json"
FLATPAK_MODULE: "gnome-podcasts"
MESON_ARGS: "-Dprofile=development"
APP_ID: "org.gnome.Podcasts.Devel"
RUNTIME_REPO: "https://nightly.gnome.org/gnome-nightly.flatpakrepo"
BUNDLE: "org.gnome.Podcasts.Devel.flatpak"
extends: '.flatpak'

.cargo_test_template: &cargo_test
stage: test
script:
- rustc --version && cargo --version
# Force regeneration of gresources regardless of artifacts chage
- cd hammond-gtk/resources/ && glib-compile-resources --generate resources.xml && cd ../../
- cargo build
- cargo test --verbose -- --test-threads=1
- cargo test --verbose -- --test-threads=1 --ignored

variables:
# RUSTFLAGS: "-C link-dead-code"
RUST_BACKTRACE: "FULL"
CARGO_HOME: $CI_PROJECT_DIR/cargo

stable:test:
# https://hub.docker.com/_/rust/
image: "rust"
<<: *cargo_test

# nightly:test:
# # https://hub.docker.com/r/rustlang/rust/
# image: "rustlang/rust:nightly"
# <<: *cargo_test

# Configure and run rustfmt on nightly
# Configure and run rustfmt
# Exits and builds fails if on bad format
rustfmt:
image: "rustlang/rust:nightly"
stage: lint
variables:
CFG_RELEASE_CHANNEL: "nightly"
image: "rust:slim"
stage: ".pre"
script:
- rustc --version && cargo --version
- cargo install rustfmt-nightly --force
- cargo fmt --all -- --write-mode=diff

# Configure and run clippy on nightly
# Only fails on errors atm.
# clippy:
# image: "rustlang/rust:nightly"
# stage: lint
# script:
# - rustc --version && cargo --version
# - cargo install clippy --force
# # Force regeneration of gresources regardless of artifacts chage
# - cd hammond-gtk/resources/ && glib-compile-resources --generate resources.xml && cd ../../
# - cargo clippy --all
- rustup component add rustfmt
# Create blank versions of our configured files
# so rustfmt does not yell about non-existent files or completely empty files
- echo -e "" >> podcasts-gtk/src/config.rs
- echo -e "" >> podcasts-gtk/src/static_resource.rs
- rustc -Vv && cargo -Vv
- cargo fmt --version
- cargo fmt --all -- --color=always --check
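For reference, the rustfmt job in the CI configuration above can be approximated locally before pushing. The sketch below only restates the job's own script steps (the touched file paths and the `cargo fmt` invocation come from the job definition); it assumes a checkout of the repository root with a rustup-managed Rust toolchain on PATH.

```sh
# Rough local equivalent of the rustfmt CI job above (a sketch, not project policy).
rustup component add rustfmt

# The CI job appends an empty line to the meson-generated files so that
# rustfmt does not fail on files missing from a fresh checkout.
echo -e "" >> podcasts-gtk/src/config.rs
echo -e "" >> podcasts-gtk/src/static_resource.rs

rustc -Vv && cargo -Vv
cargo fmt --version
cargo fmt --all -- --color=always --check
```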
@@ -18,7 +18,7 @@ Some common cases might be:

Steps to reproduce:

1. Open Hammond
1. Open GNOME Podcasts
2. Do an action
3. ...

@@ -1,9 +1,40 @@
Detailed description of the issue. Put as much information as you can, potentially
with images showing the issue.
# Steps to reproduce
<!--
Explain in detail the steps on how the issue can be reproduced.
-->
1.
2.
3.

Steps to reproduce:
Reproducible in:
<!--
Please test if the issue was already fixed in the unstable version of the app.
For that, follow these steps:
1. Make sure Flatpak is installed or install it following these steps https://flatpak.org/setup
2. Install the unstable version of the app following, flatpak bundles can be found in the CI artifacts.

1. Open Hammond
2. Do an action
3. ...
If these steps failed, write in 'Other' the distribution you're using and
the version of the app.
-->
- Flatpak unstable: (yes or no) <!-- Write "yes" or "no" after the semicolon. -->
- Other:

# Current behavior
<!-- Describe the current behavior. -->

# Expected behavior
<!-- Describe the expected behavior. -->

# Additional information
<!--
Provide more information that could be relevant.

If the issue is a crash, provide a stack trace following the steps in:
https://wiki.gnome.org/Community/GettingInTouch/Bugzilla/GettingTraces
-->

<!-- Ignore the text under this line. -->
/label ~"Bug"
.gitlab/issue_templates/Epic.md (new file, 41 lines)

@@ -0,0 +1,41 @@
# Current problems
<!--
What are the problems that the current project has?

For example:
* User cannot use the keyboard to perform most common actions
or
* User cannot see documents from cloud services
-->

# Goals & use cases
<!--
What are the use cases that this proposal will cover? What are the end goals?

For example:
* User needs to share a file with their friends.
or
* It should be easy to edit a picture within the app.
-->

# Requirements
<!--
What does the solution needs to ensure for being succesful?

For example:
* Work on small form factors and touch
or
* Use the Meson build system and integrate with it
-->

# Relevant art
<!--
Is there any product that has implemented something similar? Put links to other
projects, pictures, links to other code, etc.
-->

# Proposal & plan
<!-- What's the solution and how should be achieved? It can be split in smaller
tasks of minimum change, so they can be delivered across several releases. -->

/label ~"Epic"
@@ -1,17 +1,24 @@
Detailed description of the feature. Put as much information as you can.
### Use cases
<!--
Describe what problem(s) the user is experiencing and that this request
is trying to solve.
-->

Proposed Mockups:

(Add mockups of the proposed feature)
### Desired behavior
<!-- Describe the desired functionality. -->

## Design Tasks

* [ ] design tasks
### Benefits of the solution
<!-- List the possible benefits of the solution and how it fits in the project. -->

## Development Tasks

* [ ] development tasks
### Possible drawbacks
<!--
Describe possible drawbacks of the feature and list how it could affect
the project i.e. UI discoverability, complexity, impact in more or less
number of users, etc.
-->

## QA Tasks

* [ ] qa (quality assurance) tasks
<!-- Ignore the text under this line. -->
/label ~"Feature"
.gitlab/merge_requests_templates/mr.md (new file, 1 line)

@@ -0,0 +1 @@
### Please attach a relevant issue to this MR, if this doesn't exist please create one.
CHANGELOG.md (276 changed lines)

@@ -6,26 +6,270 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## [Unreleased]

## [0.3.0] - 2018-02-11
### Added:

* Tobias Bernard Redesigned the whole Gtk+ client.
* Complete re-write of hammond-data and hammond-gtk modules.
* Error handling for all crates was migrated from error-chain to Failure.
* Hammond-data now uses futures to parse feeds.
* Custom gtk-widgets are now composed structs as opposed to functions returning Gtk widgets.
### Changed:

### Fixed:

### Removed:

## [0.4.7] - 2019-10-23

### Added:
- Improved appdata validation and meson tests World/podcasts!89
- The ability to export show subscriptions to opml files World/podcasts!77
- Support for feeds requiring authentication World/podcasts!120

### Changed:

- Episodes now have a checkmark to show whether or not they've been played World/podcasts!106
- Changed to how errors are shown when adding podcasts World/podcasts!108 World/podcasts!109 World/podcasts!110
- Improved integration of cargo and meson World/podcasts!94
- Refactored some macros for error handling World/podcasts!82
- Refactored the handling of styling changes World/podcasts!119
- Updated the icon to better match the HIG guidlines World/podcasts#102
- Made Podcasts use a GtkApplication subclass World/podcasts!113
- Updated the MPRIS permissions in order to remove a sandbox hole World/podcasts!124
- Bumped gtk and libhandy minimum versions

### Fixed:

- Rewind now works regardless if its the start or the end of the episode World/podcasts!83
- Typos in the README and CONTRIBUTING docs World/podcast!97 World/podcast!98 World/podcast!99 World/podcasts!121
- Show cover is reset properly now if there isn't an image World/podcasts#114
- Query pairs are no longer stripped from URLs World/podcasts!111
- Pause MPRIS button now works on KDE Plasma World/podcasts#115
- The playback widget now properly reflects the playback state on episode change World/podcasts!116

### Removed:

- All preferences World/podcast!104

## [0.4.6] - 2018-10-07

### Added:
- Felix, @haecker-felix, wrote an [mpris crate](https://crates.io/crates/mpris-player) and implemented MPRIS2 client side support! !74 #68

### Changed:
- Download Cancel button was changed to an Icon instead of a label !72
- The applciation will no longer scale below 360p in width 1933c79f7a87d8261d91ca4e14eb51c1ddc66624
- Update to the latest HIG 5050dda4d2f75b706842de8507d115dd5a1bd0a9
- Chris, @brainblasted, upgraded hyper to 0.12, this brings openssl 1.1 support !75
- Pipeline backend is now completly migrated to tokio-runtime 0887789f5e653dd92ad397fb39561df6dffcb45c
- Resume playing an episode will attempt to rewind the track only if more than a minute has passed since the last pause !76

### Fixed:
- Fixed a regression where indexing feeds was blocking the `tokio reactor` #88 !70
- Episodeds Listbox no longer resizes when a download starts #89 !72
- The `total_size` label of the `EpisodeWidget` now behaves correctly if the request fails #90 !73
- The Pipeline will no longer log things in stderr for Requests that returned 304 and are expected to be skipped da361d0cb93cd8edd076859b2c607509a96dac8d
- A bug where the HomeView wold get into an invalid state if your only shows had no episodes 32bd2a89a34e8e940b3b260c6be76defe11835ed

### Translations:

**Added**
- Brazilian Portuguese translation 586cf16f
- Swedish translation 2e527250
- Italian translation a23297e5
- Friulian translation 60e09c0d
- Hungarian translation 2751a828
- Croatian translation 0476b67b
- Latvian translation a681b2c9
- Czech translation 3563a964
- Catalan translation 6ea3fc91

**Updated**
- German translation
- Finnish translation
- Polish translation
- Turkish translation
- Croatian translation
- Indonesian translation
- Spanish translation

## [0.4.5] - 2018-08-31

### Added:
- [OARS](https://hughsie.github.io/oars/) Tags where added for compatibility with Store clients b0c94dd9
- Daniel added support for Translations !46
- Svitozar Cherepii(@svito) created a [wiki page](https://wiki.gnome.org/Apps/Podcasts) 70e79e50
- Libhandy was added as a dependancy #70
- Development builds can now be installed in parallel with stable builds !64

### Changed:
- The update indication was moved to an In-App notification #72
- The app icon's accent color was changed from orange to red 0dfb4859
- The stack switcher in the Headerbar is now insesitive on Empty Views !63

### Fixed:
- Improved handling of HTTP redirections #64 !61 !62
- Fixed a major performance regression when loading show covers !67
- More refference cycles have been fixed !59
- OPML import dialog now exits properly and no longer keeps the application from shuting down !65
- Update action is disabled if there isn't something to update #71

### Translations:
- Added Finish 93696026
- Added Polish 1bd6efc0
- Added Turkish 73929f2d
- Added Spanish !46
- Added German 6b6c390c
- Added Galician 0060a634
- Added Indonesian ded0224f
- Added Korean 36f16963

## [0.4.4] - 2018-07-31

### Changed:
- `SendCell` crate was replaced with `Fragile`. (Jorda Petridis) 838320785ebbea94e009698b473495cfec076f54
- Update dependancies (Jorda Petridis) 91bea8551998b16e44e5358fdd43c53422bcc6f3

### Fixed:
- Fix more refference cycles. (Jorda Petridis) 3496df24f8d8bfa8c8a53d8f00262d42ee39b41c
- Actually fix cargo-vendor (Jorda Petridis)

## [0.4.3] - 2018-07-27

### Fixed:

- Fix the cargo vendor config for the tarball releash script. (Jorda Petridis) a2440c19e11ca4dcdbcb67cd85259a41fe3754d6

## [0.4.2] - 2018-07-27

### Changed:

- Minimum size requested by the Views. (Jorda Petridis) 7c96152f3f53f271247230dccf1c9cd5947b685f

### Fixed:

- Screenshot metadata in appstream data. (Jorda Petridis) a2440c19e11ca4dcdbcb67cd85259a41fe3754d6

## [0.4.1] - 2018-07-26
### Added:

- Custom icons for the fast-forward and rewind actions in the Player were added. (Tobias Bernard) e77000076b3d78b8625f4c7ef367376d0130ece6
- Hicolor and symbolic icons for the Application. (Tobias Bernard and Sam Hewitt) edae1b04801dba9d91d5d4145db79b287f0eec2c
- Basic prefferences dialog (Zander Brown). [34](https://gitlab.gnome.org/World/podcasts/merge_requests/34)
- Dbus service preperation. Not used till the MPRIS2 integration has landed. (Zander Brown) [42](https://gitlab.gnome.org/World/podcasts/merge_requests/42)
- Episodes and Images will only get drawn when needed. Big Performance impact. (Jordan Petridis) [43](https://gitlab.gnome.org/World/podcasts/merge_requests/43)

### Changed:

- The `ShowWidget` control button were moved to a secondary menu in the Headerbar. (Jordan Petridis) 536805791e336a3e112799be554706bb804d2bef
- EmptyView layout improvements. (Jorda Petridis) 3c3d6c1e7f15b88308a9054b15a6ca0d8fa233ce 518ea9c8b57885c44bda9c418b19fef26ae0e55d
- Improved the `AddButton` behavior. (Jorda Petridis) 67ab54f8203f19aad198dc49e935127d25432b41

### Fixed:

- A couple reffence cycles where fixed. (Jorda Petridis)

### Removed:

- The delay between the application startup and the `update_on_startup` action. (Jorda Petridis) 7569465a612ee5ef84d0e58f4e1010c8d14080d4

## [0.4.0] - 2018-07-04
### Added:
- Keyboard Shortcuts and a Shortcuts dialog were implemented. (ZanderBrown)
[!33](https://gitlab.gnome.org/World/podcasts/merge_requests/33)

### Changed:
- The `FileChooser` of the OPML import was changed to use the `FileChooserNative` widget/API. (ZanderBrown)
[!33](https://gitlab.gnome.org/World/podcasts/merge_requests/33)
- The `EpisdeWidget` was refactored.
[!38](https://gitlab.gnome.org/World/podcasts/merge_requests/38)
- `EpisdeWidget`'s progressbar was changed to be non-blocking and should feel way more responsive now. 9b0ac5b83dadecdff51cd398293afdf0d5276012
- An embeded audio player was implemented!
[!40](https://gitlab.gnome.org/World/podcasts/merge_requests/40)
- Various Database changes.
[!41](https://gitlab.gnome.org/World/podcasts/merge_requests/41)

### Fixed:
- Fixed a bug whre the about dialog would be unclosable. (ZanderBrown) [!37](https://gitlab.gnome.org/World/podcasts/merge_requests/37)

## [0.3.4] - 2018-05-20
### Fixed:
- Flatpak can now access the Home folder. This fixes the OPML import feature from
not being able to access any file.

## [0.3.3] - 2018-05-19
### Added:
- Initial functionality for importing shows from an OPML file was implemented.
- ShowsView now rembmers the vertical alignment of the scrollbar between refreshes. 4d2b64e79d8518454b3677612664cd32044cf837

### Changed:
- Minimum `rustc` version requirment was bumped to `1.26`
- Some animations should be smoother now. 7d598bb1d08b05fd5ab532657acdad967c0afbc3
- InAppNotification now can be used to propagate some erros to the user. 7035fe05c4741b3e7ccce6827f72766226d5fc0a and 118dac5a1ab79c0b4ebe78e88256a4a38b138c04

### Fixed:
- Fixed a of by one bug in the `ShowsView` where the last show was never shown. bd12b09cbc8132fd39a266fd091e24bc6c3c040f

## [0.3.2] - 2018-05-07
### Added:
- Vies now have a new fancy scrolling animation when they are refereshed.

### Changed:
- Downlaoding and loading images now is done asynchronously and is not blocking programs execution.
[#7](https://gitlab.gnome.org/World/podcasts/issues/7)
- Bold, italics links and some other `html` tags can now be rendered in the Show Description.
[#25](https://gitlab.gnome.org/World/podcasts/issues/25)
- `Rayon` Threadpools are now used instead of unlimited one-off threads.
- `EpisdeWidget`s are now loaded asynchronously accross views.
- `EpisodeWidget`s no longer trigger a `View` refresh for trivial stuff 03bd95184808ccab3e0ea0e3713a52ee6b7c9ab4
- `ShowWidget` layout was changed 9a5cc1595d982f3232ee7595b83b6512ac8f6c88
- `ShowWidget` Description is inside a scrolled window now

### Fixed:
- `EpisodeWidget` Height now is consistent accros views [#57](https://gitlab.gnome.org/World/podcasts/issues/57)
- Implemented a tail-recursion loop to follow-up when a feed redirects to another url. c6a24e839a8ba77d09673f299cfc1e64ba7078f3

### Removed:
- Removed the custom configuration file and replaced instructions to just use meson. 1f1d4af8ba7db8f56435d13a1c191ecff3d4a85b

## [0.3.1] - 2018-03-28
### Added:
- Ability to mark all episodes of a Show as watched.
[#47](https://gitlab.gnome.org/World/podcasts/issues/47)
- Now you are able to subscribe to itunes™ podcasts by using the itunes link of the show.
[#49](https://gitlab.gnome.org/World/podcasts/issues/49)
- Hammond now remembers the window size and position. (Rowan Lewis)
[#50](https://gitlab.gnome.org/World/podcasts/issues/50)
- Implemnted the initial work for integrating with GSettings and storing preferences. (Rowan Lewis)
[!22](https://gitlab.gnome.org/World/podcasts/merge_requests/22) [!23](https://gitlab.gnome.org/World/podcasts/merge_requests/23)
- Shows without episodes now display an empty message similar to EmptyView.
[#44](https://gitlab.gnome.org/World/podcasts/issues/44)

### Changed:
- EpisdeWidget has been reimplemented as a compile time state machine.
[!18](https://gitlab.gnome.org/World/podcasts/merge_requests/18)
- Content Views no longer scroll horizontally when shrunk bellow their minimum size.
[#35](https://gitlab.gnome.org/World/podcasts/issues/35)
- Some requests now use the Tor Browser's user agent. (Rowan Lewis)
[#53](https://gitlab.gnome.org/World/podcasts/issues/53)

### Fixed:
- Double border aroun the main window was fixed. (Rowan Lewis)
[#52](https://gitlab.gnome.org/World/podcasts/issues/52)

## [0.3.0] - 2018-02-11
- Tobias Bernard Redesigned the whole Gtk+ client.
- Complete re-write of hammond-data and hammond-gtk modules.
- Error handling for all crates was migrated from error-chain to Failure.
- Hammond-data now uses futures to parse feeds.
- Custom gtk-widgets are now composed structs as opposed to functions returning Gtk widgets.

## [0.2.0] - 2017-11-28

* Database Schema Breaking Changes.
* Added url sanitization. #4.
* Reworked and refactored of the hammond-data API.
* Added some more unit tests
* Documented hammond-data public API.
- Database Schema Breaking Changes.
- Added url sanitization. #4.
- Reworked and refactored of the hammond-data API.
- Added some more unit tests
- Documented hammond-data public API.

## [0.1.1] - 2017-11-13

* Added appdata.xml file
- Added appdata.xml file

## [0.1.0] - 2017-11-13

Initial Release
- Initial Release
@@ -1,18 +1,18 @@
## Contributing to Hammond
## Contributing to GNOME Podcasts

Thank you for looking in this file!

When contributing to the development of Hammond, please first discuss the change you wish to make via issue, email, or any other method with the maintainers before making a change.
When contributing to the development of GNOME Podcasts, please first discuss the change you wish to make via issue, email, or any other method with the maintainers before making a change.

If you have any questions regarding the use or development of Hammond,
want to discuss design or simply hang out, please join us in [#hammond on irc.gnome.org.](irc://irc.gnome.org/#hammond)
If you have any questions regarding the use or development of GNOME Podcasts,
want to discuss design or simply hang out, please join us in [#gnome-podcasts:matrix.org](https://matrix.to/#/#gnome-podcasts:matrix.org) or [#hammond on irc.gnome.org.](irc://irc.gnome.org/#hammond)

Please note we have a [code of conduct](https://wiki.gnome.org/Foundation/CodeOfConduc), please follow it in all your interactions with the project.
Please note we have a [code of conduct](/code-of-conduct.md), please follow it in all your interactions with the project.

## Source repository

Hammond's main source repository is at gitlab.gnome.org. You can view
the web interface [here](https://gitlab.gnome.org/alatiera/hammond)
GNOME Podcasts's main source repository is at gitlab.gnome.org. You can view
the web interface [here](https://gitlab.gnome.org/World/podcasts)

Development happens in the master branch.

@@ -26,9 +26,12 @@ makes things easier for the maintainers.

We use [rustfmt](https://github.com/rust-lang-nursery/rustfmt) for code formatting and we enforce it on the gitlab-CI server.

Quick setup
***Installing rustfmt*** As of 2019/Jan, our continuous integration
pipeline assumes the version of rustfmt that is distributed through the
stable channel of [rustup](rustup.rs). You can install it with

```
cargo install rustfmt-nightly
rustup component add rustfmt
cargo fmt --all
```

@@ -36,7 +39,7 @@ It is recommended to add a pre-commit hook to run cargo test and `cargo fmt`.
Don't forget to `git add` again after `cargo fmt`.
```
#!/bin/sh
cargo test -- --test-threads=1 && cargo fmt --all -- --write-mode=diff
cargo test -- --test-threads=1 && cargo fmt --all -- --check
```

## Running the test suite
@@ -44,14 +47,14 @@ cargo test -- --test-threads=1 && cargo fmt --all -- --write-mode=diff
Running the tests requires an internet connection and it it will download some files from the [Internet Archive](archive.org)

The test suite sets a temporary sqlite database in the `/tmp` folder.
Due to that it's not possible to run them in parrallel.
Due to that it's not possible to run them in parallel.

In order to run the test suite use the following: `cargo test -- --test-threads=1`

# Issues, issues and more issues!

There are many ways you can contribute to Hammond, and all of them involve creating issues
in [Hammond issue tracker](https://gitlab.gnome.org/alatiera/Hammond/issues). This is the entry point for your contribution.
There are many ways you can contribute to GNOME Podcasts, and all of them involve creating issues
in [GNOME Podcasts issue tracker](https://gitlab.gnome.org/World/podcasts/issues). This is the entry point for your contribution.

To create an effective and high quality ticket, try to put the following information on your
ticket:
@@ -74,7 +77,7 @@ If it's an issue, add the steps to reproduce like this:

Steps to reproduce:

1. Open Hammond
1. Open GNOME Podcasts
2. Do an Action
3. ...

@@ -91,13 +94,13 @@ Steps to reproduce:
* [ ] qa (quality assurance) tasks
```

## Pull Request Process
## Merge Request Process

1. Ensure your code compiles. Run `make` before creating the pull request.
1. Ensure your code compiles. Run `meson` & `ninja` before creating the merge request.
2. Ensure the test suit passes. Run `cargo test -- --test-threads=1`.
3. Ensure your code is properly formated. Run `cargo fmt --all`.
3. Ensure your code is properly formatted. Run `cargo fmt --all`.
4. If you're adding new API, it must be properly documented.
5. The commit message is formatted as follows:
5. The commit message has to be formatted as follows:
```
component: <summary>

@@ -107,8 +110,8 @@ Steps to reproduce:

<link to the bug ticket>
```
6. You may merge the pull request in once you have the sign-off of the maintainers, or if you
6. You may merge the merge request once you have the sign-off of the maintainers, or if you
do not have permission to do that, you may request the second reviewer to merge it for you.

## Code of Conduct
We follow the Gnome [Code of Conduct.](https://wiki.gnome.org/Foundation/CodeOfConduct)
We follow the [GNOME Foundation Code of Conduct](/code-of-conduct.md).
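The pre-commit hook quoted in the CONTRIBUTING diff above can be enabled by placing it under `.git/hooks/`. The following is a minimal sketch: the hook body is the one shown in the diff, while the install location and the `chmod` step are plain git convention rather than anything this repository mandates.

```sh
# Install the suggested pre-commit hook into the local clone and make it executable.
cat > .git/hooks/pre-commit <<'EOF'
#!/bin/sh
cargo test -- --test-threads=1 && cargo fmt --all -- --check
EOF
chmod +x .git/hooks/pre-commit
```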
Cargo.lock (generated, 3241 changed lines)

File diff suppressed because it is too large.

@@ -1,10 +1,9 @@
[workspace]
members = [
"hammond-data",
"hammond-downloader",
"hammond-gtk"
"podcasts-data",
"podcasts-downloader",
"podcasts-gtk"
]

[profile.release]
debug = false

debug = true
LICENSE (4 changed lines)

@@ -631,7 +631,7 @@ to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

Hammond
GNOME Podcasts
Copyright (C) 2017 Jordan Petridis

This program is free software: you can redistribute it and/or modify
@@ -652,7 +652,7 @@ Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:

Hammond Copyright (C) 2017 Jordan Petridis
GNOME Podcasts Copyright (C) 2017 Jordan Petridis
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
164
README.md
164
README.md
@ -1,120 +1,97 @@
|
||||
# Hammond
|
||||
# GNOME Podcasts
|
||||
|
||||
## A Podcast Client for the GNOME Desktop written in Rust.
|
||||
### A Podcast application for GNOME.
|
||||
Listen to your favorite podcasts, right from your desktop.
|
||||
|
||||
[](https://gitlab.gnome.org/alatiera/Hammond/commits/master)
|
||||
|
||||
### Features
|
||||
|
||||
* TBA
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||
|
||||
## Available on Flathub
|
||||
|
||||
[](https://flathub.org/apps/details/org.gnome.Podcasts)
|
||||
|
||||
## Quick start
|
||||
|
||||
As of January 19 2018, Hammond can be built and run with [Gnome Builder](https://wiki.gnome.org/Apps/Builder) Nightly
|
||||
and any Stable release of Gnome Builder >= 3.28.0.
|
||||
GNOME Podcasts can be built and run with [Gnome Builder][builder] >= 3.28.
|
||||
Just clone the repo and hit the run button!
|
||||
|
||||
Get Builder [here](https://wiki.gnome.org/Apps/Builder/Downloads)
|
||||
|
||||
Manually:
|
||||
|
||||
The following steps assume you have a working installation of rustc and cargo.
|
||||
If you dont take a look at [rustup.rs](https://rustup.rs/)
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/hammond.git
|
||||
cd Hammond/
|
||||
cargo run -p hammond-gtk --release
|
||||
```
|
||||
You can get Builder from [here][get_builder].
|
||||
|
||||
## Broken Feeds
|
||||
|
||||
Found a feed that does not work in Hammond?
|
||||
Please [open an issue](https://gitlab.gnome.org/alatiera/Hammond/issues/new) and choose the `BrokenFeed` template so we will know and fix it!
|
||||
Found a feed that does not work in GNOME Podcasts?
|
||||
Please [open an issue][new_issue] and choose the `BrokenFeed` template so we will know and fix it!
|
||||
|
||||
## Getting in Touch
|
||||
|
||||
If you have any questions regarding the use or development of Hammond,
|
||||
want to discuss design or simply hang out, please join us in [#hammond on irc.gnome.org.](irc://irc.gnome.org/#hammond)
|
||||
If you have any questions regarding the use or development of GNOME Podcasts,
|
||||
want to discuss design or simply hang out, please join us on our [irc][irc] or [matrix][matrix] channel.
|
||||
|
||||
Note:
|
||||
|
||||
There isn't much documentation yet, so you will probably have question about parts of the Code.
|
||||
## Building
|
||||
|
||||
### Flatpak
|
||||
|
||||
Flatpak is the reccomended way of building and installing Hammond.
|
||||
Flatpak is the recommended way of building and installing GNOME Podcasts.
|
||||
Here are the dependencies you will need.
|
||||
|
||||
#### Building a Flatpak
|
||||
```sh
|
||||
# Add flathub and the gnome-nightly repo
|
||||
flatpak remote-add --user --if-not-exists flathub https://dl.flathub.org/repo/flathub.flatpakrepo
|
||||
flatpak remote-add --user --if-not-exists gnome-nightly https://nightly.gnome.org/gnome-nightly.flatpakrepo
|
||||
|
||||
Download the `org.gnome.Hammond.json` flatpak manifest from this repo.
|
||||
# Install the gnome-nightly Sdk and Platform runtime
|
||||
flatpak install --user gnome-nightly org.gnome.Sdk org.gnome.Platform
|
||||
|
||||
# Install the required rust-stable extension from flathub
|
||||
flatpak install --user flathub org.freedesktop.Sdk.Extension.rust-stable//19.08
|
||||
```
|
||||
|
||||
To install the resulting flatpak you can do:
|
||||
|
||||
```bash
|
||||
# Add flathub repo
|
||||
flatpak remote-add --if-not-exists flathub https://dl.flathub.org/repo/flathub.flatpakrepo
|
||||
# Add the gnome-nightly repo
|
||||
flatpak --user remote-add gnome-nightly https://sdk.gnome.org/gnome-nightly.flatpakrepo
|
||||
# Install the required rust-stable extension from flathub
|
||||
flatpak --user install flathub org.freedesktop.Sdk.Extension.rust-stable
|
||||
flatpak-builder --repo=repo hammond org.gnome.Hammond.json --force-clean
|
||||
flatpak build-bundle repo hammond org.gnome.Hammond
|
||||
flatpak-builder --user --install --force-clean --repo=repo podcasts org.gnome.Podcasts.json
|
||||
```
|
||||
|
||||
## Building from soure
|
||||
### Building from source
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/hammond.git
|
||||
cd Hammond/
|
||||
./configure --prefix=/usr/local
|
||||
make && sudo make install
|
||||
git clone https://gitlab.gnome.org/World/podcasts.git
|
||||
cd gnome-podcasts/
|
||||
meson --prefix=/usr build
|
||||
ninja -C build
|
||||
sudo ninja -C build install
|
||||
```
|
||||
|
||||
**Additional:**
|
||||
#### Dependencies
|
||||
|
||||
You can run `sudo make uninstall` for removal
|
||||
|
||||
### Dependencies
|
||||
|
||||
* Rust stable 1.22 or later.
|
||||
* Gtk+ 3.22 or later
|
||||
* Rust stable 1.34 or later along with cargo.
|
||||
* Gtk+ 3.24.11 or later
|
||||
* Gstreamer 1.16 or later
|
||||
* libhandy 0.0.11 or later
|
||||
* Meson
|
||||
* A network connection
|
||||
|
||||
**Debian/Ubuntu**
|
||||
|
||||
```sh
|
||||
apt-get update -yqq
|
||||
apt-get install -yqq --no-install-recommends build-essential
|
||||
apt-get install -yqq --no-install-recommends libgtk-3-dev meson
|
||||
```
|
||||
|
||||
**Fedora**
|
||||
|
||||
```sh
|
||||
dnf install -y gtk3-devel glib2-devel openssl-devel sqlite-devel meson
|
||||
```
|
||||
|
||||
If you happen to build it on other distributions please let me know the names of the corresponding libraries. Feel free to open a PR or an Issue to note it.
|
||||
|
||||
```sh
|
||||
git clone https://gitlab.gnome.org/alatiera/Hammond.git
|
||||
cd Hammond/
|
||||
cargo build --all
|
||||
```
|
||||
Offline build are possible too, but [`cargo-vendor`][vendor] would have to be setup first
|
||||
|
||||
## Contributing

There are a lot of things yet to be done.

If you want to contribute, please check the [Contributions Guidelines][contribution-guidelines].

You can start by taking a look at [Issues][issues] or by opening a [New issue][new_issue].
There are also some minor tasks tagged with `TODO:` and `FIXME:` in the source code.

[contribution-guidelines]: https://gitlab.gnome.org/World/podcasts/blob/master/CONTRIBUTING.md
### Translations

Translation of this project takes place on the GNOME translation platform,
[Damned Lies](https://l10n.gnome.org/module/podcasts). For further
information on how to join a language team, or even to create one, please see
the [GNOME Translation Project wiki page](https://wiki.gnome.org/TranslationProject).
## Overview

```sh
$ tree -d
├── screenshots         # PNGs used in the README.md
├── podcasts-data       # Storage related stuff, SQLite, XDG setup, RSS Parser.
│   ├── migrations      # Diesel SQL migrations.
│   │   └── ...
│   ├── src
│   └── tests
│       └── feeds       # Raw RSS Feeds used for tests.
├── podcasts-downloader # Really basic, Really crappy downloader.
│   └── src
├── podcasts-gtk        # The Gtk+ Client
│   ├── resources       # GResources folder
│   │   └── gtk         # Contains the glade.ui files.
│   └── src
│       ├── views       # Contains the Empty, Episodes and Shows view.
│       ├── stacks      # Contains the gtk Stacks that hold all the different views.
│       └── widgets     # Contains custom widgets such as Show and Episode.
```
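Since the workspace is split into these crates, individual ones can be built or tested on their own with cargo's package flag. A sketch, assuming the package names match the directory names above:

```sh
# Run only the data crate's test suite (package name assumed from the directory name)
cargo test -p podcasts-data
```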
## A note about the project's name

The project used to be called Hammond, after Alan Moore's character [Evey Hammond][hammond] from the graphic novel V for Vendetta.
It was renamed to GNOME Podcasts on 2018/07/24 shortly before its first public release.
## Acknowledgments

GNOME Podcasts's design is heavily inspired by [GNOME Music][music] and [Vocal][vocal].

We also copied some elements from [GNOME News][news].

And almost the entirety of the build system is copied from the [Fractal][fractal] project.
[vendor]: https://github.com/alexcrichton/cargo-vendor
[irc]: irc://irc.gnome.org/#hammond
[matrix]: https://matrix.to/#/#gnome-podcasts:matrix.org
[flatpak_setup]: https://flatpak.org/setup/
[music]: https://wiki.gnome.org/Design/Apps/Music
[vocal]: http://vocalproject.net/
[news]: https://wiki.gnome.org/Design/Apps/Potential/News
[fractal]: https://gitlab.gnome.org/World/fractal
[hammond]: https://en.wikipedia.org/wiki/Evey_Hammond
[issues]: https://gitlab.gnome.org/World/podcasts/issues
[new_issue]: https://gitlab.gnome.org/World/podcasts/issues/new
[builder]: https://wiki.gnome.org/Apps/Builder
[get_builder]: https://wiki.gnome.org/Apps/Builder/Downloads
TODO.md (11 lines changed)
## Priorities

- [ ] Unplayed Only and Downloaded only view.
- [ ] OPML import/export // Probably need to create a crate.

## Second

- [ ] Make use of file metadata? [This](https://github.com/GuillaumeGomez/audio-video-metadata) might be helpful.
- [ ] Episode queue
- [ ] Embedded player
- [ ] MPRIS integration

- [ ] Download Queue
- [ ] Ability to Stream content on demand
- [ ] RSS feeds from soundcloud urls? // [This](http://getrssfeed.com) seems interesting.
- [ ] Integrate with iTunes API for various crap?
- [ ] YouTube feeds?

## Rest Tasks

**Would be nice:**

- [ ] Make Podcast cover fetching and loading not block the execution of the program at startup.
- [ ] Lazy evaluate episode loading based on the show_widget's scrolling.
code-of-conduct.md (new file, 126 lines)
|
||||
# GNOME Code of Conduct
|
||||
|
||||
Thank you for being a part of the GNOME project. We value your participation and want everyone to have an enjoyable and fulfilling experience. Accordingly, all participants are expected to follow this Code of Conduct, and to show respect, understanding, and consideration to one another. Thank you for helping make this a welcoming, friendly community for everyone.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies to all online GNOME community spaces, including, but not limited to:
|
||||
|
||||
* Issue tracking systems - bugzilla.gnome.org
|
||||
* Documentation and tutorials - developer.gnome.org
|
||||
* Code repositories - git.gnome.org and gitlab.gnome.org
|
||||
* Mailing lists - mail.gnome.org
|
||||
* Wikis - wiki.gnome.org
|
||||
* Chat and forums - irc.gnome.org, discourse.gnome.org, GNOME Telegram channels, and GNOME groups and channels on Matrix.org (including bridges to GNOME IRC channels)
|
||||
* Community spaces hosted on gnome.org infrastructure
|
||||
* Any other channels or groups which exist in order to discuss GNOME project activities
|
||||
|
||||
Communication channels and private conversations that are normally out of scope may be considered in scope if a GNOME participant is being stalked or harassed. Social media conversations may be considered in-scope if the incident occurred under a GNOME event hashtag, or when an official GNOME account on social media is tagged, or within any other discussion about GNOME. The GNOME Foundation reserves the right to take actions against behaviors that happen in any context, if they are deemed to be relevant to the GNOME project and its participants.
|
||||
|
||||
All participants in GNOME online community spaces are subject to the Code of Conduct. This includes GNOME Foundation board members, corporate sponsors, and paid employees. This also includes volunteers, maintainers, leaders, contributors, contribution reviewers, issue reporters, GNOME users, and anyone participating in discussion in GNOME online spaces.
|
||||
|
||||
## Reporting an Incident
|
||||
|
||||
If you believe that someone is violating the Code of Conduct, or have
|
||||
any other concerns, please [contact the Code of Conduct committee](https://wiki.gnome.org/Foundation/CodeOfConduct/ReporterGuide).
|
||||
|
||||
## Our Standards
|
||||
|
||||
The GNOME online community is dedicated to providing a positive experience for everyone, regardless of:
|
||||
|
||||
* age
|
||||
* body size
|
||||
* caste
|
||||
* citizenship
|
||||
* disability
|
||||
* education
|
||||
* ethnicity
|
||||
* familial status
|
||||
* gender expression
|
||||
* gender identity
|
||||
* genetic information
|
||||
* immigration status
|
||||
* level of experience
|
||||
* nationality
|
||||
* personal appearance
|
||||
* pregnancy
|
||||
* race
|
||||
* religion
|
||||
* sex characteristics
|
||||
* sexual orientation
|
||||
* sexual identity
|
||||
* socio-economic status
|
||||
* tribe
|
||||
* veteran status
|
||||
|
||||
### Community Guidelines
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment include:
|
||||
|
||||
* **Be friendly.** Use welcoming and inclusive language.
|
||||
* **Be empathetic.** Be respectful of differing viewpoints and experiences.
|
||||
* **Be respectful.** When we disagree, we do so in a polite and constructive manner.
|
||||
* **Be considerate.** Remember that decisions are often a difficult choice between competing priorities. Focus on what is best for the community. Keep discussions around technology choices constructive and respectful.
|
||||
* **Be patient and generous.** If someone asks for help it is because they need it. When documentation is available that answers the question, politely point them to it. If the question is off-topic, suggest a more appropriate online space to seek help.
|
||||
* **Try to be concise.** Read the discussion before commenting in order to not repeat a point that has been made.
|
||||
|
||||
### Inappropriate Behavior
|
||||
|
||||
Community members asked to stop any inappropriate behavior are expected to comply immediately.
|
||||
|
||||
We want all participants in the GNOME community to have the best possible experience they can. In order to be clear what that means, we've provided a list of examples of behaviors that are inappropriate for GNOME community spaces:
|
||||
|
||||
* **Deliberate intimidation, stalking, or following.**
|
||||
* **Sustained disruption of online discussion, talks, or other events.** Sustained disruption of events, online discussions, or meetings, including talks and presentations, will not be tolerated. This includes 'Talking over' or 'heckling' event speakers or influencing crowd actions that cause hostility in event sessions. Sustained disruption also includes drinking alcohol to excess or using recreational drugs to excess, or pushing others to do so.
|
||||
* **Harassment of people who don't drink alcohol.** We do not tolerate derogatory comments about those who abstain from alcohol or other substances. We do not tolerate pushing people to drink, talking about their abstinence or preferences to others, or pressuring them to drink - physically or through jeering.
|
||||
* **Sexist, racist, homophobic, transphobic, ableist language or otherwise exclusionary language.** This includes deliberately referring to someone by a gender that they do not identify with, and/or questioning the legitimacy of an individual's gender identity. If you're unsure if a word is derogatory, don't use it. This also includes repeated subtle and/or indirect discrimination.
|
||||
* **Unwelcome sexual attention or behavior that contributes to a sexualized environment.** This includes sexualized comments, jokes or imagery in interactions, communications or presentation materials, as well as inappropriate touching, groping, or sexual advances. Sponsors should not use sexualized images, activities, or other material. Meetup organizing staff and other volunteer organizers should not use sexualized clothing/uniforms/costumes, or otherwise create a sexualized environment.
|
||||
* **Unwelcome physical contact.** This includes touching a person without permission, including sensitive areas such as their hair, pregnant stomach, mobility device (wheelchair, scooter, etc) or tattoos. This also includes physically blocking or intimidating another person. Physical contact or simulated physical contact (such as emojis like "kiss") without affirmative consent is not acceptable. This includes sharing or distribution of sexualized images or text.
|
||||
* **Violence or threats of violence.** Violence and threats of violence are not acceptable - online or offline. This includes incitement of violence toward any individual, including encouraging a person to commit self-harm. This also includes posting or threatening to post other people's personally identifying information ("doxxing") online.
|
||||
* **Influencing or encouraging inappropriate behavior.** If you influence or encourage another person to violate the Code of Conduct, you may face the same consequences as if you had violated the Code of Conduct.
|
||||
* **Possession of an offensive weapon at a GNOME event.** This includes anything deemed to be a weapon by the event organizers.
|
||||
|
||||
The GNOME community prioritizes marginalized people's safety over privileged people's comfort. The committee will not act on complaints regarding:
|
||||
|
||||
* "Reverse"-isms, including "reverse racism," "reverse sexism," and "cisphobia"
|
||||
* Reasonable communication of boundaries, such as "leave me alone," "go away," or "I'm not discussing this with you."
|
||||
* Criticizing racist, sexist, cissexist, or otherwise oppressive behavior or assumptions
|
||||
* Communicating boundaries or criticizing oppressive behavior in a "tone" you don't find congenial
|
||||
|
||||
The examples listed above are not against the Code of Conduct. If you have questions about the above statements, please [read this document](https://github.com/sagesharp/code-of-conduct-template/blob/master/code-of-conduct/example-reversisms.md#supporting-diversity).
|
||||
|
||||
If a participant engages in behavior that violates this code of conduct, the GNOME Code of Conduct committee may take any action they deem appropriate. Examples of consequences are outlined in the [Committee Procedures Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/CommitteeProcedures).
|
||||
|
||||
## Procedure for Handling Incidents
|
||||
|
||||
* [Reporter Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/ReporterGuide)
|
||||
|
||||
* [Moderator Procedures](https://wiki.gnome.org/Foundation/CodeOfConduct/ModeratorProcedures)
|
||||
|
||||
* [Committee Procedures Guide](https://wiki.gnome.org/Foundation/CodeOfConduct/CommitteeProcedures)
|
||||
|
||||
## License
|
||||
|
||||
The GNOME Online Code of Conduct is licensed under a [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/)
|
||||
|
||||

|
||||
|
||||
## Attribution
|
||||
|
||||
The GNOME Online Code of Conduct was forked from the example policy from the [Geek Feminism wiki, created by the Ada Initiative and other volunteers](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy), which is under a Creative Commons Zero license.
|
||||
|
||||
Additional language was incorporated and modified from the following Codes of Conduct:
|
||||
|
||||
* [Citizen Code of Conduct](http://citizencodeofconduct.org/) is licensed [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/).
|
||||
* [Code of Conduct template](https://github.com/sagesharp/code-of-conduct-template/) is licensed [Creative Commons Attribution Share-Alike 3.0 Unported License](http://creativecommons.org/licenses/by-sa/3.0/) by [Otter Tech](https://otter.technology/code-of-conduct-training)
|
||||
* [Contributor Covenant version 1.4](https://www.contributor-covenant.org/version/1/4/code-of-conduct) (licensed [CC BY 4.0](https://github.com/ContributorCovenant/contributor_covenant/blob/master/LICENSE.md))
|
||||
* [Data Carpentry Code of Conduct](https://docs.carpentries.org/topic_folders/policies/index_coc.html) is licensed [Creative Commons Attribution 4.0 License](https://creativecommons.org/licenses/by/4.0/)
|
||||
* [Django Project Code of Conduct](https://www.djangoproject.com/conduct/) is licensed under a [Creative Commons Attribution 3.0 Unported License](http://creativecommons.org/licenses/by/3.0/)
|
||||
* [Fedora Code of Conduct](http://fedoraproject.org/code-of-conduct)
|
||||
* [Geek Feminism Anti-harassment Policy](http://geekfeminism.wikia.com/wiki/Conference_anti-harassment/Policy) which is under a [Creative Commons Zero license](https://creativecommons.org/publicdomain/zero/1.0/)
|
||||
* [Previous GNOME Foundation Code of Conduct](https://wiki.gnome.org/action/recall/Foundation/CodeOfConduct/Old)
|
||||
* [LGBTQ in Technology Slack Code of Conduct](https://lgbtq.technology/coc.html) licensed [Creative Commons Zero](https://creativecommons.org/publicdomain/zero/1.0/)
|
||||
* [Mozilla Community Participation Guidelines](https://www.mozilla.org/en-US/about/governance/policies/participation/) is licensed [Creative Commons Attribution-ShareAlike 3.0 Unported License](https://creativecommons.org/licenses/by-sa/3.0/).
|
||||
* [Python Mentors Code of Conduct](http://pythonmentors.com/)
|
||||
* [Speak Up! Community Code of Conduct](http://web.archive.org/web/20141109123859/http://speakup.io/coc.html), licensed under a [Creative Commons Attribution 3.0 Unported License](http://creativecommons.org/licenses/by/3.0/)
|
||||
|
||||
configure (vendored, removed, 186 lines)
|
||||
#!/bin/bash
|
||||
|
||||
# Adapted from:
|
||||
# https://gitlab.gnome.org/danigm/libgepub/blob/27f0d374e0c8f6fa972dbd111d4ce0c0f3096914/configure_meson
|
||||
|
||||
# configure script adapter for Meson
|
||||
# Based on build-api: https://github.com/cgwalters/build-api
|
||||
# Copyright 2010, 2011, 2013 Colin Walters <walters@verbum.org>
|
||||
# Copyright 2016, 2017 Emmanuele Bassi
|
||||
# Copyright 2017 Iñigo Martínez <inigomartinez@gmail.com>
|
||||
# Licensed under the new-BSD license (http://www.opensource.org/licenses/bsd-license.php)
|
||||
|
||||
# Build API variables:
|
||||
|
||||
# Little helper function for reading args from the commandline.
|
||||
# it automatically handles -a b and -a=b variants, and returns 1 if
|
||||
# we need to shift $3.
|
||||
read_arg() {
|
||||
# $1 = arg name
|
||||
# $2 = arg value
|
||||
# $3 = arg parameter
|
||||
local rematch='^[^=]*=(.*)$'
|
||||
if [[ $2 =~ $rematch ]]; then
|
||||
read "$1" <<< "${BASH_REMATCH[1]}"
|
||||
else
|
||||
read "$1" <<< "$3"
|
||||
# There is no way to shift our callers args, so
|
||||
# return 1 to indicate they should do it instead.
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
sanitycheck() {
|
||||
# $1 = variable name to store the resolved command path
# $2 = command to look for
# $3 = alternate command names
|
||||
local cmd=$( which $2 2>/dev/null )
|
||||
|
||||
if [ -x "$cmd" ]; then
|
||||
read "$1" <<< "$cmd"
|
||||
return 0
|
||||
fi
|
||||
|
||||
test -z $3 || {
|
||||
for alt in $3; do
|
||||
cmd=$( which $alt 2>/dev/null )
|
||||
|
||||
if [ -x "$cmd" ]; then
|
||||
read "$1" <<< "$cmd"
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
echo -e "\e[1;31mERROR\e[0m: Command '$2' not found"
|
||||
exit 1
|
||||
}
|
||||
|
||||
checkoption() {
|
||||
# $1 = arg
|
||||
option="${1#*--}"
|
||||
action="${option%%-*}"
|
||||
name="${option#*-}"
|
||||
if [ ${default_options[$name]+_} ]; then
|
||||
case "$action" in
|
||||
enable) meson_options[$name]=true;;
|
||||
disable) meson_options[$name]=false;;
|
||||
*) echo -e "\e[1;33mINFO\e[0m: Ignoring unknown action '$action'";;
|
||||
esac
|
||||
else
|
||||
echo -e "\e[1;33mINFO\e[0m: Ignoring unknown option '$option'"
|
||||
fi
|
||||
}
|
||||
|
||||
echooption() {
|
||||
# $1 = option
|
||||
if [ ${meson_options[$1]+_} ]; then
|
||||
echo ${meson_options[$1]}
|
||||
elif [ ${default_options[$1]+_} ]; then
|
||||
echo ${default_options[$1]}
|
||||
fi
|
||||
}
|
||||
|
||||
sanitycheck MESON 'meson'
|
||||
sanitycheck MESONTEST 'mesontest'
|
||||
sanitycheck NINJA 'ninja' 'ninja-build'
|
||||
|
||||
declare -A meson_options
|
||||
|
||||
while (($# > 0)); do
|
||||
case "${1%%=*}" in
|
||||
--prefix) read_arg prefix "$@" || shift;;
|
||||
--bindir) read_arg bindir "$@" || shift;;
|
||||
--sbindir) read_arg sbindir "$@" || shift;;
|
||||
--libexecdir) read_arg libexecdir "$@" || shift;;
|
||||
--datarootdir) read_arg datarootdir "$@" || shift;;
|
||||
--datadir) read_arg datadir "$@" || shift;;
|
||||
--sysconfdir) read_arg sysconfdir "$@" || shift;;
|
||||
--libdir) read_arg libdir "$@" || shift;;
|
||||
--mandir) read_arg mandir "$@" || shift;;
|
||||
--includedir) read_arg includedir "$@" || shift;;
|
||||
*) checkoption $1;;
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
# Defaults
|
||||
test -z ${prefix} && prefix="/usr/local"
|
||||
test -z ${bindir} && bindir=${prefix}/bin
|
||||
test -z ${sbindir} && sbindir=${prefix}/sbin
|
||||
test -z ${libexecdir} && libexecdir=${prefix}/bin
|
||||
test -z ${datarootdir} && datarootdir=${prefix}/share
|
||||
test -z ${datadir} && datadir=${datarootdir}
|
||||
test -z ${sysconfdir} && sysconfdir=${prefix}/etc
|
||||
test -z ${libdir} && libdir=${prefix}/lib
|
||||
test -z ${mandir} && mandir=${prefix}/share/man
|
||||
test -z ${includedir} && includedir=${prefix}/include
|
||||
|
||||
# The source directory is the location of this file
|
||||
srcdir=$(dirname $0)
|
||||
|
||||
# The build directory is the current location
|
||||
builddir=`pwd`
|
||||
|
||||
# If we're calling this file from the source directory then
|
||||
# we automatically create a build directory and ensure that
|
||||
# both Meson and Ninja invocations are relative to that
|
||||
# location
|
||||
if [[ -f "${builddir}/meson.build" ]]; then
|
||||
mkdir -p _build
|
||||
builddir="${builddir}/_build"
|
||||
NINJA_OPT="-C ${builddir}"
|
||||
fi
|
||||
|
||||
# Wrapper Makefile for Ninja
|
||||
cat > Makefile <<END
|
||||
# Generated by configure; do not edit
|
||||
|
||||
all: rebuild
|
||||
${NINJA} ${NINJA_OPT}
|
||||
|
||||
rebuild:
|
||||
rm -f ${builddir}/hammond
|
||||
|
||||
install:
|
||||
DESTDIR="\$(DESTDIR)" ${NINJA} ${NINJA_OPT} install
|
||||
|
||||
uninstall:
|
||||
${NINJA} ${NINJA_OPT} uninstall
|
||||
|
||||
release:
|
||||
${NINJA} ${NINJA_OPT} release
|
||||
|
||||
check:
|
||||
${MESONTEST} ${NINJA_OPT}
|
||||
END
|
||||
|
||||
echo "
|
||||
|
||||
hammond
|
||||
=======
|
||||
|
||||
meson: ${MESON}
|
||||
ninja: ${NINJA}
|
||||
prefix: ${prefix}
|
||||
|
||||
Now type 'make' to build
|
||||
"
|
||||
|
||||
cmd_options=""
|
||||
for key in "${!meson_options[@]}"; do
|
||||
cmd_options="$cmd_options -Denable-$key=${meson_options[$key]}"
|
||||
done
|
||||
|
||||
exec ${MESON} \
|
||||
--prefix=${prefix} \
|
||||
--libdir=${libdir} \
|
||||
--libexecdir=${libexecdir} \
|
||||
--datadir=${datadir} \
|
||||
--sysconfdir=${sysconfdir} \
|
||||
--bindir=${bindir} \
|
||||
--includedir=${includedir} \
|
||||
--mandir=${mandir} \
|
||||
${cmd_options} \
|
||||
${builddir} \
|
||||
${srcdir}
|
||||
@ -1,46 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
name = "hammond-data"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
ammonia = "1.0.1"
|
||||
chrono = "0.4.0"
|
||||
derive_builder = "0.5.1"
|
||||
dotenv = "0.10.1"
|
||||
error-chain = "0.11.0"
|
||||
itertools = "0.7.6"
|
||||
lazy_static = "1.0.0"
|
||||
log = "0.4.1"
|
||||
rayon = "0.9.0"
|
||||
rfc822_sanitizer = "0.3.3"
|
||||
rss = "1.2.1"
|
||||
url = "1.6.0"
|
||||
xdg = "2.1.0"
|
||||
futures = "0.1.18"
|
||||
hyper = "0.11.18"
|
||||
tokio-core = "0.1.12"
|
||||
hyper-tls = "0.1.2"
|
||||
native-tls = "0.1.5"
|
||||
futures-cpupool = "0.1.8"
|
||||
num_cpus = "1.8.0"
|
||||
failure = "0.1.1"
|
||||
failure_derive = "0.1.1"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite", "r2d2"]
|
||||
version = "1.1.1"
|
||||
|
||||
[dependencies.diesel_migrations]
|
||||
features = ["sqlite"]
|
||||
version = "1.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.4.2"
|
||||
tempdir = "0.3.6"
|
||||
criterion = "0.2.0"
|
||||
|
||||
[[bench]]
|
||||
name = "bench"
|
||||
harness = false
|
||||
@ -1,125 +0,0 @@
|
||||
#[macro_use]
|
||||
extern crate criterion;
|
||||
use criterion::Criterion;
|
||||
|
||||
// extern crate futures;
|
||||
// extern crate futures_cpupool;
|
||||
extern crate hammond_data;
|
||||
extern crate hyper;
|
||||
extern crate hyper_tls;
|
||||
extern crate rand;
|
||||
extern crate tokio_core;
|
||||
// extern crate rayon;
|
||||
extern crate rss;
|
||||
|
||||
// use rayon::prelude::*;
|
||||
|
||||
// use futures::future::*;
|
||||
// use futures_cpupool::CpuPool;
|
||||
use tokio_core::reactor::Core;
|
||||
|
||||
use hammond_data::FeedBuilder;
|
||||
use hammond_data::Source;
|
||||
use hammond_data::database::truncate_db;
|
||||
use hammond_data::pipeline;
|
||||
// use hammond_data::errors::*;
|
||||
|
||||
use std::io::BufReader;
|
||||
|
||||
// RSS feeds
|
||||
const INTERCEPTED: &[u8] = include_bytes!("../tests/feeds/2018-01-20-Intercepted.xml");
|
||||
const INTERCEPTED_URL: &str = "https://web.archive.org/web/20180120083840if_/https://feeds.\
|
||||
feedburner.com/InterceptedWithJeremyScahill";
|
||||
|
||||
const UNPLUGGED: &[u8] = include_bytes!("../tests/feeds/2018-01-20-LinuxUnplugged.xml");
|
||||
const UNPLUGGED_URL: &str =
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.com/linuxunplugged";
|
||||
|
||||
const TIPOFF: &[u8] = include_bytes!("../tests/feeds/2018-01-20-TheTipOff.xml");
|
||||
const TIPOFF_URL: &str =
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
|
||||
|
||||
// This feed has HUGE description and summary fields which can be very
|
||||
// very expensive to parse.
|
||||
const CODE: &[u8] = include_bytes!("../tests/feeds/2018-01-20-GreaterThanCode.xml");
|
||||
const CODE_URL: &str =
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.com/feed/podcast";
|
||||
|
||||
// Relatively small feed
|
||||
const STARS: &[u8] = include_bytes!("../tests/feeds/2018-01-20-StealTheStars.xml");
|
||||
const STARS_URL: &str =
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars";
|
||||
|
||||
static FEEDS: &[(&[u8], &str)] = &[
|
||||
(INTERCEPTED, INTERCEPTED_URL),
|
||||
(UNPLUGGED, UNPLUGGED_URL),
|
||||
(TIPOFF, TIPOFF_URL),
|
||||
(CODE, CODE_URL),
|
||||
(STARS, STARS_URL),
|
||||
];
|
||||
|
||||
// This is broken and I don't know why.
|
||||
fn bench_pipeline(c: &mut Criterion) {
|
||||
truncate_db().unwrap();
|
||||
FEEDS.iter().for_each(|&(_, url)| {
|
||||
Source::from_url(url).unwrap();
|
||||
});
|
||||
|
||||
c.bench_function("pipline", move |b| {
|
||||
b.iter(|| {
|
||||
let sources = hammond_data::dbqueries::get_sources().unwrap();
|
||||
pipeline::run(sources, true).unwrap();
|
||||
})
|
||||
});
|
||||
truncate_db().unwrap();
|
||||
}
|
||||
|
||||
fn bench_index_large_feed(c: &mut Criterion) {
|
||||
truncate_db().unwrap();
|
||||
let url = "https://www.greaterthancode.com/feed/podcast";
|
||||
let mut core = Core::new().unwrap();
|
||||
|
||||
c.bench_function("index_large_feed", move |b| {
|
||||
b.iter(|| {
|
||||
let s = Source::from_url(url).unwrap();
|
||||
// parse it into a channel
|
||||
let chan = rss::Channel::read_from(BufReader::new(CODE)).unwrap();
|
||||
let feed = FeedBuilder::default()
|
||||
.channel(chan)
|
||||
.source_id(s.id())
|
||||
.build()
|
||||
.unwrap();
|
||||
core.run(feed.index()).unwrap();
|
||||
})
|
||||
});
|
||||
truncate_db().unwrap();
|
||||
}
|
||||
|
||||
fn bench_index_small_feed(c: &mut Criterion) {
|
||||
truncate_db().unwrap();
|
||||
let url = "https://rss.art19.com/steal-the-stars";
|
||||
let mut core = Core::new().unwrap();
|
||||
|
||||
c.bench_function("index_small_feed", move |b| {
|
||||
b.iter(|| {
|
||||
let s = Source::from_url(url).unwrap();
|
||||
// parse it into a channel
|
||||
let chan = rss::Channel::read_from(BufReader::new(STARS)).unwrap();
|
||||
let feed = FeedBuilder::default()
|
||||
.channel(chan)
|
||||
.source_id(s.id())
|
||||
.build()
|
||||
.unwrap();
|
||||
core.run(feed.index()).unwrap();
|
||||
})
|
||||
});
|
||||
truncate_db().unwrap();
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
benches,
|
||||
bench_pipeline,
|
||||
bench_index_large_feed,
|
||||
bench_index_small_feed
|
||||
);
|
||||
criterion_main!(benches);
|
||||
@ -1,350 +0,0 @@
|
||||
//! Random CRUD helper functions.
|
||||
|
||||
use chrono::prelude::*;
|
||||
use diesel::prelude::*;
|
||||
|
||||
use diesel;
|
||||
use diesel::dsl::exists;
|
||||
use diesel::select;
|
||||
|
||||
use database::connection;
|
||||
use errors::DataError;
|
||||
use models::*;
|
||||
|
||||
pub fn get_sources() -> Result<Vec<Source>, DataError> {
|
||||
use schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.order((http_etag.asc(), last_modified.asc()))
|
||||
.load::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcasts() -> Result<Vec<Podcast>, DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
podcast
|
||||
.order(title.asc())
|
||||
.load::<Podcast>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(local_uri.is_not_null())
|
||||
.load::<EpisodeCleanerQuery>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
// use schema::episode::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
// episode
|
||||
// .filter(played.is_not_null())
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerQuery>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(played.is_not_null())
|
||||
.load::<EpisodeCleanerQuery>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.filter(rowid.eq(ep_id))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.filter(rowid.eq(ep_id))
|
||||
.select(local_uri)
|
||||
.get_result::<Option<String>>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes_widgets_with_limit(limit: u32) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
|
||||
use schema::episode;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode::table
|
||||
.select((
|
||||
episode::rowid,
|
||||
episode::title,
|
||||
episode::uri,
|
||||
episode::local_uri,
|
||||
episode::epoch,
|
||||
episode::length,
|
||||
episode::duration,
|
||||
episode::played,
|
||||
episode::podcast_id,
|
||||
))
|
||||
.order(episode::epoch.desc())
|
||||
.limit(i64::from(limit))
|
||||
.load::<EpisodeWidgetQuery>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_from_id(pid: i32) -> Result<Podcast, DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
podcast
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<Podcast>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_cover_from_id(pid: i32) -> Result<PodcastCoverQuery, DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
podcast
|
||||
.select((id, title, image_uri))
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<PodcastCoverQuery>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodeswidgets(parent: &Podcast) -> Result<Vec<EpisodeWidgetQuery>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode.select((rowid, title, uri, local_uri, epoch, length, duration, played, podcast_id))
|
||||
.filter(podcast_id.eq(parent.id()))
|
||||
// .group_by(epoch)
|
||||
.order(epoch.desc())
|
||||
.load::<EpisodeWidgetQuery>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_unplayed_episodes(parent: &Podcast) -> Result<Vec<Episode>, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.filter(played.is_null())
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_pd_episodes_limit(parent: &Podcast, limit: u32) ->
|
||||
// Result<Vec<Episode>, DataError> { use schema::episode::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
|
||||
// Episode::belonging_to(parent)
|
||||
// .order(epoch.desc())
|
||||
// .limit(i64::from(limit))
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub fn get_source_from_uri(uri_: &str) -> Result<Source, DataError> {
|
||||
use schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(uri.eq(uri_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
|
||||
use schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(id.eq(id_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_from_source_id(sid: i32) -> Result<Podcast, DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
podcast
|
||||
.filter(source_id.eq(sid))
|
||||
.get_result::<Podcast>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.filter(title.eq(title_))
|
||||
.filter(podcast_id.eq(pid))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_episode_minimal_from_pk(
|
||||
title_: &str,
|
||||
pid: i32,
|
||||
) -> Result<EpisodeMinimal, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episode
|
||||
.select((rowid, title, uri, epoch, duration, guid, podcast_id))
|
||||
.filter(title.eq(title_))
|
||||
.filter(podcast_id.eq(pid))
|
||||
.get_result::<EpisodeMinimal>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn remove_feed(pd: &Podcast) -> Result<(), DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
con.transaction(|| {
|
||||
delete_source(&con, pd.source_id())?;
|
||||
delete_podcast(&con, pd.id())?;
|
||||
delete_podcast_episodes(&con, pd.id())?;
|
||||
info!("Feed removed from the Database.");
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
|
||||
use schema::source::dsl::*;
|
||||
|
||||
diesel::delete(source.filter(id.eq(source_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast(con: &SqliteConnection, podcast_id: i32) -> QueryResult<usize> {
|
||||
use schema::podcast::dsl::*;
|
||||
|
||||
diesel::delete(podcast.filter(id.eq(podcast_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
|
||||
use schema::episode::dsl::*;
|
||||
|
||||
diesel::delete(episode.filter(podcast_id.eq(parent_id))).execute(con)
|
||||
}
|
||||
|
||||
pub fn source_exists(url: &str) -> Result<bool, DataError> {
|
||||
use schema::source::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(source.filter(uri.eq(url))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(podcast.filter(source_id.eq(source_id_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[cfg_attr(rustfmt, rustfmt_skip)]
|
||||
pub(crate) fn episode_exists(title_: &str, podcast_id_: i32) -> Result<bool, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(episode.filter(podcast_id.eq(podcast_id_)).filter(title.eq(title_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
diesel::insert_into(episode)
|
||||
.values(eps)
|
||||
.execute(&*con)
|
||||
.map_err(From::from)
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
pub fn update_none_to_played_now(parent: &Podcast) -> Result<usize, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
let epoch_now = Utc::now().timestamp() as i32;
|
||||
con.transaction(|| {
|
||||
diesel::update(Episode::belonging_to(parent).filter(played.is_null()))
|
||||
.set(played.eq(Some(epoch_now)))
|
||||
.execute(&con)
|
||||
.map_err(From::from)
|
||||
})
|
||||
}
|
||||
@ -1,107 +0,0 @@
|
||||
use diesel;
|
||||
use diesel::r2d2;
|
||||
use diesel_migrations::RunMigrationsError;
|
||||
use hyper;
|
||||
use native_tls;
|
||||
// use rss;
|
||||
use url;
|
||||
|
||||
use std::io;
|
||||
// use std::fmt;
|
||||
|
||||
// NOTE: rss::Error is not Sync, so it can't be wrapped directly here.
|
||||
// #[derive(Fail, Debug)]
|
||||
// #[fail(display = "RSS Error: {}", _0)]
|
||||
// struct RSSError(rss::Error);
|
||||
|
||||
#[derive(Fail, Debug)]
|
||||
pub enum DataError {
|
||||
#[fail(display = "SQL Query failed: {}", _0)]
|
||||
DieselResultError(#[cause] diesel::result::Error),
|
||||
#[fail(display = "Database Migration error: {}", _0)]
|
||||
DieselMigrationError(#[cause] RunMigrationsError),
|
||||
#[fail(display = "R2D2 error: {}", _0)]
|
||||
R2D2Error(#[cause] r2d2::Error),
|
||||
#[fail(display = "R2D2 Pool error: {}", _0)]
|
||||
R2D2PoolError(#[cause] r2d2::PoolError),
|
||||
#[fail(display = "Hyper Error: {}", _0)]
|
||||
HyperError(#[cause] hyper::Error),
|
||||
#[fail(display = "Failed to parse a url: {}", _0)]
|
||||
// TODO: print the url too
|
||||
UrlError(#[cause] url::ParseError),
|
||||
#[fail(display = "TLS Error: {}", _0)]
|
||||
TLSError(#[cause] native_tls::Error),
|
||||
#[fail(display = "IO Error: {}", _0)]
|
||||
IOError(#[cause] io::Error),
|
||||
#[fail(display = "RSS Error: {}", _0)]
|
||||
// Rss::Error is not yet Sync
|
||||
RssCrateError(String),
|
||||
#[fail(display = "Error: {}", _0)]
|
||||
Bail(String),
|
||||
#[fail(display = "Request to {} returned {}. Context: {}", url, status_code, context)]
|
||||
HttpStatusError {
|
||||
url: String,
|
||||
status_code: hyper::StatusCode,
|
||||
context: String,
|
||||
},
|
||||
#[fail(display = "Error occured while Parsing an Episode. Reason: {}", reason)]
|
||||
ParseEpisodeError { reason: String, parent_id: i32 },
|
||||
#[fail(display = "No Futures where produced to be run.")]
|
||||
EmptyFuturesList,
|
||||
#[fail(display = "Episode was not changed and thus skipped.")]
|
||||
EpisodeNotChanged,
|
||||
}
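// Note: the From impls below are what let call sites use `?` on diesel, r2d2,
// hyper, url, native-tls and io results inside functions returning
// Result<_, DataError>, converting the underlying error automatically.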
|
||||
|
||||
impl From<RunMigrationsError> for DataError {
|
||||
fn from(err: RunMigrationsError) -> Self {
|
||||
DataError::DieselMigrationError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<diesel::result::Error> for DataError {
|
||||
fn from(err: diesel::result::Error) -> Self {
|
||||
DataError::DieselResultError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<r2d2::Error> for DataError {
|
||||
fn from(err: r2d2::Error) -> Self {
|
||||
DataError::R2D2Error(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<r2d2::PoolError> for DataError {
|
||||
fn from(err: r2d2::PoolError) -> Self {
|
||||
DataError::R2D2PoolError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<hyper::Error> for DataError {
|
||||
fn from(err: hyper::Error) -> Self {
|
||||
DataError::HyperError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<url::ParseError> for DataError {
|
||||
fn from(err: url::ParseError) -> Self {
|
||||
DataError::UrlError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<native_tls::Error> for DataError {
|
||||
fn from(err: native_tls::Error) -> Self {
|
||||
DataError::TLSError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for DataError {
|
||||
fn from(err: io::Error) -> Self {
|
||||
DataError::IOError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for DataError {
|
||||
fn from(err: String) -> Self {
|
||||
DataError::Bail(err)
|
||||
}
|
||||
}
|
||||
@ -1,232 +0,0 @@
|
||||
//! Index Feeds.
|
||||
|
||||
use futures::future::*;
|
||||
use itertools::{Either, Itertools};
|
||||
use rss;
|
||||
|
||||
use dbqueries;
|
||||
use errors::DataError;
|
||||
use models::{Index, IndexState, Update};
|
||||
use models::{NewEpisode, NewPodcast, Podcast};
|
||||
use pipeline::*;
|
||||
|
||||
type InsertUpdate = (Vec<NewEpisode>, Vec<Option<(NewEpisode, i32)>>);
|
||||
|
||||
/// Wrapper struct that hold a `Source` id and the `rss::Channel`
|
||||
/// that corresponds to the `Source.uri` field.
|
||||
#[derive(Debug, Clone, Builder, PartialEq)]
|
||||
#[builder(derive(Debug))]
|
||||
#[builder(setter(into))]
|
||||
pub struct Feed {
|
||||
/// The `rss::Channel` parsed from the `Source` uri.
|
||||
channel: rss::Channel,
|
||||
/// The `Source` id where the xml `rss::Channel` came from.
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Feed {
|
||||
/// Index the contents of the RSS `Feed` into the database.
|
||||
pub fn index(self) -> Box<Future<Item = (), Error = DataError> + Send> {
|
||||
let fut = self.parse_podcast_async()
|
||||
.and_then(|pd| pd.to_podcast())
|
||||
.and_then(move |pd| self.index_channel_items(&pd));
|
||||
|
||||
Box::new(fut)
|
||||
}
|
||||
|
||||
fn parse_podcast(&self) -> NewPodcast {
|
||||
NewPodcast::new(&self.channel, self.source_id)
|
||||
}
|
||||
|
||||
fn parse_podcast_async(&self) -> Box<Future<Item = NewPodcast, Error = DataError> + Send> {
|
||||
Box::new(ok(self.parse_podcast()))
|
||||
}
|
||||
|
||||
fn index_channel_items(
|
||||
&self,
|
||||
pd: &Podcast,
|
||||
) -> Box<Future<Item = (), Error = DataError> + Send> {
|
||||
let fut = self.get_stuff(pd)
|
||||
.and_then(|(insert, update)| {
|
||||
if !insert.is_empty() {
|
||||
info!("Indexing {} episodes.", insert.len());
|
||||
if let Err(err) = dbqueries::index_new_episodes(insert.as_slice()) {
|
||||
error!("Failed batch indexng, Fallign back to individual indexing.");
|
||||
error!("{}", err);
|
||||
insert.iter().for_each(|ep| {
|
||||
if let Err(err) = ep.index() {
|
||||
error!("Failed to index episode: {:?}.", ep.title());
|
||||
error!("{}", err);
|
||||
};
|
||||
})
|
||||
}
|
||||
}
|
||||
Ok((insert, update))
|
||||
})
|
||||
.map(|(_, update)| {
|
||||
if !update.is_empty() {
|
||||
info!("Updating {} episodes.", update.len());
|
||||
// see get_stuff for more
|
||||
update
|
||||
.into_iter()
|
||||
.filter_map(|x| x)
|
||||
.for_each(|(ref ep, rowid)| {
|
||||
if let Err(err) = ep.update(rowid) {
|
||||
error!("Failed to index episode: {:?}.", ep.title());
|
||||
error!("{}", err);
|
||||
};
|
||||
})
|
||||
}
|
||||
});
|
||||
|
||||
Box::new(fut)
|
||||
}
|
||||
|
||||
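// Splits the channel items into episodes that need to be inserted (new) and
// episodes that already exist but changed and need an update; unchanged
// episodes are filtered out via the EpisodeNotChanged error.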
fn get_stuff(
|
||||
&self,
|
||||
pd: &Podcast,
|
||||
) -> Box<Future<Item = InsertUpdate, Error = DataError> + Send> {
|
||||
let (insert, update): (Vec<_>, Vec<_>) = self.channel
|
||||
.items()
|
||||
.into_iter()
|
||||
.map(|item| glue_async(item, pd.id()))
|
||||
// This is sort of ugly, but I think it's cheaper than pushing None
// to the update vec and filtering it out later,
// even though we already filter_map in index_channel_items.
// I am not sure what the optimizations are on match vs allocating None.
|
||||
.map(|fut| {
|
||||
fut.and_then(|x| match x {
|
||||
IndexState::NotChanged => return Err(DataError::EpisodeNotChanged),
|
||||
_ => Ok(x),
|
||||
})
|
||||
})
|
||||
.flat_map(|fut| fut.wait())
|
||||
.partition_map(|state| match state {
|
||||
IndexState::Index(e) => Either::Left(e),
|
||||
IndexState::Update(e) => Either::Right(Some(e)),
|
||||
// This should never occur
|
||||
IndexState::NotChanged => Either::Right(None),
|
||||
});
|
||||
|
||||
Box::new(ok((insert, update)))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rss::Channel;
|
||||
use tokio_core::reactor::Core;
|
||||
|
||||
use Source;
|
||||
use database::truncate_db;
|
||||
use dbqueries;
|
||||
use utils::get_feed;
|
||||
|
||||
use std::fs;
|
||||
use std::io::BufReader;
|
||||
|
||||
use super::*;
|
||||
|
||||
// (path, url) tuples.
|
||||
const URLS: &[(&str, &str)] = {
|
||||
&[
|
||||
(
|
||||
"tests/feeds/2018-01-20-Intercepted.xml",
|
||||
"https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-LinuxUnplugged.xml",
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
|
||||
com/linuxunplugged",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-TheTipOff.xml",
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-StealTheStars.xml",
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.\
|
||||
com/steal-the-stars",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-GreaterThanCode.xml",
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
|
||||
com/feed/podcast",
|
||||
),
|
||||
]
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_complete_index() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
let feeds: Vec<_> = URLS.iter()
|
||||
.map(|&(path, url)| {
|
||||
// Create and insert a Source into db
|
||||
let s = Source::from_url(url).unwrap();
|
||||
get_feed(path, s.id())
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut core = Core::new().unwrap();
|
||||
// Index the channels
|
||||
let list: Vec<_> = feeds.into_iter().map(|x| x.index()).collect();
|
||||
let _foo = core.run(join_all(list));
|
||||
|
||||
// Assert the index rows equal the controlled results
|
||||
assert_eq!(dbqueries::get_sources().unwrap().len(), 5);
|
||||
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
|
||||
assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_feed_parse_podcast() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
let path = "tests/feeds/2018-01-20-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
|
||||
let file = fs::File::open(path).unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(feed.parse_podcast(), pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_feed_index_channel_items() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
let path = "tests/feeds/2018-01-20-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
let pd = feed.parse_podcast().to_podcast().unwrap();
|
||||
|
||||
feed.index_channel_items(&pd).wait().unwrap();
|
||||
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 1);
|
||||
assert_eq!(dbqueries::get_episodes().unwrap().len(), 43);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_feed_get_stuff() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
let path = "tests/feeds/2018-01-20-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
let pd = feed.parse_podcast().to_podcast().unwrap();
|
||||
|
||||
let (insert, update) = feed.get_stuff(&pd).wait().unwrap();
|
||||
assert_eq!(43, insert.len());
|
||||
assert_eq!(0, update.len());
|
||||
|
||||
feed.index().wait().unwrap();
|
||||
|
||||
let path = "tests/feeds/2018-02-03-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
let pd = feed.parse_podcast().to_podcast().unwrap();
|
||||
|
||||
let (insert, update) = feed.get_stuff(&pd).wait().unwrap();
|
||||
assert_eq!(4, insert.len());
|
||||
assert_eq!(43, update.len());
|
||||
}
|
||||
}
|
||||
@ -1,101 +0,0 @@
|
||||
#![recursion_limit = "1024"]
|
||||
#![cfg_attr(all(test, feature = "clippy"), allow(option_unwrap_used, result_unwrap_used))]
|
||||
#![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
|
||||
#![cfg_attr(feature = "clippy",
|
||||
warn(option_unwrap_used, result_unwrap_used, print_stdout,
|
||||
wrong_pub_self_convention, mut_mut, non_ascii_literal, similar_names,
|
||||
unicode_not_nfc, enum_glob_use, if_not_else, items_after_statements,
|
||||
used_underscore_binding))]
|
||||
#![allow(unknown_lints)]
|
||||
#![deny(bad_style, const_err, dead_code, improper_ctypes, legacy_directory_ownership,
|
||||
non_shorthand_field_patterns, no_mangle_generic_items, overflowing_literals,
|
||||
path_statements, patterns_in_fns_without_body, plugin_as_library, private_in_public,
|
||||
private_no_mangle_fns, private_no_mangle_statics, safe_extern_statics,
|
||||
unconditional_recursion, unions_with_drop_fields, unused_allocation, unused_comparisons,
|
||||
unused_parens, while_true)]
|
||||
#![deny(missing_debug_implementations, missing_docs, trivial_casts, trivial_numeric_casts)]
|
||||
#![deny(unused_extern_crates, unused)]
|
||||
|
||||
// #![feature(conservative_impl_trait)]
|
||||
|
||||
//! FIXME: Docs
|
||||
|
||||
#[macro_use]
|
||||
extern crate derive_builder;
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate diesel_migrations;
|
||||
// #[macro_use]
|
||||
extern crate failure;
|
||||
#[macro_use]
|
||||
extern crate failure_derive;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
extern crate ammonia;
|
||||
extern crate chrono;
|
||||
extern crate futures;
|
||||
extern crate futures_cpupool;
|
||||
extern crate hyper;
|
||||
extern crate hyper_tls;
|
||||
extern crate itertools;
|
||||
extern crate native_tls;
|
||||
extern crate num_cpus;
|
||||
extern crate rayon;
|
||||
extern crate rfc822_sanitizer;
|
||||
extern crate rss;
|
||||
extern crate tokio_core;
|
||||
extern crate url;
|
||||
extern crate xdg;
|
||||
|
||||
#[allow(missing_docs)]
|
||||
pub mod dbqueries;
|
||||
#[allow(missing_docs)]
|
||||
pub mod errors;
|
||||
pub mod utils;
|
||||
pub mod database;
|
||||
pub mod pipeline;
|
||||
pub(crate) mod models;
|
||||
mod feed;
|
||||
mod parser;
|
||||
mod schema;
|
||||
|
||||
pub use feed::{Feed, FeedBuilder};
|
||||
pub use models::{Episode, EpisodeWidgetQuery, Podcast, PodcastCoverQuery, Source};
|
||||
pub use models::Save;
|
||||
|
||||
/// [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
|
||||
#[allow(missing_debug_implementations)]
|
||||
pub mod xdg_dirs {
|
||||
use std::path::PathBuf;
|
||||
use xdg;
|
||||
|
||||
lazy_static!{
|
||||
pub(crate) static ref HAMMOND_XDG: xdg::BaseDirectories = {
|
||||
xdg::BaseDirectories::with_prefix("hammond").unwrap()
|
||||
};
|
||||
|
||||
/// XDG_DATA Directory `Pathbuf`.
|
||||
pub static ref HAMMOND_DATA: PathBuf = {
|
||||
HAMMOND_XDG.create_data_directory(HAMMOND_XDG.get_data_home()).unwrap()
|
||||
};
|
||||
|
||||
/// XDG_CONFIG Directory `Pathbuf`.
|
||||
pub static ref HAMMOND_CONFIG: PathBuf = {
|
||||
HAMMOND_XDG.create_config_directory(HAMMOND_XDG.get_config_home()).unwrap()
|
||||
};
|
||||
|
||||
/// XDG_CACHE Directory `Pathbuf`.
|
||||
pub static ref HAMMOND_CACHE: PathBuf = {
|
||||
HAMMOND_XDG.create_cache_directory(HAMMOND_XDG.get_cache_home()).unwrap()
|
||||
};
|
||||
|
||||
/// Hammond Download Directory `PathBuf`.
|
||||
pub static ref DL_DIR: PathBuf = {
|
||||
HAMMOND_XDG.create_data_directory("Downloads").unwrap()
|
||||
};
|
||||
}
|
||||
}
|
||||
@ -1,50 +0,0 @@
|
||||
mod new_episode;
|
||||
mod new_podcast;
|
||||
mod new_source;
|
||||
|
||||
mod episode;
|
||||
mod podcast;
|
||||
mod source;
|
||||
|
||||
// use futures::prelude::*;
|
||||
// use futures::future::*;
|
||||
|
||||
pub(crate) use self::episode::EpisodeCleanerQuery;
|
||||
pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
|
||||
pub(crate) use self::new_podcast::NewPodcast;
|
||||
pub(crate) use self::new_source::NewSource;
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_episode::NewEpisodeBuilder;
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_podcast::NewPodcastBuilder;
|
||||
|
||||
pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetQuery};
|
||||
pub use self::podcast::{Podcast, PodcastCoverQuery};
|
||||
pub use self::source::Source;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum IndexState<T> {
|
||||
Index(T),
|
||||
Update((T, i32)),
|
||||
NotChanged,
|
||||
}
|
||||
|
||||
pub trait Insert<T, E> {
|
||||
fn insert(&self) -> Result<T, E>;
|
||||
}
|
||||
|
||||
pub trait Update<T, E> {
|
||||
fn update(&self, i32) -> Result<T, E>;
|
||||
}
|
||||
|
||||
// This might need to change in the future
|
||||
pub trait Index<T, E>: Insert<T, E> + Update<T, E> {
|
||||
fn index(&self) -> Result<T, E>;
|
||||
}
|
||||
|
||||
/// FIXME: DOCS
|
||||
pub trait Save<T, E> {
|
||||
/// Helper method to easily save/"sync" current state of a diesel model to the Database.
|
||||
fn save(&self) -> Result<T, E>;
|
||||
}
|
||||
@ -1,420 +0,0 @@
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
|
||||
use ammonia;
|
||||
use rss;
|
||||
|
||||
use errors::DataError;
|
||||
use models::{Index, Insert, Update};
|
||||
use models::Podcast;
|
||||
use schema::podcast;
|
||||
|
||||
use database::connection;
|
||||
use dbqueries;
|
||||
use utils::{replace_extra_spaces, url_cleaner};
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "podcast"]
|
||||
#[derive(Debug, Clone, Default, Builder, PartialEq)]
|
||||
#[builder(default)]
|
||||
#[builder(derive(Debug))]
|
||||
#[builder(setter(into))]
|
||||
pub(crate) struct NewPodcast {
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Insert<(), DataError> for NewPodcast {
|
||||
fn insert(&self) -> Result<(), DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
diesel::insert_into(podcast)
|
||||
.values(self)
|
||||
.execute(&con)
|
||||
.map(|_| ())
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Update<(), DataError> for NewPodcast {
|
||||
fn update(&self, podcast_id: i32) -> Result<(), DataError> {
|
||||
use schema::podcast::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
info!("Updating {}", self.title);
|
||||
diesel::update(podcast.filter(id.eq(podcast_id)))
|
||||
.set(self)
|
||||
.execute(&con)
|
||||
.map(|_| ())
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Maybe return an Enum<Action(Result)> instead.
// It would make unit testing better too.
|
||||
impl Index<(), DataError> for NewPodcast {
|
||||
fn index(&self) -> Result<(), DataError> {
|
||||
let exists = dbqueries::podcast_exists(self.source_id)?;
|
||||
|
||||
if exists {
|
||||
let other = dbqueries::get_podcast_from_source_id(self.source_id)?;
|
||||
|
||||
if self != &other {
|
||||
self.update(other.id())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
self.insert()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Podcast> for NewPodcast {
|
||||
fn eq(&self, other: &Podcast) -> bool {
|
||||
(self.link() == other.link()) && (self.title() == other.title())
|
||||
&& (self.image_uri() == other.image_uri())
|
||||
&& (self.description() == other.description())
|
||||
&& (self.source_id() == other.source_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl NewPodcast {
|
||||
/// Parses a `rss::Channel` into a `NewPodcast` Struct.
|
||||
pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewPodcast {
|
||||
let title = chan.title().trim();
|
||||
|
||||
// Prefer itunes summary over rss.description since many feeds put html into
|
||||
// rss.description.
|
||||
let summary = chan.itunes_ext().map(|s| s.summary()).and_then(|s| s);
|
||||
let description = if let Some(sum) = summary {
|
||||
replace_extra_spaces(&ammonia::clean(sum))
|
||||
} else {
|
||||
replace_extra_spaces(&ammonia::clean(chan.description()))
|
||||
};
|
||||
|
||||
let link = url_cleaner(chan.link());
|
||||
let x = chan.itunes_ext().map(|s| s.image());
|
||||
let image_uri = if let Some(img) = x {
|
||||
img.map(|s| s.to_owned())
|
||||
} else {
|
||||
chan.image().map(|foo| foo.url().to_owned())
|
||||
};
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title(title)
|
||||
.description(description)
|
||||
.link(link)
|
||||
.image_uri(image_uri)
|
||||
.source_id(source_id)
|
||||
.build()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
// Look out for when TryInto lands in stable.
|
||||
pub(crate) fn to_podcast(&self) -> Result<Podcast, DataError> {
|
||||
self.index()?;
|
||||
dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore the following getters. They are mainly used in unit tests.
|
||||
impl NewPodcast {
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
|
||||
pub(crate) fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
pub(crate) fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
pub(crate) fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
pub(crate) fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
// use tokio_core::reactor::Core;
|
||||
|
||||
use rss::Channel;
|
||||
|
||||
use database::truncate_db;
|
||||
use models::{NewPodcastBuilder, Save};
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
|
||||
// Pre-built expected NewPodcast structs.
|
||||
lazy_static!{
|
||||
static ref EXPECTED_INTERCEPTED: NewPodcast = {
|
||||
let descr = "The people behind The Intercept’s fearless reporting and incisive \
|
||||
commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and others—discuss \
|
||||
the crucial issues of our time: national security, civil liberties, foreign \
|
||||
policy, and criminal justice. Plus interviews with artists, thinkers, and \
|
||||
newsmakers who challenge our preconceptions about the world we live in.";
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title("Intercepted with Jeremy Scahill")
|
||||
.link("https://theintercept.com/podcasts")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
|
||||
uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
|
||||
2FIntercepted_COVER%2B_281_29.png")
|
||||
))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_LUP: NewPodcast = {
|
||||
let descr = "An open show powered by community LINUX Unplugged takes the best attributes \
|
||||
of open collaboration and focuses them into a weekly lifestyle show about \
|
||||
Linux.";
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title("LINUX Unplugged Podcast")
|
||||
.link("http://www.jupiterbroadcasting.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://www.jupiterbroadcasting.com/images/LASUN-Badge1400.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_TIPOFF: NewPodcast = {
|
||||
let desc = "Welcome to The Tip Off- the podcast where we take you behind the scenes of \
|
||||
some of the best investigative journalism from recent years. Each episode \
|
||||
we’ll be digging into an investigative scoop- hearing from the journalists \
|
||||
behind the work as they tell us about the leads, the dead-ends and of course, \
|
||||
the tip offs. There’ll be car chases, slammed doors, terrorist cells, \
|
||||
meetings in dimly lit bars and cafes, wrangling with despotic regimes and \
|
||||
much more. So if you’re curious about the fun, complicated detective work \
|
||||
that goes into doing great investigative journalism- then this is the podcast \
|
||||
for you.";
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title("The Tip Off")
|
||||
.link("http://www.acast.com/thetipoff")
|
||||
.description(desc)
|
||||
.image_uri(Some(String::from(
|
||||
"https://imagecdn.acast.com/image?h=1500&w=1500&source=http%3A%2F%2Fi1.sndcdn.\
|
||||
com%2Favatars-000317856075-a2coqz-original.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
|
||||
};
|
||||
|
||||
static ref EXPECTED_STARS: NewPodcast = {
|
||||
let descr = "<p>The first audio drama from Tor Labs and Gideon Media, Steal the Stars is \
|
||||
a gripping noir science fiction thriller in 14 episodes: Forbidden love, a \
|
||||
crashed UFO, an alien body, and an impossible heist unlike any ever \
|
||||
attempted - scripted by Mac Rogers, the award-winning playwright and writer \
|
||||
of the multi-million download The Message and LifeAfter.</p>";
|
||||
let img = "https://dfkfj8j276wwv.cloudfront.net/images/2c/5f/a0/1a/2c5fa01a-ae78-4a8c-\
|
||||
b183-7311d2e436c3/b3a4aa57a576bb662191f2a6bc2a436c8c4ae256ecffaff5c4c54fd42e\
|
||||
923914941c264d01efb1833234b52c9530e67d28a8cebbe3d11a4bc0fbbdf13ecdf1c3.jpeg";
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title("Steal the Stars")
|
||||
.link("http://tor-labs.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(img)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_CODE: NewPodcast = {
|
||||
let descr = "A podcast about humans and technology. Panelists: Coraline Ada Ehmke, David \
|
||||
Brady, Jessica Kerr, Jay Bobo, Astrid Countee and Sam Livingston-Gray. \
|
||||
Brought to you by @therubyrep.";
|
||||
|
||||
NewPodcastBuilder::default()
|
||||
.title("Greater Than Code")
|
||||
.link("https://www.greaterthancode.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://www.greaterthancode.com/wp-content/uploads/2016/10/code1400-4.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref UPDATED_DESC_INTERCEPTED: NewPodcast = {
|
||||
NewPodcastBuilder::default()
|
||||
.title("Intercepted with Jeremy Scahill")
|
||||
.link("https://theintercept.com/podcasts")
|
||||
.description("New Description")
|
||||
.image_uri(Some(String::from(
|
||||
"http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
|
||||
uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
|
||||
2FIntercepted_COVER%2B_281_29.png")
|
||||
))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_intercepted() {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_INTERCEPTED, pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_lup() {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_LUP, pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_thetipoff() {
|
||||
let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_TIPOFF, pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_steal_the_stars() {
|
||||
let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_STARS, pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_greater_than_code() {
|
||||
let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let pd = NewPodcast::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_CODE, pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This could maybe be a doc test on insert.
|
||||
fn test_new_podcast_insert() {
|
||||
truncate_db().unwrap();
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
|
||||
let npd = NewPodcast::new(&channel, 42);
|
||||
npd.insert().unwrap();
|
||||
let pd = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
|
||||
assert_eq!(npd, pd);
|
||||
assert_eq!(*EXPECTED_INTERCEPTED, npd);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED, &pd);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// TODO: Add more test/checks
|
||||
// Currently there's a test that only checks new description or title.
|
||||
// If you have time and want to help, implement the test for the other fields too.
|
||||
fn test_new_podcast_update() {
|
||||
truncate_db().unwrap();
|
||||
let old = EXPECTED_INTERCEPTED.to_podcast().unwrap();
|
||||
|
||||
let updated = &*UPDATED_DESC_INTERCEPTED;
|
||||
updated.update(old.id()).unwrap();
|
||||
let mut new = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
|
||||
assert_ne!(old, new);
|
||||
assert_eq!(old.id(), new.id());
|
||||
assert_eq!(old.source_id(), new.source_id());
|
||||
assert_eq!(updated, &new);
|
||||
assert_ne!(updated, &old);
|
||||
|
||||
// Check that the update does not override user preferences.
|
||||
new.set_archive(true);
|
||||
new.save().unwrap();
|
||||
|
||||
let new2 = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
assert_eq!(true, new2.archive());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_index() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
// First insert
|
||||
assert!(EXPECTED_INTERCEPTED.index().is_ok());
|
||||
// Second identical run; this should take the early-return path.
|
||||
assert!(EXPECTED_INTERCEPTED.index().is_ok());
|
||||
// Get the podcast
|
||||
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
// Assert that NewPodcast is equal to the Indexed one
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED, &old);
|
||||
|
||||
let updated = &*UPDATED_DESC_INTERCEPTED;
|
||||
|
||||
// Update the podcast
|
||||
assert!(updated.index().is_ok());
|
||||
// Get the new Podcast
|
||||
let new = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
// Assert it's different from the old one.
|
||||
assert_ne!(new, old);
|
||||
assert_eq!(new.id(), old.id());
|
||||
assert_eq!(new.source_id(), old.source_id());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_podcast() {
|
||||
// Assert insert() produces the same result that you would get with to_podcast()
|
||||
truncate_db().unwrap();
|
||||
EXPECTED_INTERCEPTED.insert().unwrap();
|
||||
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
|
||||
assert_eq!(old, pd);
|
||||
|
||||
// Same as above, different order.
|
||||
truncate_db().unwrap();
|
||||
let pd = EXPECTED_INTERCEPTED.to_podcast().unwrap();
|
||||
// This should error as a unique constraint violation.
|
||||
assert!(EXPECTED_INTERCEPTED.insert().is_err());
|
||||
let mut old = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
assert_eq!(old, pd);
|
||||
|
||||
old.set_archive(true);
|
||||
old.save().unwrap();
|
||||
|
||||
// Assert that it does not mess with user preferences
|
||||
let pd = UPDATED_DESC_INTERCEPTED.to_podcast().unwrap();
|
||||
let old = dbqueries::get_podcast_from_source_id(42).unwrap();
|
||||
assert_eq!(old, pd);
|
||||
assert_eq!(old.archive(), true);
|
||||
}
|
||||
}
|
||||
@ -1,170 +0,0 @@
|
||||
use diesel::SaveChangesDsl;
|
||||
|
||||
use database::connection;
|
||||
use errors::DataError;
|
||||
use models::{Save, Source};
|
||||
use schema::podcast;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
|
||||
#[belongs_to(Source, foreign_key = "source_id")]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[table_name = "podcast"]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the podcast table.
|
||||
pub struct Podcast {
|
||||
id: i32,
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
always_dl: bool,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Save<Podcast, DataError> for Podcast {
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
fn save(&self) -> Result<Podcast, DataError> {
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
self.save_changes::<Podcast>(&*tempdb).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Podcast {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the Feed `link`.
|
||||
///
|
||||
/// Usually the website/homepage of the content creator.
|
||||
pub fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
/// Set the Podcast/Feed `link`.
|
||||
pub fn set_link(&mut self, value: &str) {
|
||||
self.link = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
/// Set the `description`.
|
||||
pub fn set_description(&mut self, value: &str) {
|
||||
self.description = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri (usually a url) where the Feed cover image is located.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `image_uri`.
|
||||
pub fn set_image_uri(&mut self, value: Option<&str>) {
|
||||
self.image_uri = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Represents the archiving policy for the episode.
|
||||
pub fn archive(&self) -> bool {
|
||||
self.archive
|
||||
}
|
||||
|
||||
/// Set the `archive` policy.
|
||||
pub fn set_archive(&mut self, b: bool) {
|
||||
self.archive = b
|
||||
}
|
||||
|
||||
/// Get the `favorite` status of the `Podcast` Feed.
|
||||
pub fn favorite(&self) -> bool {
|
||||
self.favorite
|
||||
}
|
||||
|
||||
/// Set `favorite` status.
|
||||
pub fn set_favorite(&mut self, b: bool) {
|
||||
self.favorite = b
|
||||
}
|
||||
|
||||
/// Represents the download policy for the `Podcast` Feed.
|
||||
///
|
||||
/// Reserved for use with a Download manager, yet to be implemented.
|
||||
///
|
||||
/// If true, Podcast Episodes should be downloaded automatically, skipping
|
||||
/// the selection queue.
|
||||
pub fn always_download(&self) -> bool {
|
||||
self.always_dl
|
||||
}
|
||||
|
||||
/// Set the download policy.
|
||||
pub fn set_always_download(&mut self, b: bool) {
|
||||
self.always_dl = b
|
||||
}
|
||||
|
||||
/// `Source` table foreign key.
|
||||
pub fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, Debug, Clone)]
|
||||
/// Diesel Model of the podcast cover query.
|
||||
/// Used for fetching information about a Podcast's cover.
|
||||
pub struct PodcastCoverQuery {
|
||||
id: i32,
|
||||
title: String,
|
||||
image_uri: Option<String>,
|
||||
}
|
||||
|
||||
impl From<Podcast> for PodcastCoverQuery {
|
||||
fn from(p: Podcast) -> PodcastCoverQuery {
|
||||
PodcastCoverQuery {
|
||||
id: p.id(),
|
||||
title: p.title,
|
||||
image_uri: p.image_uri,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Arc<Podcast>> for PodcastCoverQuery {
|
||||
fn from(p: Arc<Podcast>) -> PodcastCoverQuery {
|
||||
PodcastCoverQuery {
|
||||
id: p.id(),
|
||||
title: p.title.clone(),
|
||||
image_uri: p.image_uri.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PodcastCoverQuery {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri (usually a url) where the Feed cover image is located.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
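A small usage sketch of the From conversions above, assuming a pre-existing Podcast value named podcast: the UI side only needs id/title/image_uri to fetch a cover, so a full Podcast (or Arc<Podcast>) is downgraded first.

// `podcast` is any Podcast loaded from the db; only the cover-related fields survive.
let query: PodcastCoverQuery = podcast.into();
println!("{} -> {:?}", query.title(), query.image_uri());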
|
||||
@ -1,316 +0,0 @@
|
||||
use diesel::SaveChangesDsl;
|
||||
// use failure::ResultExt;
|
||||
use rss::Channel;
|
||||
use url::Url;
|
||||
|
||||
use hyper::{Client, Method, Request, Response, StatusCode, Uri};
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::header::{ETag, EntityTag, HttpDate, IfModifiedSince, IfNoneMatch, LastModified,
|
||||
Location};
|
||||
use hyper_tls::HttpsConnector;
|
||||
|
||||
// use futures::future::ok;
|
||||
use futures::prelude::*;
|
||||
use futures_cpupool::CpuPool;
|
||||
|
||||
use database::connection;
|
||||
use errors::DataError;
|
||||
use feed::{Feed, FeedBuilder};
|
||||
use models::{NewSource, Save};
|
||||
use schema::source;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
|
||||
#[table_name = "source"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the source table.
|
||||
pub struct Source {
|
||||
id: i32,
|
||||
uri: String,
|
||||
last_modified: Option<String>,
|
||||
http_etag: Option<String>,
|
||||
}
|
||||
|
||||
impl Save<Source, DataError> for Source {
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
fn save(&self) -> Result<Source, DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
self.save_changes::<Source>(&con).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Source {
|
||||
/// Get the source `id` column.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Represents the location (usually a url) of the Feed xml file.
|
||||
pub fn uri(&self) -> &str {
|
||||
&self.uri
|
||||
}
|
||||
|
||||
/// Set the `uri` field value.
|
||||
pub fn set_uri(&mut self, uri: String) {
|
||||
self.uri = uri;
|
||||
}
|
||||
|
||||
/// Represents the Http Last-Modified Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn last_modified(&self) -> Option<&str> {
|
||||
self.last_modified.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `last_modified` value.
|
||||
pub fn set_last_modified(&mut self, value: Option<String>) {
|
||||
// self.last_modified = value.map(|x| x.to_string());
|
||||
self.last_modified = value;
|
||||
}
|
||||
|
||||
/// Represents the Http Etag Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn http_etag(&self) -> Option<&str> {
|
||||
self.http_etag.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `http_etag` value.
|
||||
pub fn set_http_etag(&mut self, value: Option<&str>) {
|
||||
self.http_etag = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Extract the ETag and Last-Modified headers from res, and update self and the
|
||||
/// corresponding db row.
|
||||
fn update_etag(&mut self, res: &Response) -> Result<(), DataError> {
|
||||
let headers = res.headers();
|
||||
|
||||
let etag = headers.get::<ETag>().map(|x| x.tag());
|
||||
let lmod = headers.get::<LastModified>().map(|x| format!("{}", x));
|
||||
|
||||
if (self.http_etag() != etag) || (self.last_modified != lmod) {
|
||||
self.set_http_etag(etag);
|
||||
self.set_last_modified(lmod);
|
||||
self.save()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO match on more stuff
|
||||
// 301: Moved Permanently
|
||||
// 304: Up to date Feed, checked with the Etag
|
||||
// 307: Temporary redirect of the url
|
||||
// 308: Permanent redirect of the url
|
||||
// 401: Unauthorized
|
||||
// 403: Forbidden
|
||||
// 408: Timeout
|
||||
// 410: Feed deleted
|
||||
// TODO: Rethink this api.
|
||||
fn match_status(mut self, res: Response) -> Result<(Self, Response), DataError> {
|
||||
self.update_etag(&res)?;
|
||||
let code = res.status();
|
||||
match code {
|
||||
StatusCode::NotModified => {
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("304: skipping.."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
StatusCode::MovedPermanently => {
|
||||
error!("Feed was moved permanently.");
|
||||
self.handle_301(&res)?;
|
||||
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("301: Feed was moved permanently."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
StatusCode::TemporaryRedirect => debug!("307: Temporary Redirect."),
|
||||
StatusCode::PermanentRedirect => warn!("308: Permanent Redirect."),
|
||||
StatusCode::Unauthorized => {
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("401: Unauthorized."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
StatusCode::Forbidden => {
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("403: Forbidden."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
StatusCode::NotFound => return Err(format!("404: Not found.")).map_err(From::from),
|
||||
StatusCode::RequestTimeout => {
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("408: Request Timeout."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
StatusCode::Gone => {
|
||||
let err = DataError::HttpStatusError {
|
||||
url: self.uri,
|
||||
status_code: code,
|
||||
context: format!("410: Feed was deleted.."),
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
}
|
||||
_ => info!("HTTP StatusCode: {}", code),
|
||||
};
|
||||
Ok((self, res))
|
||||
}
|
||||
|
||||
fn handle_301(&mut self, res: &Response) -> Result<(), DataError> {
|
||||
let headers = res.headers();
|
||||
|
||||
if let Some(url) = headers.get::<Location>() {
|
||||
self.set_uri(url.to_string());
|
||||
self.http_etag = None;
|
||||
self.last_modified = None;
|
||||
self.save()?;
|
||||
info!("Feed url was updated succesfully.");
|
||||
// TODO: Refresh in place instead of next time, Not a priority.
|
||||
info!("New content will be fetched with the next refesh.");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Construct a new `Source` with the given `uri` and index it.
|
||||
///
|
||||
/// This only indexes the `Source` struct, not the Podcast Feed.
|
||||
pub fn from_url(uri: &str) -> Result<Source, DataError> {
|
||||
let url = Url::parse(uri)?;
|
||||
|
||||
NewSource::new(&url).to_source()
|
||||
}
|
||||
|
||||
/// `Feed` constructor.
|
||||
///
|
||||
/// Fetches the latest xml Feed.
|
||||
///
|
||||
/// Updates the validator Http Headers.
|
||||
///
|
||||
/// Consumes `self` and Returns the corresponding `Feed` Object.
|
||||
// Refactor into TryInto once it lands on stable.
|
||||
pub fn into_feed(
|
||||
self,
|
||||
client: &Client<HttpsConnector<HttpConnector>>,
|
||||
pool: CpuPool,
|
||||
ignore_etags: bool,
|
||||
) -> Box<Future<Item = Feed, Error = DataError>> {
|
||||
let id = self.id();
|
||||
let feed = self.request_constructor(client, ignore_etags)
|
||||
.and_then(move |(_, res)| response_to_channel(res, pool))
|
||||
.and_then(move |chan| {
|
||||
FeedBuilder::default()
|
||||
.channel(chan)
|
||||
.source_id(id)
|
||||
.build()
|
||||
.map_err(From::from)
|
||||
});
|
||||
|
||||
Box::new(feed)
|
||||
}
|
||||
|
||||
// TODO: make ignore_etags an Enum for better ergonomics.
|
||||
// #bools_are_just_2variant_enums
|
||||
fn request_constructor(
|
||||
self,
|
||||
client: &Client<HttpsConnector<HttpConnector>>,
|
||||
ignore_etags: bool,
|
||||
) -> Box<Future<Item = (Self, Response), Error = DataError>> {
|
||||
// FIXME: remove unwrap somehow
|
||||
let uri = Uri::from_str(self.uri()).unwrap();
|
||||
let mut req = Request::new(Method::Get, uri);
|
||||
|
||||
if !ignore_etags {
|
||||
if let Some(foo) = self.http_etag() {
|
||||
req.headers_mut().set(IfNoneMatch::Items(vec![
|
||||
EntityTag::new(true, foo.to_owned()),
|
||||
]));
|
||||
}
|
||||
|
||||
if let Some(foo) = self.last_modified() {
|
||||
if let Ok(x) = foo.parse::<HttpDate>() {
|
||||
req.headers_mut().set(IfModifiedSince(x));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let work = client
|
||||
.request(req)
|
||||
.map_err(From::from)
|
||||
// TODO: tail recursion loop that would follow redirects directly
|
||||
.and_then(move |res| self.match_status(res));
|
||||
Box::new(work)
|
||||
}
|
||||
}
|
||||
|
||||
fn response_to_channel(
|
||||
res: Response,
|
||||
pool: CpuPool,
|
||||
) -> Box<Future<Item = Channel, Error = DataError> + Send> {
|
||||
let chan = res.body()
|
||||
.concat2()
|
||||
.map(|x| x.into_iter())
|
||||
.map_err(From::from)
|
||||
.map(|iter| iter.collect::<Vec<u8>>())
|
||||
.map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
|
||||
.and_then(|buf| {
|
||||
Channel::from_str(&buf).or_else(|err| Err(DataError::RssCrateError(format!("{}", err))))
|
||||
});
|
||||
let cpu_chan = pool.spawn(chan);
|
||||
Box::new(cpu_chan)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use tokio_core::reactor::Core;
|
||||
|
||||
use database::truncate_db;
|
||||
use utils::get_feed;
|
||||
|
||||
#[test]
|
||||
fn test_into_feed() {
|
||||
truncate_db().unwrap();
|
||||
|
||||
let pool = CpuPool::new_num_cpus();
|
||||
let mut core = Core::new().unwrap();
|
||||
let client = Client::configure()
|
||||
.connector(HttpsConnector::new(4, &core.handle()).unwrap())
|
||||
.build(&core.handle());
|
||||
|
||||
let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill";
|
||||
let source = Source::from_url(url).unwrap();
|
||||
let id = source.id();
|
||||
|
||||
let feed = source.into_feed(&client, pool.clone(), true);
|
||||
let feed = core.run(feed).unwrap();
|
||||
|
||||
let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
|
||||
assert_eq!(expected, feed);
|
||||
}
|
||||
}
|
||||
@ -1,217 +0,0 @@
|
||||
// FIXME:
|
||||
//! Docs.
|
||||
|
||||
use futures::future::*;
|
||||
use futures_cpupool::CpuPool;
|
||||
// use futures::prelude::*;
|
||||
|
||||
use hyper::Client;
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper_tls::HttpsConnector;
|
||||
use tokio_core::reactor::Core;
|
||||
|
||||
use num_cpus;
|
||||
use rss;
|
||||
|
||||
use Source;
|
||||
use dbqueries;
|
||||
use errors::DataError;
|
||||
use models::{IndexState, NewEpisode, NewEpisodeMinimal};
|
||||
|
||||
// use std::sync::{Arc, Mutex};
|
||||
|
||||
macro_rules! clone {
|
||||
(@param _) => ( _ );
|
||||
(@param $x:ident) => ( $x );
|
||||
($($n:ident),+ => move || $body:expr) => (
|
||||
{
|
||||
$( let $n = $n.clone(); )+
|
||||
move || $body
|
||||
}
|
||||
);
|
||||
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
|
||||
{
|
||||
$( let $n = $n.clone(); )+
|
||||
move |$(clone!(@param $p),)+| $body
|
||||
}
|
||||
);
|
||||
}
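The macro simply clones the named bindings into the closure. A tiny usage sketch (the names are illustrative, not from the crate):

let name = String::from("intercepted");
// Expands to { let name = name.clone(); move || ... }, so the closure owns a copy
// and the original binding stays usable afterwards.
let greet = clone!(name => move || println!("feed: {}", name));
greet();
println!("still here: {}", name);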
|
||||
|
||||
/// The pipeline to be run for indexing and updating a Podcast feed that originates from
|
||||
/// `Source.uri`.
|
||||
///
|
||||
/// Messy temp diagram:
|
||||
/// Source -> GET Request -> Update Etags -> Check Status -> Parse xml/Rss ->
|
||||
/// Convert `rss::Channel` into Feed -> Index Podcast -> Index Episodes.
|
||||
pub fn pipeline<S: IntoIterator<Item = Source>>(
|
||||
sources: S,
|
||||
ignore_etags: bool,
|
||||
tokio_core: &mut Core,
|
||||
pool: &CpuPool,
|
||||
client: Client<HttpsConnector<HttpConnector>>,
|
||||
) -> Result<(), DataError> {
|
||||
let list: Vec<_> = sources
|
||||
.into_iter()
|
||||
.map(clone!(pool => move |s| s.into_feed(&client, pool.clone(), ignore_etags)))
|
||||
.map(|fut| fut.and_then(clone!(pool => move |feed| pool.clone().spawn(feed.index()))))
|
||||
.map(|fut| fut.map(|_| ()).map_err(|err| error!("Error: {}", err)))
|
||||
.collect();
|
||||
|
||||
if list.is_empty() {
|
||||
return Err(DataError::EmptyFuturesList);
|
||||
}
|
||||
|
||||
// That's not really concurrent yet, I think.
|
||||
tokio_core.run(collect_futures(list))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Creates a tokio `reactor::Core`, a `CpuPool`, and a `hyper::Client` and runs the pipeline.
|
||||
pub fn run(sources: Vec<Source>, ignore_etags: bool) -> Result<(), DataError> {
|
||||
if sources.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let pool = CpuPool::new_num_cpus();
|
||||
let mut core = Core::new()?;
|
||||
let handle = core.handle();
|
||||
let client = Client::configure()
|
||||
.connector(HttpsConnector::new(num_cpus::get(), &handle)?)
|
||||
.build(&handle);
|
||||
|
||||
pipeline(sources, ignore_etags, &mut core, &pool, client)
|
||||
}
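A usage sketch mirroring the test at the bottom of this file: load every saved Source and refresh it. Passing false keeps the If-None-Match/If-Modified-Since headers, so unchanged feeds short-circuit with a 304 instead of being re-parsed.

let sources = dbqueries::get_sources().unwrap();
// Builds the Core/CpuPool/Client internally and drives the whole pipeline.
run(sources, false).unwrap();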
|
||||
|
||||
/// Refresh a single `Source`: spins up its own `reactor::Core`, `CpuPool`, and `hyper::Client`, then fetches and indexes the feed.
|
||||
pub fn index_single_source(s: Source, ignore_etags: bool) -> Result<(), DataError> {
|
||||
let pool = CpuPool::new_num_cpus();
|
||||
let mut core = Core::new()?;
|
||||
let handle = core.handle();
|
||||
|
||||
let client = Client::configure()
|
||||
.connector(HttpsConnector::new(num_cpus::get(), &handle)?)
|
||||
.build(&handle);
|
||||
|
||||
let work = s.into_feed(&client, pool.clone(), ignore_etags)
|
||||
.and_then(clone!(pool => move |feed| pool.clone().spawn(feed.index())))
|
||||
.map(|_| ());
|
||||
|
||||
core.run(work)
|
||||
}
|
||||
|
||||
fn determine_ep_state(
|
||||
ep: NewEpisodeMinimal,
|
||||
item: &rss::Item,
|
||||
) -> Result<IndexState<NewEpisode>, DataError> {
|
||||
// Check if the episode already exists
|
||||
let exists = dbqueries::episode_exists(ep.title(), ep.podcast_id())?;
|
||||
|
||||
if !exists {
|
||||
Ok(IndexState::Index(ep.into_new_episode(item)))
|
||||
} else {
|
||||
let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.podcast_id())?;
|
||||
let rowid = old.rowid();
|
||||
|
||||
if ep != old {
|
||||
Ok(IndexState::Update((ep.into_new_episode(item), rowid)))
|
||||
} else {
|
||||
Ok(IndexState::NotChanged)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn glue_async<'a>(
|
||||
item: &'a rss::Item,
|
||||
id: i32,
|
||||
) -> Box<Future<Item = IndexState<NewEpisode>, Error = DataError> + 'a> {
|
||||
Box::new(
|
||||
result(NewEpisodeMinimal::new(item, id)).and_then(move |ep| determine_ep_state(ep, item)),
|
||||
)
|
||||
}
|
||||
|
||||
// Weird magic from #rust irc channel
|
||||
// kudos to remexre
|
||||
/// Resolve a list of futures, collecting every individual `Result` instead of bailing out on the first error.
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
|
||||
pub fn collect_futures<F>(
|
||||
futures: Vec<F>,
|
||||
) -> Box<Future<Item = Vec<Result<F::Item, F::Error>>, Error = DataError>>
|
||||
where
|
||||
F: 'static + Future,
|
||||
<F as Future>::Item: 'static,
|
||||
<F as Future>::Error: 'static,
|
||||
{
|
||||
Box::new(loop_fn((futures, vec![]), |(futures, mut done)| {
|
||||
select_all(futures).then(|r| {
|
||||
let (r, rest) = match r {
|
||||
Ok((r, _, rest)) => (Ok(r), rest),
|
||||
Err((r, _, rest)) => (Err(r), rest),
|
||||
};
|
||||
done.push(r);
|
||||
if rest.is_empty() {
|
||||
Ok(Loop::Break(done))
|
||||
} else {
|
||||
Ok(Loop::Continue((rest, done)))
|
||||
}
|
||||
})
|
||||
}))
|
||||
}
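Unlike join_all, this keeps going when an individual future fails and hands back every Result. A small sketch with ready futures, run on a tokio Core as in the tests below:

let futs = vec![ok::<u32, DataError>(1), ok(2), ok(3)];
// Each inner Result is preserved; only the outer future carries DataError.
let done = Core::new().unwrap().run(collect_futures(futs)).unwrap();
assert_eq!(done.len(), 3);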
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use Source;
|
||||
use database::truncate_db;
|
||||
|
||||
// (path, url) tuples.
|
||||
const URLS: &[(&str, &str)] = {
|
||||
&[
|
||||
(
|
||||
"tests/feeds/2018-01-20-Intercepted.xml",
|
||||
"https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-LinuxUnplugged.xml",
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
|
||||
com/linuxunplugged",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-TheTipOff.xml",
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-StealTheStars.xml",
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.\
|
||||
com/steal-the-stars",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-GreaterThanCode.xml",
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
|
||||
com/feed/podcast",
|
||||
),
|
||||
]
|
||||
};
|
||||
|
||||
#[test]
|
||||
/// Insert feeds and update/index them.
|
||||
fn test_pipeline() {
|
||||
truncate_db().unwrap();
|
||||
URLS.iter().for_each(|&(_, url)| {
|
||||
// Index the urls into the source table.
|
||||
Source::from_url(url).unwrap();
|
||||
});
|
||||
let sources = dbqueries::get_sources().unwrap();
|
||||
run(sources, true).unwrap();
|
||||
|
||||
let sources = dbqueries::get_sources().unwrap();
|
||||
// Run again to cover unique-constraint errors.
|
||||
run(sources, true).unwrap();
|
||||
|
||||
// Assert the index rows equal the controlled results
|
||||
assert_eq!(dbqueries::get_sources().unwrap().len(), 5);
|
||||
assert_eq!(dbqueries::get_podcasts().unwrap().len(), 5);
|
||||
assert_eq!(dbqueries::get_episodes().unwrap().len(), 354);
|
||||
}
|
||||
}
|
||||
@ -1,20 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
name = "hammond-downloader"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
error-chain = "0.11.0"
|
||||
hyper = "0.11.18"
|
||||
log = "0.4.1"
|
||||
mime_guess = "1.8.3"
|
||||
reqwest = "0.8.4"
|
||||
tempdir = "0.3.6"
|
||||
glob = "0.2.11"
|
||||
failure = "0.1.1"
|
||||
failure_derive = "0.1.1"
|
||||
|
||||
[dependencies.hammond-data]
|
||||
path = "../hammond-data"
|
||||
|
||||
@ -1,41 +0,0 @@
|
||||
use hammond_data::errors::DataError;
|
||||
use reqwest;
|
||||
use std::io;
|
||||
|
||||
#[derive(Fail, Debug)]
|
||||
pub enum DownloadError {
|
||||
#[fail(display = "Reqwest error: {}", _0)]
|
||||
RequestError(#[cause] reqwest::Error),
|
||||
#[fail(display = "Data error: {}", _0)]
|
||||
DataError(#[cause] DataError),
|
||||
#[fail(display = "Io error: {}", _0)]
|
||||
IoError(#[cause] io::Error),
|
||||
#[fail(display = "Unexpected server response: {}", _0)]
|
||||
UnexpectedResponse(reqwest::StatusCode),
|
||||
#[fail(display = "The Download was cancelled.")]
|
||||
DownloadCancelled,
|
||||
#[fail(display = "Remote Image location not found.")]
|
||||
NoImageLocation,
|
||||
#[fail(display = "Failed to parse CacheLocation.")]
|
||||
InvalidCacheLocation,
|
||||
#[fail(display = "Failed to parse Cached Image Location.")]
|
||||
InvalidCachedImageLocation,
|
||||
}
|
||||
|
||||
impl From<reqwest::Error> for DownloadError {
|
||||
fn from(err: reqwest::Error) -> Self {
|
||||
DownloadError::RequestError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for DownloadError {
|
||||
fn from(err: io::Error) -> Self {
|
||||
DownloadError::IoError(err)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DataError> for DownloadError {
|
||||
fn from(err: DataError) -> Self {
|
||||
DownloadError::DataError(err)
|
||||
}
|
||||
}
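With these From impls in place, a downloader routine can use `?` freely and every underlying failure is folded into DownloadError. A hypothetical sketch (fetch_bytes is illustrative, not an existing helper in the crate):

fn fetch_bytes(url: &str) -> Result<Vec<u8>, DownloadError> {
    // reqwest::Error -> DownloadError::RequestError via From.
    let mut resp = reqwest::get(url)?;
    if !resp.status().is_success() {
        return Err(DownloadError::UnexpectedResponse(resp.status()));
    }
    let mut buf = Vec::new();
    // io::Error -> DownloadError::IoError via From.
    io::copy(&mut resp, &mut buf)?;
    Ok(buf)
}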
|
||||
@ -1,18 +0,0 @@
|
||||
#![recursion_limit = "1024"]
|
||||
#![deny(unused_extern_crates, unused)]
|
||||
|
||||
extern crate failure;
|
||||
#[macro_use]
|
||||
extern crate failure_derive;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
extern crate glob;
|
||||
extern crate hammond_data;
|
||||
extern crate hyper;
|
||||
extern crate mime_guess;
|
||||
extern crate reqwest;
|
||||
extern crate tempdir;
|
||||
|
||||
pub mod downloader;
|
||||
pub mod errors;
|
||||
@ -1,34 +0,0 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jordanpetridis@protonmail.com>"]
|
||||
build = "build.rs"
|
||||
name = "hammond-gtk"
|
||||
version = "0.1.0"
|
||||
workspace = "../"
|
||||
|
||||
[dependencies]
|
||||
chrono = "0.4.0"
|
||||
dissolve = "0.2.2"
|
||||
gdk = "0.7.0"
|
||||
gdk-pixbuf = "0.3.0"
|
||||
gio = "0.3.0"
|
||||
glib = "0.4.1"
|
||||
humansize = "1.1.0"
|
||||
lazy_static = "1.0.0"
|
||||
log = "0.4.1"
|
||||
loggerv = "0.7.0"
|
||||
open = "1.2.1"
|
||||
rayon = "0.9.0"
|
||||
send-cell = "0.1.2"
|
||||
url = "1.6.0"
|
||||
failure = "0.1.1"
|
||||
failure_derive = "0.1.1"
|
||||
|
||||
[dependencies.gtk]
|
||||
features = ["v3_22"]
|
||||
version = "0.3.0"
|
||||
|
||||
[dependencies.hammond-data]
|
||||
path = "../hammond-data"
|
||||
|
||||
[dependencies.hammond-downloader]
|
||||
path = "../hammond-downloader"
|
||||
@ -1,9 +0,0 @@
|
||||
use std::process::Command;
|
||||
|
||||
fn main() {
|
||||
Command::new("glib-compile-resources")
|
||||
.args(&["--generate", "resources.xml"])
|
||||
.current_dir("resources")
|
||||
.status()
|
||||
.unwrap();
|
||||
}
|
||||
@ -1,91 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.21.0
|
||||
|
||||
Copyright (C) 2017 - 2018
|
||||
|
||||
This file is part of Hammond.
|
||||
|
||||
Hammond is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
Hammond is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Hammond. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Authors:
|
||||
Jordan Petridis
|
||||
Tobias Bernard
|
||||
|
||||
-->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<!-- interface-license-type gplv3 -->
|
||||
<!-- interface-name Hammond -->
|
||||
<!-- interface-description A podcast client for the GNOME Desktop -->
|
||||
<!-- interface-copyright 2017 - 2018 -->
|
||||
<!-- interface-authors Jordan Petridis\nTobias Bernard -->
|
||||
<object class="GtkBox" id="empty_view">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="hexpand">True</property>
|
||||
<property name="vexpand">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">12</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="pixel_size">128</property>
|
||||
<property name="icon_name">application-rss+xml-symbolic</property>
|
||||
<property name="use_fallback">True</property>
|
||||
<style>
|
||||
<class name="dim-label"/>
|
||||
</style>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label" translatable="yes">No Feed Subscription Found</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.4399999999999999"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label" translatable="yes">You can subscribe to feeds using the "+" button</property>
|
||||
<style>
|
||||
<class name="dim-label"/>
|
||||
</style>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,383 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.21.0
|
||||
|
||||
Copyright (C) 2017 - 2018
|
||||
|
||||
This file is part of Hammond.
|
||||
|
||||
Hammond is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
Hammond is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Hammond. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Authors:
|
||||
Jordan Petridis
|
||||
Tobias Bernard
|
||||
|
||||
-->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<!-- interface-license-type gplv3 -->
|
||||
<!-- interface-name Hammond -->
|
||||
<!-- interface-description A podcast client for the GNOME Desktop -->
|
||||
<!-- interface-copyright 2017 - 2018 -->
|
||||
<!-- interface-authors Jordan Petridis\nTobias Bernard -->
|
||||
<object class="GtkBox" id="container">
|
||||
<property name="name">container</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow" id="scrolled_window">
|
||||
<property name="name">scrolled_window</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkViewport">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox" id="frame_parent">
|
||||
<property name="width_request">720</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="margin_top">24</property>
|
||||
<property name="margin_bottom">24</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">24</property>
|
||||
<child>
|
||||
<object class="GtkBox" id="today_box">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="label" translatable="yes">Today</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.5"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkListBox" id="today_list">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox" id="yday_box">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="label" translatable="yes">Yesterday</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.5"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkListBox" id="yday_list">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox" id="week_box">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="label" translatable="yes">This Week</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.5"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkListBox" id="week_list">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox" id="month_box">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="label" translatable="yes">This Month</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.5"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkListBox" id="month_list">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">3</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox" id="rest_box">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">6</property>
|
||||
<child>
|
||||
<object class="GtkLabel">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="label" translatable="yes">Older</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="bold"/>
|
||||
<attribute name="scale" value="1.5"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkListBox" id="rest_list">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="no_show_all">True</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">5</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,282 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.21.0
|
||||
|
||||
Copyright (C) 2017 - 2018
|
||||
|
||||
This file is part of Hammond.
|
||||
|
||||
Hammond is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
Hammond is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Hammond. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Authors:
|
||||
Jordan Petridis
|
||||
Tobias Bernard
|
||||
|
||||
-->
|
||||
<interface domain="">
|
||||
<requires lib="gtk+" version="3.12"/>
|
||||
<!-- interface-license-type gplv3 -->
|
||||
<!-- interface-name Hammond -->
|
||||
<!-- interface-description A podcast client for the GNOME Desktop -->
|
||||
<!-- interface-copyright 2017 - 2018 -->
|
||||
<!-- interface-authors Jordan Petridis\nTobias Bernard -->
|
||||
<object class="GtkBox" id="container">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow" id="scrolled_window">
|
||||
<property name="name">scrolled_window</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkViewport">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="margin_top">24</property>
|
||||
<property name="margin_bottom">24</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="width_request">624</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">24</property>
|
||||
<child>
|
||||
<object class="GtkFrame">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">none</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="spacing">12</property>
|
||||
<child>
|
||||
<object class="GtkImage" id="cover">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="pixel_size">128</property>
|
||||
<property name="icon_name">image-x-generic-symbolic</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<property name="spacing">12</property>
|
||||
<child type="center">
|
||||
<object class="GtkLabel" id="description">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">start</property>
|
||||
<property name="valign">end</property>
|
||||
<property name="label" translatable="yes">Show description</property>
|
||||
<property name="wrap">True</property>
|
||||
<property name="max_width_chars">57</property>
|
||||
<attributes>
|
||||
<attribute name="weight" value="medium"/>
|
||||
</attributes>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="spacing">5</property>
|
||||
<child>
|
||||
<object class="GtkMenuButton" id="settings_button">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<child>
|
||||
<object class="GtkImage">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="icon_name">emblem-system-symbolic</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="link_button">
|
||||
<property name="label" translatable="yes">Website</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkButton" id="unsub_button">
|
||||
<property name="label" translatable="yes">Unsubscribe</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="receives_default">True</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<style>
|
||||
<class name="destructive-action"/>
|
||||
</style>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="padding">5</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="pack_type">end</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkFrame" id="episodes">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="label_xalign">0</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child type="label_item">
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">False</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">1</property>
|
||||
</packing>
|
||||
</child>
|
||||
<child>
|
||||
<object class="GtkBox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">False</property>
|
||||
<property name="position">2</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,67 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.21.0
|
||||
|
||||
Copyright (C) 2017 - 2018
|
||||
|
||||
This file is part of Hammond.
|
||||
|
||||
Hammond is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
Hammond is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Hammond. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Authors:
|
||||
Jordan Petridis
|
||||
Tobias Bernard
|
||||
|
||||
-->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<!-- interface-license-type gplv3 -->
|
||||
<!-- interface-name Hammond -->
|
||||
<!-- interface-description A podcast client for the GNOME Desktop -->
|
||||
<!-- interface-copyright 2017 - 2018 -->
|
||||
<!-- interface-authors Jordan Petridis\nTobias Bernard -->
|
||||
<object class="GtkBox" id="fb_child">
|
||||
<property name="width_request">256</property>
|
||||
<property name="height_request">256</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkOverlay">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<placeholder/>
|
||||
</child>
|
||||
<child type="overlay">
|
||||
<object class="GtkImage" id="pd_cover">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">center</property>
|
||||
<property name="pixel_size">256</property>
|
||||
<property name="icon_name">image-x-generic-symbolic</property>
|
||||
<property name="icon_size">0</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,73 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!-- Generated with glade 3.21.0
|
||||
|
||||
Copyright (C) 2017 - 2018
|
||||
|
||||
This file is part of Hammond.
|
||||
|
||||
Hammond is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
Hammond is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Hammond. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Authors:
|
||||
Jordan Petridis
|
||||
Tobias Bernard
|
||||
|
||||
-->
|
||||
<interface>
|
||||
<requires lib="gtk+" version="3.20"/>
|
||||
<!-- interface-license-type gplv3 -->
|
||||
<!-- interface-name Hammond -->
|
||||
<!-- interface-description A podcast client for the GNOME Desktop -->
|
||||
<!-- interface-copyright 2017 - 2018 -->
|
||||
<!-- interface-authors Jordan Petridis\nTobias Bernard -->
|
||||
<object class="GtkBox" id="fb_parent">
|
||||
<property name="name">fb_parent</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="orientation">vertical</property>
|
||||
<child>
|
||||
<object class="GtkScrolledWindow" id="scrolled_window">
|
||||
<property name="name">scrolled_window</property>
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">True</property>
|
||||
<property name="shadow_type">in</property>
|
||||
<child>
|
||||
<object class="GtkViewport">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<child>
|
||||
<object class="GtkFlowBox" id="flowbox">
|
||||
<property name="visible">True</property>
|
||||
<property name="can_focus">False</property>
|
||||
<property name="halign">center</property>
|
||||
<property name="valign">start</property>
|
||||
<property name="margin_top">24</property>
|
||||
<property name="margin_bottom">24</property>
|
||||
<property name="homogeneous">True</property>
|
||||
<property name="column_spacing">12</property>
|
||||
<property name="row_spacing">12</property>
|
||||
<property name="max_children_per_line">20</property>
|
||||
<property name="selection_mode">none</property>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
</child>
|
||||
</object>
|
||||
<packing>
|
||||
<property name="expand">True</property>
|
||||
<property name="fill">True</property>
|
||||
<property name="position">0</property>
|
||||
</packing>
|
||||
</child>
|
||||
</object>
|
||||
</interface>
|
||||
@ -1,11 +0,0 @@
row {
border-bottom: solid 1px rgba(0,0,0, 0.1);
}

row:last-child {
border-bottom: none;
}

list, border {
border-radius: 4px;
}
@ -1,4 +0,0 @@
# subdir('icons')

install_data('org.gnome.Hammond.desktop', install_dir : datadir + '/applications')
install_data('org.gnome.Hammond.appdata.xml', install_dir : datadir + '/appdata')
@ -1,23 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<component type="desktop">
<id>org.gnome.Hammond</id>
<name>Hammond</name>
<project_license>GPL-3.0</project_license>
<metadata_license>CC0-1.0</metadata_license>
<developer_name>Jordan Petridis</developer_name>
<summary>GNOME Podcast Client written in Rust</summary>
<url type="homepage">https://gitlab.gnome.org/alatiera/Hammond</url>
<description>
Hammond is a Gtk+ Podcast client for the GNOME Desktop written in Rust
</description>
<screenshots>
<screenshot>
<image type="source">https://gitlab.gnome.org/alatiera/Hammond/raw/master/screenshots/podcasts_view.png</image>
<image type="source">https://gitlab.gnome.org/alatiera/Hammond/raw/master/screenshots/podcast_widget.png</image>
</screenshot>
</screenshots>
<releases>
<release version="0.3.0" date="2018-02-11"/>
</releases>
<update_contact>jordanpetridis@protonmail.com</update_contact>
</component>
@ -1,11 +0,0 @@
[Desktop Entry]
Name=Hammond
GenericName=Podcast Client
Comment=Play, Subscribe and Manage Podcast Feeds.
Icon=multimedia-player
Exec=hammond
Terminal=false
Type=Application
StartupNotify=true
Categories=AudioVideo;Audio;Video;
Keywords=Podcast
@ -1,14 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<gresources>
<gresource prefix="/org/gnome/hammond/">
<file preprocess="xml-stripblanks">gtk/episode_widget.ui</file>
<file preprocess="xml-stripblanks">gtk/show_widget.ui</file>
<file preprocess="xml-stripblanks">gtk/empty_view.ui</file>
<file preprocess="xml-stripblanks">gtk/episodes_view.ui</file>
<file preprocess="xml-stripblanks">gtk/episodes_view_widget.ui</file>
<file preprocess="xml-stripblanks">gtk/shows_view.ui</file>
<file preprocess="xml-stripblanks">gtk/shows_child.ui</file>
<file preprocess="xml-stripblanks">gtk/headerbar.ui</file>
<file compressed="true">gtk/style.css</file>
</gresource>
</gresources>
@ -1,168 +0,0 @@
|
||||
use gio::{ApplicationExt, ApplicationExtManual, ApplicationFlags};
|
||||
use glib;
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use hammond_data::{Podcast, Source};
|
||||
use hammond_data::utils::checkup;
|
||||
|
||||
use headerbar::Header;
|
||||
use stacks::Content;
|
||||
use utils;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::{channel, Receiver, Sender};
|
||||
use std::time::Duration;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum Action {
|
||||
UpdateSources(Option<Source>),
|
||||
RefreshAllViews,
|
||||
RefreshEpisodesView,
|
||||
RefreshEpisodesViewBGR,
|
||||
RefreshShowsView,
|
||||
RefreshWidget,
|
||||
RefreshWidgetIfVis,
|
||||
ReplaceWidget(Arc<Podcast>),
|
||||
RefreshWidgetIfSame(i32),
|
||||
ShowWidgetAnimated,
|
||||
ShowShowsAnimated,
|
||||
HeaderBarShowTile(String),
|
||||
HeaderBarNormal,
|
||||
HeaderBarShowUpdateIndicator,
|
||||
HeaderBarHideUpdateIndicator,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct App {
|
||||
app_instance: gtk::Application,
|
||||
window: gtk::Window,
|
||||
header: Arc<Header>,
|
||||
content: Arc<Content>,
|
||||
receiver: Receiver<Action>,
|
||||
sender: Sender<Action>,
|
||||
}
|
||||
|
||||
impl App {
|
||||
pub fn new() -> App {
|
||||
let application = gtk::Application::new("org.gnome.Hammond", ApplicationFlags::empty())
|
||||
.expect("Application Initialization failed...");
|
||||
|
||||
// Weird magic I copy-pasted that sets the Application Name in the Shell.
|
||||
glib::set_application_name("Hammond");
|
||||
glib::set_prgname(Some("Hammond"));
|
||||
|
||||
// Create the main window
|
||||
let window = gtk::Window::new(gtk::WindowType::Toplevel);
|
||||
window.set_default_size(860, 640);
|
||||
window.set_title("Hammond");
|
||||
let app_clone = application.clone();
|
||||
window.connect_delete_event(move |_, _| {
|
||||
app_clone.quit();
|
||||
Inhibit(false)
|
||||
});
|
||||
|
||||
let (sender, receiver) = channel();
|
||||
|
||||
// Create a content instance
|
||||
let content =
|
||||
Arc::new(Content::new(sender.clone()).expect("Content Initialization failed."));
|
||||
|
||||
// Create the headerbar
|
||||
let header = Arc::new(Header::new(&content, &window, sender.clone()));
|
||||
|
||||
// Add the content main stack to the window.
|
||||
window.add(&content.get_stack());
|
||||
|
||||
App {
|
||||
app_instance: application,
|
||||
window,
|
||||
header,
|
||||
content,
|
||||
receiver,
|
||||
sender,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setup_timed_callbacks(&self) {
|
||||
let sender = self.sender.clone();
|
||||
// Update the feeds right after the Application is initialized.
|
||||
gtk::timeout_add_seconds(2, move || {
|
||||
utils::refresh_feed_wrapper(None, sender.clone());
|
||||
glib::Continue(false)
|
||||
});
|
||||
|
||||
let sender = self.sender.clone();
|
||||
// Auto-updater, runs every hour.
|
||||
// TODO: expose the interval at which it runs as a user setting.
|
||||
gtk::timeout_add_seconds(3600, move || {
|
||||
utils::refresh_feed_wrapper(None, sender.clone());
|
||||
glib::Continue(true)
|
||||
});
|
||||
|
||||
// Run a database checkup once the application is initialized.
|
||||
gtk::timeout_add(300, || {
|
||||
if let Err(err) = checkup() {
|
||||
error!("Check up failed: {}", err);
|
||||
}
|
||||
|
||||
glib::Continue(false)
|
||||
});
|
||||
}
|
||||
|
||||
pub fn run(self) {
|
||||
let window = self.window.clone();
|
||||
let app = self.app_instance.clone();
|
||||
self.app_instance.connect_startup(move |_| {
|
||||
build_ui(&window, &app);
|
||||
});
|
||||
self.setup_timed_callbacks();
|
||||
|
||||
let content = self.content.clone();
|
||||
let headerbar = self.header.clone();
|
||||
let sender = self.sender.clone();
|
||||
let receiver = self.receiver;
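// Poll the Action channel from inside the main loop and dispatch
// each message to the content stack or the headerbar.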
|
||||
gtk::idle_add(move || {
|
||||
match receiver.recv_timeout(Duration::from_millis(10)) {
|
||||
Ok(Action::UpdateSources(source)) => {
|
||||
if let Some(s) = source {
|
||||
utils::refresh_feed_wrapper(Some(vec![s]), sender.clone());
|
||||
} else {
|
||||
utils::refresh_feed_wrapper(None, sender.clone());
|
||||
}
|
||||
}
|
||||
Ok(Action::RefreshAllViews) => content.update(),
|
||||
Ok(Action::RefreshShowsView) => content.update_shows_view(),
|
||||
Ok(Action::RefreshWidget) => content.update_widget(),
|
||||
Ok(Action::RefreshWidgetIfVis) => content.update_widget_if_visible(),
|
||||
Ok(Action::RefreshWidgetIfSame(id)) => content.update_widget_if_same(id),
|
||||
Ok(Action::RefreshEpisodesView) => content.update_episode_view(),
|
||||
Ok(Action::RefreshEpisodesViewBGR) => content.update_episode_view_if_baground(),
|
||||
Ok(Action::ReplaceWidget(pd)) => {
|
||||
if let Err(err) = content.get_shows().replace_widget(pd) {
|
||||
error!("Something went wrong while trying to update the ShowWidget.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
Ok(Action::ShowWidgetAnimated) => content.get_shows().switch_widget_animated(),
|
||||
Ok(Action::ShowShowsAnimated) => content.get_shows().switch_podcasts_animated(),
|
||||
Ok(Action::HeaderBarShowTile(title)) => headerbar.switch_to_back(&title),
|
||||
Ok(Action::HeaderBarNormal) => headerbar.switch_to_normal(),
|
||||
Ok(Action::HeaderBarShowUpdateIndicator) => headerbar.show_update_notification(),
|
||||
Ok(Action::HeaderBarHideUpdateIndicator) => headerbar.hide_update_notification(),
|
||||
Err(_) => (),
|
||||
}
|
||||
|
||||
Continue(true)
|
||||
});
|
||||
|
||||
ApplicationExtManual::run(&self.app_instance, &[]);
|
||||
}
|
||||
}
|
||||
|
||||
fn build_ui(window: >k::Window, app: >k::Application) {
|
||||
window.set_application(app);
|
||||
window.show_all();
|
||||
window.activate();
|
||||
app.connect_activate(move |_| ());
|
||||
}
|
||||
@ -1,240 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use failure::Error;
|
||||
use failure::ResultExt;
|
||||
use url::Url;
|
||||
|
||||
use hammond_data::Source;
|
||||
use hammond_data::dbqueries;
|
||||
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
use app::Action;
|
||||
use stacks::Content;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Header {
|
||||
container: gtk::HeaderBar,
|
||||
add_toggle: gtk::MenuButton,
|
||||
switch: gtk::StackSwitcher,
|
||||
back_button: gtk::Button,
|
||||
show_title: gtk::Label,
|
||||
about_button: gtk::ModelButton,
|
||||
update_button: gtk::ModelButton,
|
||||
update_box: gtk::Box,
|
||||
update_label: gtk::Label,
|
||||
update_spinner: gtk::Spinner,
|
||||
}
|
||||
|
||||
impl Default for Header {
|
||||
fn default() -> Header {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/headerbar.ui");
|
||||
|
||||
let header: gtk::HeaderBar = builder.get_object("headerbar").unwrap();
|
||||
let add_toggle: gtk::MenuButton = builder.get_object("add_toggle").unwrap();
|
||||
let switch: gtk::StackSwitcher = builder.get_object("switch").unwrap();
|
||||
let back_button: gtk::Button = builder.get_object("back_button").unwrap();
|
||||
let show_title: gtk::Label = builder.get_object("show_title").unwrap();
|
||||
let update_button: gtk::ModelButton = builder.get_object("update_button").unwrap();
|
||||
let update_box: gtk::Box = builder.get_object("update_notification").unwrap();
|
||||
let update_label: gtk::Label = builder.get_object("update_label").unwrap();
|
||||
let update_spinner: gtk::Spinner = builder.get_object("update_spinner").unwrap();
|
||||
let about_button: gtk::ModelButton = builder.get_object("about_button").unwrap();
|
||||
|
||||
Header {
|
||||
container: header,
|
||||
add_toggle,
|
||||
switch,
|
||||
back_button,
|
||||
show_title,
|
||||
about_button,
|
||||
update_button,
|
||||
update_box,
|
||||
update_label,
|
||||
update_spinner,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Refactor components into smaller state machines
|
||||
impl Header {
|
||||
pub fn new(content: &Content, window: >k::Window, sender: Sender<Action>) -> Header {
|
||||
let h = Header::default();
|
||||
h.init(content, window, sender);
|
||||
h
|
||||
}
|
||||
|
||||
pub fn init(&self, content: &Content, window: >k::Window, sender: Sender<Action>) {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/headerbar.ui");
|
||||
|
||||
let add_popover: gtk::Popover = builder.get_object("add_popover").unwrap();
|
||||
let new_url: gtk::Entry = builder.get_object("new_url").unwrap();
|
||||
let add_button: gtk::Button = builder.get_object("add_button").unwrap();
|
||||
let result_label: gtk::Label = builder.get_object("result_label").unwrap();
|
||||
self.switch.set_stack(&content.get_stack());
|
||||
|
||||
new_url.connect_changed(clone!(add_button => move |url| {
|
||||
if let Err(err) = on_url_change(url, &result_label, &add_button) {
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}));
|
||||
|
||||
add_button.connect_clicked(clone!(add_popover, new_url, sender => move |_| {
|
||||
if let Err(err) = on_add_bttn_clicked(&new_url, sender.clone()) {
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
add_popover.hide();
|
||||
}));
|
||||
|
||||
self.add_toggle.set_popover(&add_popover);
|
||||
|
||||
self.update_button
|
||||
.connect_clicked(clone!(sender => move |_| {
|
||||
sender
|
||||
.send(Action::UpdateSources(None))
|
||||
.expect("Action channel blew up.");
|
||||
}));
|
||||
|
||||
self.about_button
|
||||
.connect_clicked(clone!(window => move |_| {
|
||||
about_dialog(&window);
|
||||
}));
|
||||
|
||||
// Add the Headerbar to the window.
|
||||
window.set_titlebar(&self.container);
|
||||
|
||||
let switch = &self.switch;
|
||||
let add_toggle = &self.add_toggle;
|
||||
let show_title = &self.show_title;
|
||||
self.back_button.connect_clicked(
|
||||
clone!(switch, add_toggle, show_title, sender => move |back| {
|
||||
switch.show();
|
||||
add_toggle.show();
|
||||
back.hide();
|
||||
show_title.hide();
|
||||
if let Err(err) = sender.send(Action::ShowShowsAnimated) {
|
||||
error!("Action channel blew up: {}", err);
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn switch_to_back(&self, title: &str) {
|
||||
self.switch.hide();
|
||||
self.add_toggle.hide();
|
||||
self.back_button.show();
|
||||
self.set_show_title(title);
|
||||
self.show_title.show();
|
||||
}
|
||||
|
||||
pub fn switch_to_normal(&self) {
|
||||
self.switch.show();
|
||||
self.add_toggle.show();
|
||||
self.back_button.hide();
|
||||
self.show_title.hide();
|
||||
}
|
||||
|
||||
pub fn set_show_title(&self, title: &str) {
|
||||
self.show_title.set_text(title)
|
||||
}
|
||||
|
||||
pub fn show_update_notification(&self) {
|
||||
self.update_spinner.start();
|
||||
self.update_box.show();
|
||||
self.update_spinner.show();
|
||||
self.update_label.show();
|
||||
}
|
||||
|
||||
pub fn hide_update_notification(&self) {
|
||||
self.update_spinner.stop();
|
||||
self.update_box.hide();
|
||||
self.update_spinner.hide();
|
||||
self.update_label.hide();
|
||||
}
|
||||
}
|
||||
|
||||
fn on_add_bttn_clicked(entry: >k::Entry, sender: Sender<Action>) -> Result<(), Error> {
|
||||
let url = entry.get_text().unwrap_or_default();
|
||||
let source = Source::from_url(&url).context("Failed to convert url to a Source entry.")?;
|
||||
entry.set_text("");
|
||||
|
||||
sender
|
||||
.send(Action::UpdateSources(Some(source)))
|
||||
.context("App channel blew up.")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn on_url_change(
|
||||
entry: >k::Entry,
|
||||
result: >k::Label,
|
||||
add_button: >k::Button,
|
||||
) -> Result<(), Error> {
|
||||
let uri = entry
|
||||
.get_text()
|
||||
.ok_or_else(|| format_err!("GtkEntry blew up somehow."))?;
|
||||
debug!("Url: {}", uri);
|
||||
|
||||
let url = Url::parse(&uri);
|
||||
// TODO: refactor to avoid duplication
|
||||
match url {
|
||||
Ok(u) => {
|
||||
if !dbqueries::source_exists(u.as_str())? {
|
||||
add_button.set_sensitive(true);
|
||||
result.hide();
|
||||
result.set_label("");
|
||||
} else {
|
||||
add_button.set_sensitive(false);
|
||||
result.set_label("Show already exists.");
|
||||
result.show();
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => {
|
||||
add_button.set_sensitive(false);
|
||||
if !uri.is_empty() {
|
||||
result.set_label("Invalid url.");
|
||||
result.show();
|
||||
error!("Error: {}", err);
|
||||
} else {
|
||||
result.hide();
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Totally copied it from fractal.
|
||||
// https://gitlab.gnome.org/danigm/fractal/blob/503e311e22b9d7540089d735b92af8e8f93560c5/fractal-gtk/src/app.rs#L1883-1912
|
||||
fn about_dialog(window: >k::Window) {
|
||||
// Feel free to add yourself if you contributed.
|
||||
let authors = &[
|
||||
"Constantin Nickel",
|
||||
"Gabriele Musco",
|
||||
"James Wykeham-Martin",
|
||||
"Jordan Petridis",
|
||||
"Julian Sparber",
|
||||
];
|
||||
|
||||
let dialog = gtk::AboutDialog::new();
|
||||
// Waiting for a logo.
|
||||
// dialog.set_logo_icon_name("org.gnome.Hammond");
|
||||
dialog.set_logo_icon_name("multimedia-player");
|
||||
dialog.set_comments("A Podcast Client for the GNOME Desktop.");
|
||||
dialog.set_copyright("© 2017, 2018 Jordan Petridis");
|
||||
dialog.set_license_type(gtk::License::Gpl30);
|
||||
dialog.set_modal(true);
|
||||
// TODO: make it so it shows the commit hash from which it was built
|
||||
// and the version number is kept in sync automatically
|
||||
dialog.set_version("0.3");
|
||||
dialog.set_program_name("Hammond");
|
||||
// TODO: Need a wiki page first.
|
||||
// dialog.set_website("https://wiki.gnome.org/Design/Apps/Potential/Podcasts");
|
||||
// dialog.set_website_label("Learn more about Hammond");
|
||||
dialog.set_transient_for(window);
|
||||
|
||||
dialog.set_artists(&["Tobias Bernard"]);
|
||||
dialog.set_authors(authors);
|
||||
|
||||
dialog.show();
|
||||
}
|
||||
@ -1,91 +0,0 @@
|
||||
#![cfg_attr(feature = "cargo-clippy", allow(clone_on_ref_ptr, needless_pass_by_value))]
|
||||
// #![deny(unused_extern_crates, unused)]
|
||||
|
||||
extern crate gdk;
|
||||
extern crate gdk_pixbuf;
|
||||
extern crate gio;
|
||||
extern crate glib;
|
||||
extern crate gtk;
|
||||
|
||||
#[macro_use]
|
||||
extern crate failure;
|
||||
// #[macro_use]
|
||||
// extern crate failure_derive;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
extern crate chrono;
|
||||
extern crate dissolve;
|
||||
extern crate hammond_data;
|
||||
extern crate hammond_downloader;
|
||||
extern crate humansize;
|
||||
extern crate loggerv;
|
||||
extern crate open;
|
||||
extern crate send_cell;
|
||||
extern crate url;
|
||||
// extern crate rayon;
|
||||
|
||||
// use rayon::prelude::*;
|
||||
use log::Level;
|
||||
|
||||
use gtk::prelude::*;
|
||||
|
||||
// http://gtk-rs.org/tuto/closures
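// The macro clones every listed variable and moves the clones into the
// closure, so GTK signal handlers can own their captures instead of
// borrowing the surrounding scope.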
|
||||
#[macro_export]
|
||||
macro_rules! clone {
|
||||
(@param _) => ( _ );
|
||||
(@param $x:ident) => ( $x );
|
||||
($($n:ident),+ => move || $body:expr) => (
|
||||
{
|
||||
$( let $n = $n.clone(); )+
|
||||
move || $body
|
||||
}
|
||||
);
|
||||
($($n:ident),+ => move |$($p:tt),+| $body:expr) => (
|
||||
{
|
||||
$( let $n = $n.clone(); )+
|
||||
move |$(clone!(@param $p),)+| $body
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// They do not need to be public
|
||||
// But it helps when looking at the generated docs.
|
||||
pub mod views;
|
||||
pub mod widgets;
|
||||
pub mod stacks;
|
||||
|
||||
pub mod headerbar;
|
||||
pub mod app;
|
||||
|
||||
pub mod utils;
|
||||
pub mod manager;
|
||||
pub mod static_resource;
|
||||
|
||||
use app::App;
|
||||
|
||||
fn main() {
|
||||
// TODO: make the logger a cli -vv option
|
||||
loggerv::init_with_level(Level::Info).expect("Error initializing loggerv.");
|
||||
gtk::init().expect("Error initializing gtk.");
|
||||
static_resource::init().expect("Something went wrong with the resource file initialization.");
|
||||
|
||||
// Add custom style
|
||||
let provider = gtk::CssProvider::new();
|
||||
gtk::CssProvider::load_from_resource(&provider, "/org/gnome/hammond/gtk/style.css");
|
||||
gtk::StyleContext::add_provider_for_screen(
|
||||
&gdk::Screen::get_default().expect("Error initializing gtk css provider."),
|
||||
&provider,
|
||||
600,
|
||||
);
|
||||
|
||||
// This sets the app to dark mode.
|
||||
// It will be a user preference later.
|
||||
// Uncomment it to run with the dark theme variant.
|
||||
// let settings = gtk::Settings::get_default().unwrap();
|
||||
// settings.set_property_gtk_application_prefer_dark_theme(true);
|
||||
|
||||
App::new().run();
|
||||
}
|
||||
@ -1,202 +0,0 @@
|
||||
use failure::Error;
|
||||
|
||||
// use hammond_data::Episode;
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_downloader::downloader::{get_episode, DownloadProgress};
|
||||
|
||||
use app::Action;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
use std::sync::mpsc::Sender;
|
||||
// use std::sync::atomic::AtomicUsize;
|
||||
// use std::path::PathBuf;
|
||||
use std::thread;
|
||||
|
||||
// This is messy, undocumented and hacky af.
|
||||
// I am terrible at writing downloaders and download managers.
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Progress {
|
||||
total_bytes: u64,
|
||||
downloaded_bytes: u64,
|
||||
cancel: bool,
|
||||
}
|
||||
|
||||
impl Default for Progress {
|
||||
fn default() -> Self {
|
||||
Progress {
|
||||
total_bytes: 0,
|
||||
downloaded_bytes: 0,
|
||||
cancel: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Progress {
|
||||
pub fn get_fraction(&self) -> f64 {
|
||||
let ratio = self.downloaded_bytes as f64 / self.total_bytes as f64;
|
||||
debug!("{:?}", self);
|
||||
debug!("Ratio completed: {}", ratio);
|
||||
|
||||
if ratio >= 1.0 {
|
||||
return 1.0;
|
||||
};
|
||||
ratio
|
||||
}
|
||||
|
||||
pub fn get_total_size(&self) -> u64 {
|
||||
self.total_bytes
|
||||
}
|
||||
|
||||
pub fn get_downloaded(&self) -> u64 {
|
||||
self.downloaded_bytes
|
||||
}
|
||||
|
||||
pub fn cancel(&mut self) {
|
||||
self.cancel = true;
|
||||
}
|
||||
}
|
||||
|
||||
impl DownloadProgress for Progress {
|
||||
fn set_downloaded(&mut self, downloaded: u64) {
|
||||
self.downloaded_bytes = downloaded
|
||||
}
|
||||
|
||||
fn set_size(&mut self, bytes: u64) {
|
||||
self.total_bytes = bytes;
|
||||
}
|
||||
|
||||
fn should_cancel(&self) -> bool {
|
||||
self.cancel
|
||||
}
|
||||
}
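// Global registry of in-flight downloads, keyed by episode rowid.
// Each entry holds the shared Progress state for that download.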
|
||||
|
||||
lazy_static! {
|
||||
pub static ref ACTIVE_DOWNLOADS: Arc<RwLock<HashMap<i32, Arc<Mutex<Progress>>>>> = {
|
||||
Arc::new(RwLock::new(HashMap::new()))
|
||||
};
|
||||
}
|
||||
|
||||
pub fn add(id: i32, directory: &str, sender: Sender<Action>) -> Result<(), Error> {
|
||||
// Create a new `Progress` struct to keep track of dl progress.
|
||||
let prog = Arc::new(Mutex::new(Progress::default()));
|
||||
|
||||
{
|
||||
let mut m = ACTIVE_DOWNLOADS
|
||||
.write()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the mutex."))?;
|
||||
m.insert(id, prog.clone());
|
||||
}
|
||||
|
||||
let dir = directory.to_owned();
|
||||
thread::spawn(move || {
|
||||
if let Ok(episode) = dbqueries::get_episode_from_rowid(id) {
|
||||
let pid = episode.podcast_id();
|
||||
let id = episode.rowid();
|
||||
|
||||
if let Err(err) = get_episode(&mut episode.into(), dir.as_str(), Some(prog)) {
|
||||
error!("Error while trying to download an episode");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
|
||||
{
|
||||
if let Ok(mut m) = ACTIVE_DOWNLOADS.write() {
|
||||
info!("Removed: {:?}", m.remove(&id));
|
||||
}
|
||||
}
|
||||
|
||||
// {
|
||||
// if let Ok(m) = ACTIVE_DOWNLOADS.read() {
|
||||
// debug!("ACTIVE DOWNLOADS: {:#?}", m);
|
||||
// }
|
||||
// }
|
||||
|
||||
sender
|
||||
.send(Action::RefreshEpisodesView)
|
||||
.expect("Action channel blew up.");
|
||||
sender
|
||||
.send(Action::RefreshWidgetIfSame(pid))
|
||||
.expect("Action channel blew up.");
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use hammond_data::{Episode, Source};
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::pipeline;
|
||||
use hammond_data::utils::get_download_folder;
|
||||
|
||||
use hammond_downloader::downloader::get_episode;
|
||||
|
||||
use std::{thread, time};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::sync::mpsc::channel;
|
||||
|
||||
#[test]
|
||||
// This test inserts an rss feed into your `XDG_DATA/hammond/hammond.db`, so we make it explicit
|
||||
// to run it.
|
||||
#[ignore]
|
||||
// THIS IS NOT A RELIABLE TEST
|
||||
// Just quick sanity check
|
||||
fn test_start_dl() {
|
||||
let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
|
||||
// Create and index a source
|
||||
let source = Source::from_url(url).unwrap();
|
||||
// Copy its id
|
||||
let sid = source.id();
|
||||
pipeline::run(vec![source], true).unwrap();
|
||||
|
||||
// Get the Podcast
|
||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
||||
let title = "Coming soon... The Tip Off";
|
||||
// Get an episode
|
||||
let episode: Episode = dbqueries::get_episode_from_pk(title, pd.id()).unwrap();
|
||||
|
||||
let (sender, _rx) = channel();
|
||||
|
||||
let download_fold = get_download_folder(&pd.title()).unwrap();
|
||||
add(episode.rowid(), download_fold.as_str(), sender).unwrap();
|
||||
assert_eq!(ACTIVE_DOWNLOADS.read().unwrap().len(), 1);
|
||||
|
||||
// Give it some time to download the file
|
||||
thread::sleep(time::Duration::from_secs(20));
|
||||
|
||||
let final_path = format!("{}/{}.mp3", &download_fold, episode.rowid());
|
||||
assert!(Path::new(&final_path).exists());
|
||||
fs::remove_file(final_path).unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_dl_steal_the_stars() {
|
||||
let url =
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars";
|
||||
// Create and index a source
|
||||
let source = Source::from_url(url).unwrap();
|
||||
// Copy its id
|
||||
let sid = source.id();
|
||||
pipeline::run(vec![source], true).unwrap();
|
||||
|
||||
// Get the Podcast
|
||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
||||
let title = "Introducing Steal the Stars";
|
||||
// Get an episode
|
||||
let mut episode = dbqueries::get_episode_from_pk(title, pd.id())
|
||||
.unwrap()
|
||||
.into();
|
||||
let download_fold = get_download_folder(&pd.title()).unwrap();
|
||||
|
||||
get_episode(&mut episode, &download_fold, None).unwrap();
|
||||
|
||||
let final_path = format!("{}/{}.mp3", &download_fold, episode.rowid());
|
||||
assert!(Path::new(&final_path).exists());
|
||||
fs::remove_file(final_path).unwrap();
|
||||
}
|
||||
}
|
||||
@ -1,94 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
use app::Action;
|
||||
use stacks::EpisodeStack;
|
||||
use stacks::ShowStack;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Content {
|
||||
stack: gtk::Stack,
|
||||
shows: Arc<ShowStack>,
|
||||
episodes: Arc<EpisodeStack>,
|
||||
sender: Sender<Action>,
|
||||
}
|
||||
|
||||
impl Content {
|
||||
pub fn new(sender: Sender<Action>) -> Result<Content, Error> {
|
||||
let stack = gtk::Stack::new();
|
||||
let episodes = Arc::new(EpisodeStack::new(sender.clone())?);
|
||||
let shows = Arc::new(ShowStack::new(sender.clone())?);
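// The titles passed to add_titled are what the StackSwitcher in the
// headerbar displays for each page.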
|
||||
|
||||
stack.add_titled(&episodes.get_stack(), "episodes", "Episodes");
|
||||
stack.add_titled(&shows.get_stack(), "shows", "Shows");
|
||||
|
||||
Ok(Content {
|
||||
stack,
|
||||
shows,
|
||||
episodes,
|
||||
sender,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn update(&self) {
|
||||
self.update_episode_view();
|
||||
self.update_shows_view();
|
||||
self.update_widget()
|
||||
}
|
||||
|
||||
// TODO: Maybe propagate the error?
|
||||
pub fn update_episode_view(&self) {
|
||||
if let Err(err) = self.episodes.update() {
|
||||
error!("Something went wrong while trying to update the episode view.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_episode_view_if_baground(&self) {
|
||||
if self.stack.get_visible_child_name() != Some("episodes".into()) {
|
||||
self.update_episode_view();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_shows_view(&self) {
|
||||
if let Err(err) = self.shows.update_podcasts() {
|
||||
error!("Something went wrong while trying to update the ShowsView.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_widget(&self) {
|
||||
if let Err(err) = self.shows.update_widget() {
|
||||
error!("Something went wrong while trying to update the Show Widget.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_widget_if_same(&self, pid: i32) {
|
||||
if let Err(err) = self.shows.update_widget_if_same(pid) {
|
||||
error!("Something went wrong while trying to update the Show Widget.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_widget_if_visible(&self) {
|
||||
if self.stack.get_visible_child_name() == Some("shows".to_string())
|
||||
&& self.shows.get_stack().get_visible_child_name() == Some("widget".to_string())
|
||||
{
|
||||
self.update_widget();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_stack(&self) -> gtk::Stack {
|
||||
self.stack.clone()
|
||||
}
|
||||
|
||||
pub fn get_shows(&self) -> Arc<ShowStack> {
|
||||
self.shows.clone()
|
||||
}
|
||||
}
|
||||
@ -1,77 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::Cast;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
use views::{EmptyView, EpisodesView};
|
||||
|
||||
use app::Action;
|
||||
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EpisodeStack {
|
||||
stack: gtk::Stack,
|
||||
sender: Sender<Action>,
|
||||
}
|
||||
|
||||
impl EpisodeStack {
|
||||
pub fn new(sender: Sender<Action>) -> Result<EpisodeStack, Error> {
|
||||
let episodes = EpisodesView::new(sender.clone())?;
|
||||
let empty = EmptyView::new();
|
||||
let stack = gtk::Stack::new();
|
||||
|
||||
stack.add_named(&episodes.container, "episodes");
|
||||
stack.add_named(&empty.container, "empty");
|
||||
|
||||
if episodes.is_empty() {
|
||||
stack.set_visible_child_name("empty");
|
||||
} else {
|
||||
stack.set_visible_child_name("episodes");
|
||||
}
|
||||
|
||||
Ok(EpisodeStack { stack, sender })
|
||||
}
|
||||
|
||||
// Look into refactoring to a state-machine.
|
||||
pub fn update(&self) -> Result<(), Error> {
|
||||
let old = self.stack
|
||||
.get_child_by_name("episodes")
|
||||
.ok_or_else(|| format_err!("Faild to get \"episodes\" child from the stack."))?
|
||||
.downcast::<gtk::Box>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a Box."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&old));
|
||||
|
||||
let scrolled_window = old.get_children()
|
||||
.first()
|
||||
.ok_or_else(|| format_err!("Box container has no childs."))?
|
||||
.clone()
|
||||
.downcast::<gtk::ScrolledWindow>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a ScrolledWindow."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&scrolled_window));
|
||||
|
||||
let eps = EpisodesView::new(self.sender.clone())?;
|
||||
// Copy the vertical scrollbar adjustment from the old view into the new one.
|
||||
scrolled_window
|
||||
.get_vadjustment()
|
||||
.map(|x| eps.set_vadjustment(&x));
|
||||
|
||||
self.stack.remove(&old);
|
||||
self.stack.add_named(&eps.container, "episodes");
|
||||
|
||||
if eps.is_empty() {
|
||||
self.stack.set_visible_child_name("empty");
|
||||
} else {
|
||||
self.stack.set_visible_child_name("episodes");
|
||||
}
|
||||
|
||||
old.destroy();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_stack(&self) -> gtk::Stack {
|
||||
self.stack.clone()
|
||||
}
|
||||
}
|
||||
@ -1,7 +0,0 @@
mod content;
mod episode;
mod show;

pub use self::content::Content;
pub use self::episode::EpisodeStack;
pub use self::show::ShowStack;
@ -1,180 +0,0 @@
|
||||
use gtk;
|
||||
use gtk::Cast;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
use hammond_data::Podcast;
|
||||
use hammond_data::dbqueries;
|
||||
|
||||
use views::{EmptyView, ShowsPopulated};
|
||||
|
||||
use app::Action;
|
||||
use widgets::ShowWidget;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ShowStack {
|
||||
stack: gtk::Stack,
|
||||
sender: Sender<Action>,
|
||||
}
|
||||
|
||||
impl ShowStack {
|
||||
pub fn new(sender: Sender<Action>) -> Result<ShowStack, Error> {
|
||||
let stack = gtk::Stack::new();
|
||||
|
||||
let show = ShowStack {
|
||||
stack,
|
||||
sender: sender.clone(),
|
||||
};
|
||||
|
||||
let pop = ShowsPopulated::new(sender.clone())?;
|
||||
let widget = ShowWidget::default();
|
||||
let empty = EmptyView::new();
|
||||
|
||||
show.stack.add_named(&pop.container, "podcasts");
|
||||
show.stack.add_named(&widget.container, "widget");
|
||||
show.stack.add_named(&empty.container, "empty");
|
||||
|
||||
if pop.is_empty() {
|
||||
show.stack.set_visible_child_name("empty")
|
||||
} else {
|
||||
show.stack.set_visible_child_name("podcasts")
|
||||
}
|
||||
|
||||
Ok(show)
|
||||
}
|
||||
|
||||
// pub fn update(&self) {
|
||||
// self.update_widget();
|
||||
// self.update_podcasts();
|
||||
// }
|
||||
|
||||
pub fn update_podcasts(&self) -> Result<(), Error> {
|
||||
let vis = self.stack
|
||||
.get_visible_child_name()
|
||||
.ok_or_else(|| format_err!("Failed to get visible child name."))?;
|
||||
|
||||
let old = self.stack
|
||||
.get_child_by_name("podcasts")
|
||||
.ok_or_else(|| format_err!("Faild to get \"podcasts\" child from the stack."))?
|
||||
.downcast::<gtk::Box>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a Box."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&old));
|
||||
|
||||
let scrolled_window = old.get_children()
|
||||
.first()
|
||||
.ok_or_else(|| format_err!("Box container has no childs."))?
|
||||
.clone()
|
||||
.downcast::<gtk::ScrolledWindow>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a ScrolledWindow."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&scrolled_window));
|
||||
|
||||
let pop = ShowsPopulated::new(self.sender.clone())?;
|
||||
// Copy the vertical scrollbar adjustment from the old view into the new one.
|
||||
scrolled_window
|
||||
.get_vadjustment()
|
||||
.map(|x| pop.set_vadjustment(&x));
|
||||
|
||||
self.stack.remove(&old);
|
||||
self.stack.add_named(&pop.container, "podcasts");
|
||||
|
||||
if pop.is_empty() {
|
||||
self.stack.set_visible_child_name("empty");
|
||||
} else if vis != "empty" {
|
||||
self.stack.set_visible_child_name(&vis);
|
||||
} else {
|
||||
self.stack.set_visible_child_name("podcasts");
|
||||
}
|
||||
|
||||
old.destroy();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn replace_widget(&self, pd: Arc<Podcast>) -> Result<(), Error> {
|
||||
let old = self.stack
|
||||
.get_child_by_name("widget")
|
||||
.ok_or_else(|| format_err!("Faild to get \"widget\" child from the stack."))?
|
||||
.downcast::<gtk::Box>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a Box."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&old));
|
||||
|
||||
let new = ShowWidget::new(pd, self.sender.clone());
|
||||
// Each composite ShowWidget is a gtk::Box with the Podcast.id encoded in the gtk::Widget
|
||||
// name. It's a hack since we can't yet subclass GObject easily.
|
||||
let oldid = WidgetExt::get_name(&old);
|
||||
let newid = WidgetExt::get_name(&new.container);
|
||||
debug!("Old widget Name: {:?}\nNew widget Name: {:?}", oldid, newid);
|
||||
|
||||
// Only copy the old scrollbar if both widgets represent the same podcast.
|
||||
if newid == oldid {
|
||||
let scrolled_window = old.get_children()
|
||||
.first()
|
||||
.ok_or_else(|| format_err!("Box container has no childs."))?
|
||||
.clone()
|
||||
.downcast::<gtk::ScrolledWindow>()
|
||||
.map_err(|_| format_err!("Failed to downcast stack child to a ScrolledWindow."))?;
|
||||
debug!("Name: {:?}", WidgetExt::get_name(&scrolled_window));
|
||||
|
||||
// Copy the vertical scrollbar adjustment from the old view into the new one.
|
||||
scrolled_window
|
||||
.get_vadjustment()
|
||||
.map(|x| new.set_vadjustment(&x));
|
||||
}
|
||||
|
||||
self.stack.remove(&old);
|
||||
self.stack.add_named(&new.container, "widget");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update_widget(&self) -> Result<(), Error> {
|
||||
let vis = self.stack
|
||||
.get_visible_child_name()
|
||||
.ok_or_else(|| format_err!("Failed to get visible child name."))?;
|
||||
let old = self.stack
|
||||
.get_child_by_name("widget")
|
||||
.ok_or_else(|| format_err!("Faild to get \"widget\" child from the stack."))?;
|
||||
|
||||
let id = WidgetExt::get_name(&old);
|
||||
if id == Some("GtkBox".to_string()) || id.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let id = id.ok_or_else(|| format_err!("Failed to get widget's name."))?;
|
||||
let pd = dbqueries::get_podcast_from_id(id.parse::<i32>()?)?;
|
||||
self.replace_widget(Arc::new(pd))?;
|
||||
self.stack.set_visible_child_name(&vis);
|
||||
old.destroy();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Only update the widget if its podcast_id is equal to pid.
|
||||
pub fn update_widget_if_same(&self, pid: i32) -> Result<(), Error> {
|
||||
let old = self.stack
|
||||
.get_child_by_name("widget")
|
||||
.ok_or_else(|| format_err!("Faild to get \"widget\" child from the stack."))?;
|
||||
|
||||
let id = WidgetExt::get_name(&old);
|
||||
if id != Some(pid.to_string()) || id.is_none() {
|
||||
debug!("Different widget. Early return");
|
||||
return Ok(());
|
||||
}
|
||||
self.update_widget()
|
||||
}
|
||||
|
||||
pub fn switch_podcasts_animated(&self) {
|
||||
self.stack
|
||||
.set_visible_child_full("podcasts", gtk::StackTransitionType::SlideRight);
|
||||
}
|
||||
|
||||
pub fn switch_widget_animated(&self) {
|
||||
self.stack
|
||||
.set_visible_child_full("widget", gtk::StackTransitionType::SlideLeft)
|
||||
}
|
||||
|
||||
pub fn get_stack(&self) -> gtk::Stack {
|
||||
self.stack.clone()
|
||||
}
|
||||
}
|
||||
@ -1,16 +0,0 @@
use gio::{resources_register, Error, Resource};
use glib::Bytes;

pub fn init() -> Result<(), Error> {
    // Load the gresource binary at build time and include/link it into the final binary.
    let res_bytes = include_bytes!("../resources/resources.gresource");

    // Create the Resource; it will live as long as the value lives.
    let gbytes = Bytes::from_static(res_bytes.as_ref());
    let resource = Resource::new_from_data(&gbytes)?;

    // Register the resource so it won't be dropped and will continue to live in memory.
    resources_register(&resource);

    Ok(())
}
@ -1,134 +0,0 @@
|
||||
#![cfg_attr(feature = "cargo-clippy", allow(type_complexity))]
|
||||
|
||||
use failure::Error;
|
||||
use gdk_pixbuf::Pixbuf;
|
||||
use send_cell::SendCell;
|
||||
|
||||
// use hammond_data::feed;
|
||||
use hammond_data::{PodcastCoverQuery, Source};
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::pipeline;
|
||||
use hammond_downloader::downloader;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::sync::{Mutex, RwLock};
|
||||
use std::sync::mpsc::Sender;
|
||||
use std::thread;
|
||||
|
||||
use app::Action;
|
||||
|
||||
pub fn refresh_feed_wrapper(source: Option<Vec<Source>>, sender: Sender<Action>) {
|
||||
if let Err(err) = refresh_feed(source, sender) {
|
||||
error!("An error occured while trying to update the feeds.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
/// Update the rss feed(s) originating from `source`.
|
||||
/// If `source` is None, Fetches all the `Source` entries in the database and updates them.
|
||||
/// When it's done, it queues up a `RefreshViews` action.
|
||||
fn refresh_feed(source: Option<Vec<Source>>, sender: Sender<Action>) -> Result<(), Error> {
|
||||
sender.send(Action::HeaderBarShowUpdateIndicator)?;
|
||||
|
||||
thread::spawn(move || {
|
||||
let mut sources = source.unwrap_or_else(|| {
|
||||
dbqueries::get_sources().expect("Failed to retrieve Sources from the database.")
|
||||
});
|
||||
|
||||
// Workaround to improve the feed addition experience.
|
||||
// Many times links to rss feeds are just redirects (usually to an https version).
|
||||
// Sadly I haven't yet figured out a nice way to follow link redirects without getting
|
||||
// to lifetime hell with futures and hyper.
|
||||
// So if the requested refresh is only for 1 feed, and the feed fails to be indexed
|
||||
// (as a 301 redirect would update the source entry and exit), another refresh is run.
|
||||
// For more see hammond_data/src/models/source.rs `fn request_constructor`.
|
||||
// Also ping me on IRC or open an issue if you want to tackle it.
|
||||
if sources.len() == 1 {
|
||||
let source = sources.remove(0);
|
||||
let id = source.id();
|
||||
if let Err(err) = pipeline::index_single_source(source, false) {
|
||||
error!("Error While trying to update the database.");
|
||||
error!("Error msg: {}", err);
|
||||
if let Ok(source) = dbqueries::get_source_from_id(id) {
|
||||
if let Err(err) = pipeline::index_single_source(source, false) {
|
||||
error!("Error While trying to update the database.");
|
||||
error!("Error msg: {}", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// This is what would normally run
|
||||
if let Err(err) = pipeline::run(sources, false) {
|
||||
error!("Error While trying to update the database.");
|
||||
error!("Error msg: {}", err);
|
||||
}
|
||||
}
|
||||
|
||||
sender
|
||||
.send(Action::HeaderBarHideUpdateIndicator)
|
||||
.expect("Action channel blew up.");
|
||||
sender
|
||||
.send(Action::RefreshAllViews)
|
||||
.expect("Action channel blew up.");
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
static ref CACHED_PIXBUFS: RwLock<HashMap<(i32, u32), Mutex<SendCell<Pixbuf>>>> = {
|
||||
RwLock::new(HashMap::new())
|
||||
};
|
||||
}
|
||||
|
||||
// Since gdk_pixbuf::Pixbuf is reference counted and every episode
|
||||
// uses the cover of its Podcast Feed/Show, we only create one Pixbuf
|
||||
// cover per show and pass around the Rc pointer.
|
||||
//
|
||||
// GObjects do not implement Send trait, so SendCell is a way around that.
|
||||
// Also lazy_static requires the Sync trait, which is what the Mutexes are for.
|
||||
// TODO: maybe use something that would just scale to requested size?
|
||||
pub fn get_pixbuf_from_path(pd: &PodcastCoverQuery, size: u32) -> Result<Pixbuf, Error> {
|
||||
{
|
||||
let hashmap = CACHED_PIXBUFS
|
||||
.read()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the pixbuf cache mutex."))?;
|
||||
if let Some(px) = hashmap.get(&(pd.id(), size)) {
|
||||
let m = px.lock()
|
||||
.map_err(|_| format_err!("Failed to lock pixbuf mutex."))?;
|
||||
return Ok(m.clone().into_inner());
|
||||
}
|
||||
}
|
||||
|
||||
let img_path = downloader::cache_image(pd)?;
|
||||
let px = Pixbuf::new_from_file_at_scale(&img_path, size as i32, size as i32, true)?;
|
||||
let mut hashmap = CACHED_PIXBUFS
|
||||
.write()
|
||||
.map_err(|_| format_err!("Failed to lock pixbuf mutex."))?;
|
||||
hashmap.insert((pd.id(), size), Mutex::new(SendCell::new(px.clone())));
|
||||
Ok(px)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use hammond_data::Source;
|
||||
use hammond_data::dbqueries;
|
||||
|
||||
#[test]
|
||||
// This test inserts an rss feed into your `XDG_DATA/hammond/hammond.db`, so we make it explicit
|
||||
// to run it.
|
||||
#[ignore]
|
||||
fn test_get_pixbuf_from_path() {
|
||||
let url = "https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff";
|
||||
// Create and index a source
|
||||
let source = Source::from_url(url).unwrap();
|
||||
// Copy its id
|
||||
let sid = source.id();
|
||||
pipeline::run(vec![source], true).unwrap();
|
||||
|
||||
// Get the Podcast
|
||||
let pd = dbqueries::get_podcast_from_source_id(sid).unwrap();
|
||||
let pxbuf = get_pixbuf_from_path(&pd.into(), 256);
|
||||
assert!(pxbuf.is_ok());
|
||||
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
use gtk;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EmptyView {
|
||||
pub container: gtk::Box,
|
||||
}
|
||||
|
||||
impl Default for EmptyView {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/empty_view.ui");
|
||||
let view: gtk::Box = builder.get_object("empty_view").unwrap();
|
||||
|
||||
EmptyView { container: view }
|
||||
}
|
||||
}
|
||||
|
||||
impl EmptyView {
|
||||
pub fn new() -> EmptyView {
|
||||
EmptyView::default()
|
||||
}
|
||||
}
|
||||
@ -1,234 +0,0 @@
|
||||
use chrono::prelude::*;
|
||||
use failure::Error;
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use hammond_data::EpisodeWidgetQuery;
|
||||
use hammond_data::dbqueries;
|
||||
|
||||
use app::Action;
|
||||
use utils::get_pixbuf_from_path;
|
||||
use widgets::EpisodeWidget;
|
||||
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum ListSplit {
|
||||
Today,
|
||||
Yday,
|
||||
Week,
|
||||
Month,
|
||||
Rest,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EpisodesView {
|
||||
pub container: gtk::Box,
|
||||
scrolled_window: gtk::ScrolledWindow,
|
||||
frame_parent: gtk::Box,
|
||||
today_box: gtk::Box,
|
||||
yday_box: gtk::Box,
|
||||
week_box: gtk::Box,
|
||||
month_box: gtk::Box,
|
||||
rest_box: gtk::Box,
|
||||
today_list: gtk::ListBox,
|
||||
yday_list: gtk::ListBox,
|
||||
week_list: gtk::ListBox,
|
||||
month_list: gtk::ListBox,
|
||||
rest_list: gtk::ListBox,
|
||||
}
|
||||
|
||||
impl Default for EpisodesView {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/episodes_view.ui");
|
||||
let container: gtk::Box = builder.get_object("container").unwrap();
|
||||
let scrolled_window: gtk::ScrolledWindow = builder.get_object("scrolled_window").unwrap();
|
||||
let frame_parent: gtk::Box = builder.get_object("frame_parent").unwrap();
|
||||
let today_box: gtk::Box = builder.get_object("today_box").unwrap();
|
||||
let yday_box: gtk::Box = builder.get_object("yday_box").unwrap();
|
||||
let week_box: gtk::Box = builder.get_object("week_box").unwrap();
|
||||
let month_box: gtk::Box = builder.get_object("month_box").unwrap();
|
||||
let rest_box: gtk::Box = builder.get_object("rest_box").unwrap();
|
||||
let today_list: gtk::ListBox = builder.get_object("today_list").unwrap();
|
||||
let yday_list: gtk::ListBox = builder.get_object("yday_list").unwrap();
|
||||
let week_list: gtk::ListBox = builder.get_object("week_list").unwrap();
|
||||
let month_list: gtk::ListBox = builder.get_object("month_list").unwrap();
|
||||
let rest_list: gtk::ListBox = builder.get_object("rest_list").unwrap();
|
||||
|
||||
EpisodesView {
|
||||
container,
|
||||
scrolled_window,
|
||||
frame_parent,
|
||||
today_box,
|
||||
yday_box,
|
||||
week_box,
|
||||
month_box,
|
||||
rest_box,
|
||||
today_list,
|
||||
yday_list,
|
||||
week_list,
|
||||
month_list,
|
||||
rest_list,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: REFACTOR ME
|
||||
impl EpisodesView {
|
||||
pub fn new(sender: Sender<Action>) -> Result<EpisodesView, Error> {
|
||||
let view = EpisodesView::default();
|
||||
let episodes = dbqueries::get_episodes_widgets_with_limit(50)?;
|
||||
let now_utc = Utc::now();
|
||||
|
||||
episodes.into_iter().for_each(|ep| {
|
||||
let epoch = ep.epoch();
|
||||
let viewep = EpisodesViewWidget::new(ep, sender.clone());
|
||||
|
||||
let t = split(&now_utc, i64::from(epoch));
|
||||
match t {
|
||||
ListSplit::Today => {
|
||||
view.today_list.add(&viewep.container);
|
||||
}
|
||||
ListSplit::Yday => {
|
||||
view.yday_list.add(&viewep.container);
|
||||
}
|
||||
ListSplit::Week => {
|
||||
view.week_list.add(&viewep.container);
|
||||
}
|
||||
ListSplit::Month => {
|
||||
view.month_list.add(&viewep.container);
|
||||
}
|
||||
ListSplit::Rest => {
|
||||
view.rest_list.add(&viewep.container);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if view.today_list.get_children().is_empty() {
|
||||
view.today_box.hide();
|
||||
}
|
||||
|
||||
if view.yday_list.get_children().is_empty() {
|
||||
view.yday_box.hide();
|
||||
}
|
||||
|
||||
if view.week_list.get_children().is_empty() {
|
||||
view.week_box.hide();
|
||||
}
|
||||
|
||||
if view.month_list.get_children().is_empty() {
|
||||
view.month_box.hide();
|
||||
}
|
||||
|
||||
if view.rest_list.get_children().is_empty() {
|
||||
view.rest_box.hide();
|
||||
}
|
||||
|
||||
view.container.show_all();
|
||||
Ok(view)
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
if !self.today_list.get_children().is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if !self.yday_list.get_children().is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if !self.week_list.get_children().is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if !self.month_list.get_children().is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
if !self.rest_list.get_children().is_empty() {
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
/// Set scrolled window vertical adjustment.
|
||||
pub fn set_vadjustment(&self, vadjustment: &gtk::Adjustment) {
|
||||
self.scrolled_window.set_vadjustment(vadjustment)
|
||||
}
|
||||
}
|
||||
|
||||
fn split(now: &DateTime<Utc>, epoch: i64) -> ListSplit {
|
||||
let ep = Utc.timestamp(epoch, 0);
|
||||
|
||||
if now.ordinal() == ep.ordinal() && now.year() == ep.year() {
|
||||
ListSplit::Today
|
||||
} else if now.ordinal() == ep.ordinal() + 1 && now.year() == ep.year() {
|
||||
ListSplit::Yday
|
||||
} else if now.iso_week().week() == ep.iso_week().week() && now.year() == ep.year() {
|
||||
ListSplit::Week
|
||||
} else if now.month() == ep.month() && now.year() == ep.year() {
|
||||
ListSplit::Month
|
||||
} else {
|
||||
ListSplit::Rest
|
||||
}
|
||||
}
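// A minimal illustrative test for `split` (a sketch added for clarity, not part
// of the original test suite); it assumes the same chrono 0.4 API used above.
// Note that the `Yday` arm compares ordinals within the same year, so an episode
// from 31 Dec falls into `Rest` once `now` rolls over to 1 Jan.
#[cfg(test)]
mod split_tests {
    use super::*;
    use chrono::Duration;

    #[test]
    fn split_buckets_relative_to_now() {
        // 2018-03-15 was a Thursday.
        let now = Utc.ymd(2018, 3, 15).and_hms(12, 0, 0);

        // Same calendar day -> Today.
        match split(&now, now.timestamp()) {
            ListSplit::Today => (),
            other => panic!("expected Today, got {:?}", other),
        }

        // Previous ordinal day, same year -> Yday.
        match split(&now, (now - Duration::days(1)).timestamp()) {
            ListSplit::Yday => (),
            other => panic!("expected Yday, got {:?}", other),
        }

        // Monday of the same ISO week, but not yesterday -> Week.
        let monday = Utc.ymd(2018, 3, 12).and_hms(12, 0, 0);
        match split(&now, monday.timestamp()) {
            ListSplit::Week => (),
            other => panic!("expected Week, got {:?}", other),
        }
    }
}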
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct EpisodesViewWidget {
|
||||
container: gtk::Box,
|
||||
image: gtk::Image,
|
||||
episode: gtk::Box,
|
||||
}
|
||||
|
||||
impl Default for EpisodesViewWidget {
|
||||
fn default() -> Self {
|
||||
let builder =
|
||||
gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/episodes_view_widget.ui");
|
||||
let container: gtk::Box = builder.get_object("container").unwrap();
|
||||
let image: gtk::Image = builder.get_object("cover").unwrap();
|
||||
let ep = EpisodeWidget::default();
|
||||
container.pack_start(&ep.container, true, true, 6);
|
||||
|
||||
EpisodesViewWidget {
|
||||
container,
|
||||
image,
|
||||
episode: ep.container,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EpisodesViewWidget {
|
||||
fn new(episode: EpisodeWidgetQuery, sender: Sender<Action>) -> EpisodesViewWidget {
|
||||
let builder =
|
||||
gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/episodes_view_widget.ui");
|
||||
let container: gtk::Box = builder.get_object("container").unwrap();
|
||||
let image: gtk::Image = builder.get_object("cover").unwrap();
|
||||
let pid = episode.podcast_id();
|
||||
let ep = EpisodeWidget::new(episode, sender.clone());
|
||||
|
||||
let view = EpisodesViewWidget {
|
||||
container,
|
||||
image,
|
||||
episode: ep.container,
|
||||
};
|
||||
|
||||
view.init(pid);
|
||||
view
|
||||
}
|
||||
|
||||
fn init(&self, podcast_id: i32) {
|
||||
if let Err(err) = self.set_cover(podcast_id) {
|
||||
error!("Failed to set a cover: {}", err)
|
||||
}
|
||||
|
||||
self.container.pack_start(&self.episode, true, true, 6);
|
||||
}
|
||||
|
||||
fn set_cover(&self, podcast_id: i32) -> Result<(), Error> {
|
||||
let pd = dbqueries::get_podcast_cover_from_id(podcast_id)?;
|
||||
let img = get_pixbuf_from_path(&pd, 64)?;
|
||||
self.image.set_from_pixbuf(&img);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -1,7 +0,0 @@
|
||||
mod shows;
|
||||
mod episodes;
|
||||
mod empty;
|
||||
|
||||
pub use self::empty::EmptyView;
|
||||
pub use self::episodes::EpisodesView;
|
||||
pub use self::shows::ShowsPopulated;
|
||||
@ -1,142 +0,0 @@
|
||||
use failure::Error;
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use hammond_data::Podcast;
|
||||
use hammond_data::dbqueries;
|
||||
|
||||
use app::Action;
|
||||
use utils::get_pixbuf_from_path;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ShowsPopulated {
|
||||
pub container: gtk::Box,
|
||||
scrolled_window: gtk::ScrolledWindow,
|
||||
flowbox: gtk::FlowBox,
|
||||
}
|
||||
|
||||
impl Default for ShowsPopulated {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/shows_view.ui");
|
||||
let container: gtk::Box = builder.get_object("fb_parent").unwrap();
|
||||
let scrolled_window: gtk::ScrolledWindow = builder.get_object("scrolled_window").unwrap();
|
||||
let flowbox: gtk::FlowBox = builder.get_object("flowbox").unwrap();
|
||||
|
||||
ShowsPopulated {
|
||||
container,
|
||||
scrolled_window,
|
||||
flowbox,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShowsPopulated {
|
||||
pub fn new(sender: Sender<Action>) -> Result<ShowsPopulated, Error> {
|
||||
let pop = ShowsPopulated::default();
|
||||
pop.init(sender)?;
|
||||
Ok(pop)
|
||||
}
|
||||
|
||||
pub fn init(&self, sender: Sender<Action>) -> Result<(), Error> {
|
||||
self.flowbox.connect_child_activated(move |_, child| {
|
||||
if let Err(err) = on_child_activate(child, sender.clone()) {
|
||||
error!(
|
||||
"Something went wrong during flowbox child activation: {}.",
|
||||
err
|
||||
)
|
||||
};
|
||||
});
|
||||
// Populate the flowbox with the Podcasts.
|
||||
self.populate_flowbox()
|
||||
}
|
||||
|
||||
fn populate_flowbox(&self) -> Result<(), Error> {
|
||||
let podcasts = dbqueries::get_podcasts()?;
|
||||
|
||||
podcasts
|
||||
.into_iter()
|
||||
.map(|pd| Arc::new(pd))
|
||||
.for_each(|parent| {
|
||||
let flowbox_child = ShowsChild::new(parent);
|
||||
self.flowbox.add(&flowbox_child.child);
|
||||
});
|
||||
self.flowbox.show_all();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.flowbox.get_children().is_empty()
|
||||
}
|
||||
|
||||
/// Set scrolled window vertical adjustment.
|
||||
pub fn set_vadjustment(&self, vadjustment: &gtk::Adjustment) {
|
||||
self.scrolled_window.set_vadjustment(vadjustment)
|
||||
}
|
||||
}
|
||||
|
||||
fn on_child_activate(child: &gtk::FlowBoxChild, sender: Sender<Action>) -> Result<(), Error> {
|
||||
use gtk::WidgetExt;
|
||||
|
||||
// This is such an ugly hack: the Podcast id is stored as the FlowBoxChild's
// widget name (set in ShowsChild::init) and parsed back out here.
let id = WidgetExt::get_name(child)
.ok_or_else(|| format_err!("Failed to get \"episodes\" child from the stack."))?
|
||||
.parse::<i32>()?;
|
||||
let pd = Arc::new(dbqueries::get_podcast_from_id(id)?);
|
||||
|
||||
sender.send(Action::HeaderBarShowTile(pd.title().into()))?;
|
||||
sender.send(Action::ReplaceWidget(pd))?;
|
||||
sender.send(Action::ShowWidgetAnimated)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ShowsChild {
|
||||
container: gtk::Box,
|
||||
cover: gtk::Image,
|
||||
child: gtk::FlowBoxChild,
|
||||
}
|
||||
|
||||
impl Default for ShowsChild {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/shows_child.ui");
|
||||
|
||||
let container: gtk::Box = builder.get_object("fb_child").unwrap();
|
||||
let cover: gtk::Image = builder.get_object("pd_cover").unwrap();
|
||||
|
||||
let child = gtk::FlowBoxChild::new();
|
||||
child.add(&container);
|
||||
|
||||
ShowsChild {
|
||||
container,
|
||||
cover,
|
||||
child,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShowsChild {
|
||||
pub fn new(pd: Arc<Podcast>) -> ShowsChild {
|
||||
let child = ShowsChild::default();
|
||||
child.init(pd);
|
||||
child
|
||||
}
|
||||
|
||||
fn init(&self, pd: Arc<Podcast>) {
|
||||
self.container.set_tooltip_text(pd.title());
|
||||
|
||||
if let Err(err) = self.set_cover(pd.clone()) {
|
||||
error!("Failed to set a cover: {}", err)
|
||||
}
|
||||
|
||||
WidgetExt::set_name(&self.child, &pd.id().to_string());
|
||||
}
|
||||
|
||||
fn set_cover(&self, pd: Arc<Podcast>) -> Result<(), Error> {
|
||||
let image = get_pixbuf_from_path(&pd.clone().into(), 256)?;
|
||||
self.cover.set_from_pixbuf(&image);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -1,433 +0,0 @@
|
||||
use glib;
|
||||
use gtk;
|
||||
|
||||
use chrono::prelude::*;
|
||||
use gtk::prelude::*;
|
||||
|
||||
use chrono::Duration;
|
||||
use failure::Error;
|
||||
use humansize::{file_size_opts as size_opts, FileSize};
|
||||
use open;
|
||||
|
||||
use hammond_data::{EpisodeWidgetQuery, Podcast};
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::utils::get_download_folder;
|
||||
|
||||
use app::Action;
|
||||
use manager;
|
||||
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::sync::mpsc::Sender;
|
||||
|
||||
lazy_static! {
|
||||
static ref SIZE_OPTS: Arc<size_opts::FileSizeOpts> = {
|
||||
// Declare a custom humansize option struct
|
||||
// See: https://docs.rs/humansize/1.0.2/humansize/file_size_opts/struct.FileSizeOpts.html
|
||||
Arc::new(size_opts::FileSizeOpts {
|
||||
divider: size_opts::Kilo::Binary,
|
||||
units: size_opts::Kilo::Decimal,
|
||||
decimal_places: 0,
|
||||
decimal_zeroes: 0,
|
||||
fixed_at: size_opts::FixedAt::No,
|
||||
long_units: false,
|
||||
space: true,
|
||||
suffix: "",
|
||||
allow_negative: false,
|
||||
})
|
||||
};
|
||||
|
||||
static ref NOW: DateTime<Utc> = Utc::now();
|
||||
}
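// Rough illustration of the options above (a hypothetical example, not code from
// the project): `divider: Kilo::Binary` divides by 1024 while `units: Kilo::Decimal`
// keeps the plain KB/MB/GB suffixes, so with zero decimal places something like
// 52_428_800.file_size(SIZE_OPTS.clone()) should come out as roughly "50 MB".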
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EpisodeWidget {
|
||||
pub container: gtk::Box,
|
||||
play: gtk::Button,
|
||||
download: gtk::Button,
|
||||
cancel: gtk::Button,
|
||||
title: gtk::Label,
|
||||
date: gtk::Label,
|
||||
duration: gtk::Label,
|
||||
progress: gtk::ProgressBar,
|
||||
total_size: gtk::Label,
|
||||
local_size: gtk::Label,
|
||||
separator1: gtk::Label,
|
||||
separator2: gtk::Label,
|
||||
prog_separator: gtk::Label,
|
||||
}
|
||||
|
||||
impl Default for EpisodeWidget {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/episode_widget.ui");
|
||||
|
||||
let container: gtk::Box = builder.get_object("episode_container").unwrap();
|
||||
let progress: gtk::ProgressBar = builder.get_object("progress_bar").unwrap();
|
||||
|
||||
let download: gtk::Button = builder.get_object("download_button").unwrap();
|
||||
let play: gtk::Button = builder.get_object("play_button").unwrap();
|
||||
let cancel: gtk::Button = builder.get_object("cancel_button").unwrap();
|
||||
|
||||
let title: gtk::Label = builder.get_object("title_label").unwrap();
|
||||
let date: gtk::Label = builder.get_object("date_label").unwrap();
|
||||
let duration: gtk::Label = builder.get_object("duration_label").unwrap();
|
||||
let local_size: gtk::Label = builder.get_object("local_size").unwrap();
|
||||
let total_size: gtk::Label = builder.get_object("total_size").unwrap();
|
||||
|
||||
let separator1: gtk::Label = builder.get_object("separator1").unwrap();
|
||||
let separator2: gtk::Label = builder.get_object("separator2").unwrap();
|
||||
let prog_separator: gtk::Label = builder.get_object("prog_separator").unwrap();
|
||||
|
||||
EpisodeWidget {
|
||||
container,
|
||||
progress,
|
||||
download,
|
||||
play,
|
||||
cancel,
|
||||
title,
|
||||
duration,
|
||||
date,
|
||||
total_size,
|
||||
local_size,
|
||||
separator1,
|
||||
separator2,
|
||||
prog_separator,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EpisodeWidget {
|
||||
pub fn new(episode: EpisodeWidgetQuery, sender: Sender<Action>) -> EpisodeWidget {
|
||||
let widget = EpisodeWidget::default();
|
||||
widget.init(episode, sender);
|
||||
widget
|
||||
}
|
||||
|
||||
fn init(&self, episode: EpisodeWidgetQuery, sender: Sender<Action>) {
|
||||
WidgetExt::set_name(&self.container, &episode.rowid().to_string());
|
||||
|
||||
// Set the title label state.
|
||||
self.set_title(&episode);
|
||||
|
||||
// Set the duration label.
|
||||
self.set_duration(episode.duration());
|
||||
|
||||
// Set the date label.
|
||||
self.set_date(episode.epoch());
|
||||
|
||||
// Show or hide the play/delete/download buttons upon widget initialization.
|
||||
if let Err(err) = self.show_buttons(episode.local_uri()) {
|
||||
debug!("Failed to determine play/download button state.");
|
||||
debug!("Error: {}", err);
|
||||
}
|
||||
|
||||
// Set the size label.
|
||||
if let Err(err) = self.set_total_size(episode.length()) {
|
||||
error!("Failed to set the Size label.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
|
||||
// Determine what the state of the progress bar should be.
|
||||
if let Err(err) = self.determine_progess_bar() {
|
||||
error!("Something went wrong determining the ProgressBar State.");
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
|
||||
let episode = Arc::new(Mutex::new(episode));
|
||||
|
||||
let title = self.title.clone();
|
||||
self.play
|
||||
.connect_clicked(clone!(episode, sender => move |_| {
|
||||
if let Ok(mut ep) = episode.lock() {
|
||||
if let Err(err) = on_play_bttn_clicked(&mut ep, &title, sender.clone()){
|
||||
error!("Error: {}", err);
|
||||
};
|
||||
}
|
||||
}));
|
||||
|
||||
self.download
|
||||
.connect_clicked(clone!(episode, sender => move |dl| {
|
||||
dl.set_sensitive(false);
|
||||
if let Ok(ep) = episode.lock() {
|
||||
if let Err(err) = on_download_clicked(&ep, sender.clone()) {
|
||||
error!("Download failed to start.");
|
||||
error!("Error: {}", err);
|
||||
} else {
|
||||
info!("Download started successfully.");
|
||||
}
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
/// Show or hide the play/delete/download buttons upon widget initialization.
|
||||
fn show_buttons(&self, local_uri: Option<&str>) -> Result<(), Error> {
|
||||
let path = local_uri.ok_or_else(|| format_err!("Path is None"))?;
|
||||
if Path::new(path).exists() {
|
||||
self.download.hide();
|
||||
self.play.show();
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Determine the title state.
|
||||
fn set_title(&self, episode: &EpisodeWidgetQuery) {
|
||||
self.title.set_text(episode.title());
|
||||
|
||||
// Grey out the title if the episode is played.
|
||||
if episode.played().is_some() {
|
||||
self.title
|
||||
.get_style_context()
|
||||
.map(|c| c.add_class("dim-label"));
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the date label depending on the current time.
|
||||
fn set_date(&self, epoch: i32) {
|
||||
let date = Utc.timestamp(i64::from(epoch), 0);
|
||||
if NOW.year() == date.year() {
|
||||
self.date.set_text(date.format("%e %b").to_string().trim());
|
||||
} else {
|
||||
self.date
|
||||
.set_text(date.format("%e %b %Y").to_string().trim());
|
||||
};
|
||||
}
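// For example (illustrative, not a comment from the codebase): an episode dated
// 3 Feb of the current year renders as "3 Feb", while one from 2017 renders as
// "3 Feb 2017"; the trim() strips the leading padding that %e adds to
// single-digit days.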
|
||||
|
||||
/// Set the duration label.
|
||||
fn set_duration(&self, seconds: Option<i32>) -> Option<()> {
|
||||
let minutes = Duration::seconds(seconds?.into()).num_minutes();
|
||||
if minutes == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
self.duration.set_text(&format!("{} min", minutes));
|
||||
self.duration.show();
|
||||
self.separator1.show();
|
||||
Some(())
|
||||
}
|
||||
|
||||
/// Set the Episode label depending on its size
|
||||
fn set_total_size(&self, bytes: Option<i32>) -> Result<(), Error> {
|
||||
let size = bytes.ok_or_else(|| format_err!("Size is None."))?;
|
||||
if size == 0 {
|
||||
bail!("Size is 0.");
|
||||
}
|
||||
|
||||
let s = size.file_size(SIZE_OPTS.clone())
|
||||
.map_err(|err| format_err!("{}", err))?;
|
||||
self.total_size.set_text(&s);
|
||||
self.total_size.show();
|
||||
self.separator2.show();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// FIXME: REFACTOR ME
|
||||
// Something Something State-Machine?
|
||||
fn determine_progess_bar(&self) -> Result<(), Error> {
|
||||
let id = WidgetExt::get_name(&self.container)
|
||||
.ok_or_else(|| format_err!("Failed to get widget Name"))?
|
||||
.parse::<i32>()?;
|
||||
|
||||
let active_dl = || -> Result<Option<_>, Error> {
|
||||
let m = manager::ACTIVE_DOWNLOADS
|
||||
.read()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the mutex."))?;
|
||||
|
||||
Ok(m.get(&id).cloned())
|
||||
}()?;
|
||||
|
||||
if let Some(prog) = active_dl {
|
||||
// FIXME: Document me?
|
||||
self.download.hide();
|
||||
self.progress.show();
|
||||
self.local_size.show();
|
||||
self.total_size.show();
|
||||
self.separator2.show();
|
||||
self.prog_separator.show();
|
||||
self.cancel.show();
|
||||
|
||||
let progress_bar = self.progress.clone();
|
||||
let total_size = self.total_size.clone();
|
||||
let local_size = self.local_size.clone();
|
||||
|
||||
// Setup a callback that will update the progress bar.
|
||||
update_progressbar_callback(prog.clone(), id, &progress_bar, &local_size);
|
||||
|
||||
// Setup a callback that will update the total_size label
|
||||
// with the http ContentLength header number rather than
|
||||
// relying on the RSS feed.
|
||||
update_total_size_callback(prog.clone(), &total_size);
|
||||
|
||||
self.cancel.connect_clicked(clone!(prog => move |cancel| {
|
||||
if let Ok(mut m) = prog.lock() {
|
||||
m.cancel();
|
||||
cancel.set_sensitive(false);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn on_download_clicked(ep: &EpisodeWidgetQuery, sender: Sender<Action>) -> Result<(), Error> {
|
||||
let pd = dbqueries::get_podcast_from_id(ep.podcast_id())?;
|
||||
let download_fold = get_download_folder(&pd.title().to_owned())?;
|
||||
|
||||
// Start a new download.
|
||||
manager::add(ep.rowid(), &download_fold, sender.clone())?;
|
||||
|
||||
// Update Views
|
||||
sender.send(Action::RefreshEpisodesView)?;
|
||||
sender.send(Action::RefreshWidgetIfVis)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn on_play_bttn_clicked(
|
||||
episode: &mut EpisodeWidgetQuery,
|
||||
title: &gtk::Label,
|
||||
sender: Sender<Action>,
|
||||
) -> Result<(), Error> {
|
||||
open_uri(episode.rowid())?;
|
||||
|
||||
if episode.set_played_now().is_ok() {
|
||||
title.get_style_context().map(|c| c.add_class("dim-label"));
|
||||
sender.send(Action::RefreshEpisodesViewBGR)?;
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn open_uri(rowid: i32) -> Result<(), Error> {
|
||||
let uri = dbqueries::get_episode_local_uri_from_id(rowid)?
|
||||
.ok_or_else(|| format_err!("Expected Some found None."))?;
|
||||
|
||||
if Path::new(&uri).exists() {
|
||||
info!("Opening {}", uri);
|
||||
open::that(&uri)?;
|
||||
} else {
|
||||
bail!("File \"{}\" does not exist.", uri);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Setup a callback that will update the progress bar.
|
||||
#[cfg_attr(feature = "cargo-clippy", allow(if_same_then_else))]
|
||||
fn update_progressbar_callback(
|
||||
prog: Arc<Mutex<manager::Progress>>,
|
||||
episode_rowid: i32,
|
||||
progress_bar: &gtk::ProgressBar,
|
||||
local_size: &gtk::Label,
|
||||
) {
|
||||
timeout_add(
|
||||
400,
|
||||
clone!(prog, progress_bar, progress_bar, local_size=> move || {
|
||||
progress_bar_helper(prog.clone(), episode_rowid, &progress_bar, &local_size)
|
||||
.unwrap_or(glib::Continue(false))
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
fn progress_bar_helper(
|
||||
prog: Arc<Mutex<manager::Progress>>,
|
||||
episode_rowid: i32,
|
||||
progress_bar: &gtk::ProgressBar,
|
||||
local_size: &gtk::Label,
|
||||
) -> Result<glib::Continue, Error> {
|
||||
let (fraction, downloaded) = {
|
||||
let m = prog.lock()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the mutex."))?;
|
||||
(m.get_fraction(), m.get_downloaded())
|
||||
};
|
||||
|
||||
// Update local_size label
|
||||
downloaded
|
||||
.file_size(SIZE_OPTS.clone())
|
||||
.map_err(|err| format_err!("{}", err))
|
||||
.map(|x| local_size.set_text(&x))?;
|
||||
|
||||
// I hate floating points.
|
||||
// Update the progress_bar.
|
||||
if (fraction >= 0.0) && (fraction <= 1.0) && (!fraction.is_nan()) {
|
||||
progress_bar.set_fraction(fraction);
|
||||
}
|
||||
|
||||
// info!("Fraction: {}", progress_bar.get_fraction());
|
||||
// info!("Fraction: {}", fraction);
|
||||
|
||||
// Check if the download is still active
|
||||
let active = {
|
||||
let m = manager::ACTIVE_DOWNLOADS
|
||||
.read()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the mutex."))?;
|
||||
m.contains_key(&episode_rowid)
|
||||
};
|
||||
|
||||
if (fraction >= 1.0) && (!fraction.is_nan()) {
|
||||
Ok(glib::Continue(false))
|
||||
} else if !active {
|
||||
Ok(glib::Continue(false))
|
||||
} else {
|
||||
Ok(glib::Continue(true))
|
||||
}
|
||||
}
|
||||
|
||||
// Setup a callback that will update the total_size label
|
||||
// with the http ContentLength header number rather than
|
||||
// relying on the RSS feed.
|
||||
fn update_total_size_callback(prog: Arc<Mutex<manager::Progress>>, total_size: &gtk::Label) {
|
||||
timeout_add(
|
||||
500,
|
||||
clone!(prog, total_size => move || {
|
||||
total_size_helper(prog.clone(), &total_size).unwrap_or(glib::Continue(true))
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
fn total_size_helper(
|
||||
prog: Arc<Mutex<manager::Progress>>,
|
||||
total_size: &gtk::Label,
|
||||
) -> Result<glib::Continue, Error> {
|
||||
// Get the total_bytes.
|
||||
let total_bytes = {
|
||||
let m = prog.lock()
|
||||
.map_err(|_| format_err!("Failed to get a lock on the mutex."))?;
|
||||
m.get_total_size()
|
||||
};
|
||||
|
||||
debug!("Total Size: {}", total_bytes);
|
||||
if total_bytes != 0 {
|
||||
// Update the total_size label
|
||||
total_bytes
|
||||
.file_size(SIZE_OPTS.clone())
|
||||
.map_err(|err| format_err!("{}", err))
|
||||
.map(|x| total_size.set_text(&x))?;
|
||||
// Do not call the callback again
|
||||
Ok(glib::Continue(false))
|
||||
} else {
|
||||
Ok(glib::Continue(true))
|
||||
}
|
||||
}
|
||||
|
||||
// fn on_delete_bttn_clicked(episode_id: i32) -> Result<(), Error> {
|
||||
// let mut ep = dbqueries::get_episode_from_rowid(episode_id)?.into();
|
||||
// delete_local_content(&mut ep).map_err(From::from).map(|_| ())
|
||||
// }
|
||||
|
||||
pub fn episodes_listbox(pd: &Podcast, sender: Sender<Action>) -> Result<gtk::ListBox, Error> {
|
||||
let episodes = dbqueries::get_pd_episodeswidgets(pd)?;
|
||||
|
||||
let list = gtk::ListBox::new();
|
||||
|
||||
episodes.into_iter().for_each(|ep| {
|
||||
let widget = EpisodeWidget::new(ep, sender.clone());
|
||||
list.add(&widget.container);
|
||||
});
|
||||
|
||||
list.set_vexpand(false);
|
||||
list.set_hexpand(false);
|
||||
list.set_visible(true);
|
||||
list.set_selection_mode(gtk::SelectionMode::None);
|
||||
Ok(list)
|
||||
}
|
||||
@ -1,5 +0,0 @@
|
||||
mod show;
|
||||
mod episode;
|
||||
|
||||
pub use self::episode::EpisodeWidget;
|
||||
pub use self::show::ShowWidget;
|
||||
@ -1,149 +0,0 @@
|
||||
use dissolve;
|
||||
use failure::Error;
|
||||
use gtk;
|
||||
use gtk::prelude::*;
|
||||
use open;
|
||||
|
||||
use hammond_data::Podcast;
|
||||
use hammond_data::dbqueries;
|
||||
use hammond_data::utils::{delete_show, replace_extra_spaces};
|
||||
|
||||
use app::Action;
|
||||
use utils::get_pixbuf_from_path;
|
||||
use widgets::episode::episodes_listbox;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::sync::mpsc::Sender;
|
||||
use std::thread;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ShowWidget {
|
||||
pub container: gtk::Box,
|
||||
scrolled_window: gtk::ScrolledWindow,
|
||||
cover: gtk::Image,
|
||||
description: gtk::Label,
|
||||
link: gtk::Button,
|
||||
settings: gtk::MenuButton,
|
||||
unsub: gtk::Button,
|
||||
episodes: gtk::Frame,
|
||||
}
|
||||
|
||||
impl Default for ShowWidget {
|
||||
fn default() -> Self {
|
||||
let builder = gtk::Builder::new_from_resource("/org/gnome/hammond/gtk/show_widget.ui");
|
||||
let container: gtk::Box = builder.get_object("container").unwrap();
|
||||
let scrolled_window: gtk::ScrolledWindow = builder.get_object("scrolled_window").unwrap();
|
||||
let episodes: gtk::Frame = builder.get_object("episodes").unwrap();
|
||||
|
||||
let cover: gtk::Image = builder.get_object("cover").unwrap();
|
||||
let description: gtk::Label = builder.get_object("description").unwrap();
|
||||
let unsub: gtk::Button = builder.get_object("unsub_button").unwrap();
|
||||
let link: gtk::Button = builder.get_object("link_button").unwrap();
|
||||
let settings: gtk::MenuButton = builder.get_object("settings_button").unwrap();
|
||||
|
||||
ShowWidget {
|
||||
container,
|
||||
scrolled_window,
|
||||
cover,
|
||||
description,
|
||||
unsub,
|
||||
link,
|
||||
settings,
|
||||
episodes,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShowWidget {
|
||||
pub fn new(pd: Arc<Podcast>, sender: Sender<Action>) -> ShowWidget {
|
||||
let pdw = ShowWidget::default();
|
||||
pdw.init(pd, sender);
|
||||
pdw
|
||||
}
|
||||
|
||||
pub fn init(&self, pd: Arc<Podcast>, sender: Sender<Action>) {
|
||||
// Hacky workaround so the pd.id() can be retrieved from the `ShowStack`.
|
||||
WidgetExt::set_name(&self.container, &pd.id().to_string());
|
||||
|
||||
self.unsub
|
||||
.connect_clicked(clone!(pd, sender => move |bttn| {
|
||||
if let Err(err) = on_unsub_button_clicked(pd.clone(), bttn, sender.clone()) {
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
}));
|
||||
|
||||
self.setup_listbox(&pd, sender.clone());
|
||||
self.set_description(pd.description());
|
||||
|
||||
if let Err(err) = self.set_cover(pd.clone()) {
|
||||
error!("Failed to set a cover: {}", err)
|
||||
}
|
||||
|
||||
let link = pd.link().to_owned();
|
||||
self.link.set_tooltip_text(Some(link.as_str()));
|
||||
self.link.connect_clicked(move |_| {
|
||||
info!("Opening link: {}", &link);
|
||||
if let Err(err) = open::that(&link) {
|
||||
error!("Failed to open link: {}", &link);
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Populate the listbox with the shows episodes.
|
||||
fn setup_listbox(&self, pd: &Podcast, sender: Sender<Action>) {
|
||||
let listbox = episodes_listbox(pd, sender.clone());
|
||||
listbox.ok().map(|l| self.episodes.add(&l));
|
||||
}
|
||||
|
||||
/// Set the show cover.
|
||||
fn set_cover(&self, pd: Arc<Podcast>) -> Result<(), Error> {
|
||||
let image = get_pixbuf_from_path(&pd.into(), 128)?;
|
||||
self.cover.set_from_pixbuf(&image);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Set the description text.
|
||||
fn set_description(&self, text: &str) {
|
||||
// TODO: Temporary solution until we render HTML urls/bold/italic, probably with markup.
|
||||
let desc = dissolve::strip_html_tags(text).join(" ");
|
||||
self.description.set_text(&replace_extra_spaces(&desc));
|
||||
}
|
||||
|
||||
/// Set scrolled window vertical adjustment.
|
||||
pub fn set_vadjustment(&self, vadjustment: &gtk::Adjustment) {
|
||||
self.scrolled_window.set_vadjustment(vadjustment)
|
||||
}
|
||||
}
|
||||
|
||||
fn on_unsub_button_clicked(
|
||||
pd: Arc<Podcast>,
|
||||
unsub_button: &gtk::Button,
|
||||
sender: Sender<Action>,
|
||||
) -> Result<(), Error> {
|
||||
// Hack to get away without properly checking for None;
// if pressed twice it would panic.
|
||||
unsub_button.hide();
|
||||
// Spawn a thread so it won't block the ui.
|
||||
thread::spawn(move || {
|
||||
if let Err(err) = delete_show(&pd) {
|
||||
error!("Something went wrong trying to remove {}", pd.title());
|
||||
error!("Error: {}", err);
|
||||
}
|
||||
});
|
||||
|
||||
sender.send(Action::HeaderBarNormal)?;
|
||||
sender.send(Action::ShowShowsAnimated)?;
|
||||
// Queue a refresh after the switch to avoid blocking the db.
|
||||
sender.send(Action::RefreshShowsView)?;
|
||||
sender.send(Action::RefreshEpisodesView)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn on_played_button_clicked(pd: &Podcast, sender: Sender<Action>) -> Result<(), Error> {
|
||||
dbqueries::update_none_to_played_now(pd)?;
|
||||
sender.send(Action::RefreshWidget)?;
|
||||
Ok(())
|
||||
}
|
||||
106
meson.build
@ -1,38 +1,90 @@
|
||||
# Adapted from:
|
||||
# https://gitlab.gnome.org/danigm/fractal/blob/6e2911f9d2353c99a18a6c19fab7f903c4bbb431/meson.build
|
||||
|
||||
project(
|
||||
'hammond', 'rust',
|
||||
version: '0.3.0',
|
||||
'gnome-podcasts', 'rust',
|
||||
version: '0.4.7',
|
||||
license: 'GPLv3',
|
||||
)
|
||||
|
||||
hammond_version = meson.project_version()
|
||||
version_array = hammond_version.split('.')
|
||||
hammond_major_version = version_array[0].to_int()
|
||||
hammond_minor_version = version_array[1].to_int()
|
||||
hammond_version_micro = version_array[2].to_int()
|
||||
dependency('sqlite3', version: '>= 3.20')
|
||||
dependency('openssl', version: '>= 1.0')
|
||||
dependency('dbus-1')
|
||||
|
||||
hammond_prefix = get_option('prefix')
|
||||
hammond_bindir = join_paths(hammond_prefix, get_option('bindir'))
|
||||
dependency('glib-2.0', version: '>= 2.56')
|
||||
dependency('gio-2.0', version: '>= 2.56')
|
||||
dependency('gdk-pixbuf-2.0')
|
||||
dependency('gtk+-3.0', version: '>= 3.24.11')
|
||||
dependency('libhandy-0.0', version: '>= 0.0.13')
|
||||
|
||||
dependency('gstreamer-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-base-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-audio-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-video-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-player-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-plugins-base-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-plugins-bad-1.0', version: '>= 1.16')
|
||||
dependency('gstreamer-bad-audio-1.0', version: '>= 1.16')
|
||||
|
||||
cargo = find_program('cargo', required: true)
|
||||
gresource = find_program('glib-compile-resources', required: true)
|
||||
gschemas = find_program('glib-compile-schemas', required: true)
|
||||
|
||||
if get_option('profile') == 'development'
|
||||
profile = '.Devel'
|
||||
vcs_tag = run_command('git', 'rev-parse', '--short', 'HEAD').stdout().strip()
|
||||
if vcs_tag == ''
|
||||
version_suffix = '-devel'
|
||||
else
|
||||
version_suffix = '-@0@'.format (vcs_tag)
|
||||
endif
|
||||
else
|
||||
profile = ''
|
||||
version_suffix = ''
|
||||
endif
|
||||
|
||||
podcast_toml = files(
|
||||
'Cargo.toml',
|
||||
'Cargo.lock',
|
||||
'podcasts-data/Cargo.toml',
|
||||
'podcasts-downloader/Cargo.toml',
|
||||
'podcasts-gtk/Cargo.toml',
|
||||
)
|
||||
|
||||
application_id = 'org.gnome.Podcasts@0@'.format(profile)
|
||||
i18n = import('i18n')
|
||||
gnome = import('gnome')
|
||||
|
||||
subdir('podcasts-gtk/po')
|
||||
podir = join_paths (meson.source_root (), 'podcasts-gtk', 'po')
|
||||
|
||||
podcasts_version = meson.project_version()
|
||||
|
||||
podcasts_prefix = get_option('prefix')
|
||||
podcasts_bindir = join_paths(podcasts_prefix, get_option('bindir'))
|
||||
podcasts_localedir = join_paths(podcasts_prefix, get_option('localedir'))
|
||||
|
||||
podcasts_conf = configuration_data()
|
||||
podcasts_conf.set('appid', application_id)
|
||||
podcasts_conf.set('bindir', podcasts_bindir)
|
||||
|
||||
datadir = get_option('datadir')
|
||||
icondir = join_paths(datadir, 'icons')
|
||||
subdir('hammond-gtk/resources')
|
||||
subdir('podcasts-gtk/resources')
|
||||
|
||||
cargo = find_program('cargo', required: false)
|
||||
gresource = find_program('glib-compile-resources', required: false)
|
||||
cargo_vendor = find_program('cargo-vendor', required: false)
|
||||
cargo_script = find_program('scripts/cargo.sh')
|
||||
test_script = find_program('scripts/test.sh')
|
||||
|
||||
cargo_release = custom_target('cargo-build',
|
||||
build_by_default: true,
|
||||
build_always: true,
|
||||
output: ['hammond'],
|
||||
install: true,
|
||||
install_dir: hammond_bindir,
|
||||
command: [cargo_script, '@CURRENT_SOURCE_DIR@', '@OUTPUT@'])
|
||||
subdir('podcasts-data/src')
|
||||
subdir('podcasts-downloader/src')
|
||||
subdir('podcasts-gtk/src')
|
||||
|
||||
run_target('release', command: ['scripts/release.sh',
|
||||
meson.project_name() + '-' + hammond_version
|
||||
])
|
||||
meson.add_dist_script(
|
||||
'scripts/dist-vendor.sh',
|
||||
meson.source_root(),
|
||||
join_paths(meson.build_root(), 'meson-dist', meson.project_name() + '-' + podcasts_version)
|
||||
)
|
||||
|
||||
test(
|
||||
'cargo-test',
|
||||
test_script,
|
||||
args: meson.build_root(),
|
||||
workdir: meson.source_root(),
|
||||
timeout: 3000
|
||||
)
|
||||
|
||||
9
meson_options.txt
Normal file
@ -0,0 +1,9 @@
|
||||
option (
|
||||
'profile',
|
||||
type: 'combo',
|
||||
choices: [
|
||||
'default',
|
||||
'development'
|
||||
],
|
||||
value: 'default'
|
||||
)
|
||||
@ -1,43 +0,0 @@
|
||||
{
|
||||
"app-id" : "org.gnome.Hammond",
|
||||
"runtime" : "org.gnome.Platform",
|
||||
"runtime-version" : "master",
|
||||
"sdk" : "org.gnome.Sdk",
|
||||
"sdk-extensions" : [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command" : "hammond",
|
||||
"tags" : [
|
||||
"nightly"
|
||||
],
|
||||
"desktop-file-name-prefix" : "(Nightly) ",
|
||||
"finish-args" : [
|
||||
"--share=network",
|
||||
"--share=ipc",
|
||||
"--socket=x11",
|
||||
"--socket=wayland",
|
||||
"--talk-name=org.freedesktop.Desktop"
|
||||
],
|
||||
"build-options" : {
|
||||
"append-path" : "/usr/lib/sdk/rust-stable/bin",
|
||||
"build-args" : [
|
||||
"--share=network"
|
||||
],
|
||||
"env" : {
|
||||
"CARGO_HOME" : "/run/build/Hammond/cargo"
|
||||
}
|
||||
},
|
||||
"modules" : [
|
||||
{
|
||||
"name" : "Hammond",
|
||||
"buildsystem" : "meson",
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://gitlab.gnome.org/alatiera/Hammond.git",
|
||||
"branch" : "master"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
72
org.gnome.Podcasts.Devel.json
Normal file
@ -0,0 +1,72 @@
|
||||
{
|
||||
"app-id" : "org.gnome.Podcasts.Devel",
|
||||
"runtime" : "org.gnome.Platform",
|
||||
"runtime-version" : "3.36",
|
||||
"sdk" : "org.gnome.Sdk",
|
||||
"sdk-extensions" : [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command" : "gnome-podcasts",
|
||||
"tags" : [
|
||||
"nightly"
|
||||
],
|
||||
"finish-args" : [
|
||||
"--share=network",
|
||||
"--share=ipc",
|
||||
"--socket=x11",
|
||||
"--socket=fallback-x11",
|
||||
"--socket=wayland",
|
||||
"--socket=pulseaudio",
|
||||
"--env=USE_PLAYBING3=1"
|
||||
],
|
||||
"build-options" : {
|
||||
"append-path" : "/usr/lib/sdk/rust-stable/bin",
|
||||
"build-args" : [
|
||||
"--share=network"
|
||||
],
|
||||
"env" : {
|
||||
"CARGO_HOME" : "/run/build/Podcasts/cargo",
|
||||
"RUSTFLAGS" : "",
|
||||
"RUST_BACKTRACE" : "1"
|
||||
}
|
||||
},
|
||||
"modules" : [
|
||||
{
|
||||
"name" : "libhandy",
|
||||
"buildsystem" : "meson",
|
||||
"config-opts" : [
|
||||
"-Dintrospection=disabled",
|
||||
"-Dgtk_doc=false",
|
||||
"-Dtests=false",
|
||||
"-Dexamples=false",
|
||||
"-Dvapi=false",
|
||||
"-Dglade_catalog=disabled"
|
||||
],
|
||||
"cleanup" : [
|
||||
"/include",
|
||||
"/lib/pkgconfig"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://source.puri.sm/Librem5/libhandy.git",
|
||||
"tag" : "v0.0.13"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name" : "gnome-podcasts",
|
||||
"buildsystem" : "meson",
|
||||
"builddir" : "true",
|
||||
"config-opts" : [
|
||||
"-Dprofile=development"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://gitlab.gnome.org/World/podcasts.git"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
69
org.gnome.Podcasts.json
Normal file
@ -0,0 +1,69 @@
|
||||
{
|
||||
"app-id" : "org.gnome.Podcasts",
|
||||
"runtime" : "org.gnome.Platform",
|
||||
"runtime-version" : "3.36",
|
||||
"sdk" : "org.gnome.Sdk",
|
||||
"sdk-extensions" : [
|
||||
"org.freedesktop.Sdk.Extension.rust-stable"
|
||||
],
|
||||
"command" : "gnome-podcasts",
|
||||
"tags" : [
|
||||
"nightly"
|
||||
],
|
||||
"desktop-file-name-suffix" : " ☢️",
|
||||
"finish-args" : [
|
||||
"--share=network",
|
||||
"--share=ipc",
|
||||
"--socket=x11",
|
||||
"--socket=fallback-x11",
|
||||
"--socket=wayland",
|
||||
"--socket=pulseaudio",
|
||||
"--env=USE_PLAYBING3=1"
|
||||
],
|
||||
"build-options" : {
|
||||
"append-path" : "/usr/lib/sdk/rust-stable/bin",
|
||||
"build-args" : [
|
||||
"--share=network"
|
||||
],
|
||||
"env" : {
|
||||
"CARGO_HOME" : "/run/build/Podcasts/cargo",
|
||||
"RUST_BACKTRACE" : "1"
|
||||
}
|
||||
},
|
||||
"modules" : [
|
||||
{
|
||||
"name" : "libhandy",
|
||||
"buildsystem" : "meson",
|
||||
"config-opts" : [
|
||||
"-Dintrospection=disabled",
|
||||
"-Dgtk_doc=false",
|
||||
"-Dtests=false",
|
||||
"-Dexamples=false",
|
||||
"-Dvapi=false",
|
||||
"-Dglade_catalog=disabled"
|
||||
],
|
||||
"cleanup" : [
|
||||
"/include",
|
||||
"/lib/pkgconfig"
|
||||
],
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://source.puri.sm/Librem5/libhandy.git",
|
||||
"tag" : "v0.0.13"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name" : "gnome-podcasts",
|
||||
"builddir" : "true",
|
||||
"buildsystem" : "meson",
|
||||
"sources" : [
|
||||
{
|
||||
"type" : "git",
|
||||
"url" : "https://gitlab.gnome.org/World/podcasts.git"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
42
podcasts-data/Cargo.toml
Normal file
@ -0,0 +1,42 @@
|
||||
[package]
|
||||
authors = ["Jordan Petridis <jpetridis@gnome.org>"]
|
||||
name = "podcasts-data"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
ammonia = "3.1.0"
|
||||
chrono = "0.4.11"
|
||||
derive_builder = "0.9.0"
|
||||
lazy_static = "1.4.0"
|
||||
log = "0.4.8"
|
||||
rayon = "1.3.1"
|
||||
rfc822_sanitizer = "0.3.3"
|
||||
rss = "1.9.0"
|
||||
url = "2.1.1"
|
||||
xdg = "2.2.0"
|
||||
xml-rs = "0.8.3"
|
||||
futures = "0.1.29"
|
||||
hyper = "0.12.35"
|
||||
http = "0.1.19"
|
||||
tokio = "0.1.22"
|
||||
hyper-tls = "0.3.2"
|
||||
native-tls = "0.2.3"
|
||||
num_cpus = "1.13.0"
|
||||
failure = "0.1.8"
|
||||
failure_derive = "0.1.8"
|
||||
base64 = "0.12.2"
|
||||
|
||||
[dependencies.diesel]
|
||||
features = ["sqlite", "r2d2"]
|
||||
version = "1.4.5"
|
||||
|
||||
[dependencies.diesel_migrations]
|
||||
features = ["sqlite"]
|
||||
version = "1.4.0"
|
||||
|
||||
[dev-dependencies]
|
||||
rand = "0.7.2"
|
||||
tempdir = "0.3.7"
|
||||
pretty_assertions = "0.6.1"
|
||||
maplit = "1.0.2"
|
||||
6
podcasts-data/diesel.toml
Normal file
@ -0,0 +1,6 @@
|
||||
# For documentation on how to configure this file,
|
||||
# see diesel.rs/guides/configuring-diesel-cli
|
||||
|
||||
[print_schema]
|
||||
file = "src/schema.rs"
|
||||
patch_file = "src/schema.patch"
|
||||
@ -0,0 +1,53 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
favorite INTEGER DEFAULT 0,
|
||||
archive INTEGER DEFAULT 0,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, favorite, archive)
|
||||
SELECT title, uri, local_uri, description, epoch, length, duration, guid, played, podcast_id, 0, 0
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
|
||||
ALTER TABLE podcast RENAME TO old_table;
|
||||
CREATE TABLE `podcast` (
|
||||
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
`title` TEXT NOT NULL,
|
||||
`link` TEXT NOT NULL,
|
||||
`description` TEXT NOT NULL,
|
||||
`image_uri` TEXT,
|
||||
`source_id` INTEGER NOT NULL UNIQUE,
|
||||
`favorite` INTEGER NOT NULL DEFAULT 0,
|
||||
`archive` INTEGER NOT NULL DEFAULT 0,
|
||||
`always_dl` INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
INSERT INTO podcast (
|
||||
id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
) SELECT id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,66 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
|
||||
ALTER TABLE podcast RENAME TO old_table;
|
||||
CREATE TABLE `podcast` (
|
||||
`id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
|
||||
`title` TEXT NOT NULL,
|
||||
`link` TEXT NOT NULL,
|
||||
`description` TEXT NOT NULL,
|
||||
`image_uri` TEXT,
|
||||
`source_id` INTEGER NOT NULL UNIQUE
|
||||
);
|
||||
|
||||
INSERT INTO podcast (
|
||||
id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
) SELECT id,
|
||||
title,
|
||||
link,
|
||||
description,
|
||||
image_uri,
|
||||
source_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,40 @@
|
||||
ALTER TABLE episodes RENAME TO old_table;
|
||||
ALTER TABLE shows RENAME TO podcast;
|
||||
|
||||
CREATE TABLE episode (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
podcast_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, podcast_id)
|
||||
);
|
||||
|
||||
INSERT INTO episode (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
show_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -0,0 +1,40 @@
|
||||
ALTER TABLE episode RENAME TO old_table;
|
||||
ALTER TABLE podcast RENAME TO shows;
|
||||
|
||||
CREATE TABLE episodes (
|
||||
title TEXT NOT NULL,
|
||||
uri TEXT,
|
||||
local_uri TEXT,
|
||||
description TEXT,
|
||||
epoch INTEGER NOT NULL DEFAULT 0,
|
||||
length INTEGER,
|
||||
duration INTEGER,
|
||||
guid TEXT,
|
||||
played INTEGER,
|
||||
show_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (title, show_id)
|
||||
);
|
||||
|
||||
INSERT INTO episodes (
|
||||
title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch,
|
||||
length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
show_id
|
||||
) SELECT title,
|
||||
uri,
|
||||
local_uri,
|
||||
description,
|
||||
epoch, length,
|
||||
duration,
|
||||
guid,
|
||||
played,
|
||||
podcast_id
|
||||
FROM old_table;
|
||||
|
||||
Drop table old_table;
|
||||
@ -1,4 +1,25 @@
|
||||
// database.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Database Setup. This is only public to help with some unit tests.
|
||||
// Diesel embed_migrations! triggers the lint
|
||||
#![allow(unused_imports)]
|
||||
|
||||
use diesel::prelude::*;
|
||||
use diesel::r2d2;
|
||||
@ -7,34 +28,31 @@ use diesel::r2d2::ConnectionManager;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use errors::DataError;
|
||||
use crate::errors::DataError;
|
||||
|
||||
#[cfg(not(test))]
|
||||
use xdg_dirs;
|
||||
use crate::xdg_dirs;
|
||||
|
||||
type Pool = r2d2::Pool<ConnectionManager<SqliteConnection>>;
|
||||
|
||||
embed_migrations!("migrations/");
|
||||
|
||||
lazy_static!{
|
||||
lazy_static! {
|
||||
static ref POOL: Pool = init_pool(DB_PATH.to_str().unwrap());
|
||||
}
|
||||
|
||||
#[cfg(not(test))]
|
||||
lazy_static! {
|
||||
static ref DB_PATH: PathBuf = xdg_dirs::HAMMOND_XDG.place_data_file("hammond.db").unwrap();
|
||||
static ref DB_PATH: PathBuf = xdg_dirs::PODCASTS_XDG
|
||||
.place_data_file("podcasts.db")
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate tempdir;
|
||||
|
||||
#[cfg(test)]
|
||||
lazy_static! {
|
||||
static ref TEMPDIR: tempdir::TempDir = {
|
||||
tempdir::TempDir::new("hammond_unit_test").unwrap()
|
||||
};
|
||||
|
||||
static ref DB_PATH: PathBuf = TEMPDIR.path().join("hammond.db");
|
||||
pub(crate) static ref TEMPDIR: tempdir::TempDir =
|
||||
{ tempdir::TempDir::new("podcasts_unit_test").unwrap() };
|
||||
static ref DB_PATH: PathBuf = TEMPDIR.path().join("podcasts.db");
|
||||
}
|
||||
|
||||
/// Get an r2d2 `SqliteConnection`.
|
||||
@ -65,12 +83,12 @@ fn run_migration_on(connection: &SqliteConnection) -> Result<(), DataError> {
|
||||
|
||||
/// Reset the database into a clean state.
|
||||
// Tests share a temp file db.
|
||||
#[allow(dead_code)]
|
||||
#[cfg(test)]
|
||||
pub fn truncate_db() -> Result<(), DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
con.execute("DELETE FROM episode")?;
|
||||
con.execute("DELETE FROM podcast")?;
|
||||
con.execute("DELETE FROM episodes")?;
|
||||
con.execute("DELETE FROM shows")?;
|
||||
con.execute("DELETE FROM source")?;
|
||||
Ok(())
|
||||
}
|
||||
492
podcasts-data/src/dbqueries.rs
Normal file
@ -0,0 +1,492 @@
|
||||
// dbqueries.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Random CRUD helper functions.
|
||||
|
||||
use chrono::prelude::*;
|
||||
use diesel::prelude::*;
|
||||
|
||||
use diesel;
|
||||
use diesel::dsl::exists;
|
||||
use diesel::select;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::*;
|
||||
|
||||
pub fn get_sources() -> Result<Vec<Source>, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.order((http_etag.asc(), last_modified.asc()))
|
||||
.load::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcasts() -> Result<Vec<Show>, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.order(title.asc())
|
||||
.load::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcasts_filter(filter_ids: &[i32]) -> Result<Vec<Show>, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.order(title.asc())
|
||||
.filter(id.ne_all(filter_ids))
|
||||
.load::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_downloaded_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(local_uri.is_not_null())
|
||||
.load::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_played_episodes() -> Result<Vec<Episode>, DataError> {
|
||||
// use schema::episodes::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
// episodes
|
||||
// .filter(played.is_not_null())
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub(crate) fn get_played_cleaner_episodes() -> Result<Vec<EpisodeCleanerModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(played.is_not_null())
|
||||
.load::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_rowid(ep_id: i32) -> Result<Episode, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(rowid.eq(ep_id))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_widget_from_rowid(ep_id: i32) -> Result<EpisodeWidgetModel, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
))
|
||||
.filter(rowid.eq(ep_id))
|
||||
.get_result::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_local_uri_from_id(ep_id: i32) -> Result<Option<String>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(rowid.eq(ep_id))
|
||||
.select(local_uri)
|
||||
.get_result::<Option<String>>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episodes_widgets_filter_limit(
|
||||
filter_ids: &[i32],
|
||||
limit: u32,
|
||||
) -> Result<Vec<EpisodeWidgetModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
let columns = (
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
);
|
||||
|
||||
episodes
|
||||
.select(columns)
|
||||
.order(epoch.desc())
|
||||
.filter(show_id.ne_all(filter_ids))
|
||||
.limit(i64::from(limit))
|
||||
.load::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
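// Hypothetical caller-side sketch (not taken from the GTK code in this diff):
// a view could fetch the newest widget models while skipping a set of
// filtered-out shows, e.g.
//
//     let ignored_shows: Vec<i32> = vec![];
//     let latest = get_episodes_widgets_filter_limit(&ignored_shows, 100)?;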
|
||||
|
||||
pub fn get_podcast_from_id(pid: i32) -> Result<Show, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_cover_from_id(pid: i32) -> Result<ShowCoverModel, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.select((id, title, image_uri))
|
||||
.filter(id.eq(pid))
|
||||
.get_result::<ShowCoverModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodes_count(parent: &Show) -> Result<i64, DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.count()
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_episodeswidgets(parent: &Show) -> Result<Vec<EpisodeWidgetModel>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
let columns = (
|
||||
rowid, title, uri, local_uri, epoch, length, duration, played, show_id,
|
||||
);
|
||||
|
||||
episodes
|
||||
.select(columns)
|
||||
.filter(show_id.eq(parent.id()))
|
||||
.order(epoch.desc())
|
||||
.load::<EpisodeWidgetModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_pd_unplayed_episodes(parent: &Show) -> Result<Vec<Episode>, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
Episode::belonging_to(parent)
|
||||
.filter(played.is_null())
|
||||
.order(epoch.desc())
|
||||
.load::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// pub(crate) fn get_pd_episodes_limit(parent: &Show, limit: u32) ->
|
||||
// Result<Vec<Episode>, DataError> { use schema::episodes::dsl::*;
|
||||
|
||||
// let db = connection();
|
||||
// let con = db.get()?;
|
||||
|
||||
// Episode::belonging_to(parent)
|
||||
// .order(epoch.desc())
|
||||
// .limit(i64::from(limit))
|
||||
// .load::<Episode>(&con)
|
||||
// .map_err(From::from)
|
||||
// }
|
||||
|
||||
pub fn get_source_from_uri(uri_: &str) -> Result<Source, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(uri.eq(uri_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_source_from_id(id_: i32) -> Result<Source, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
source
|
||||
.filter(id.eq(id_))
|
||||
.get_result::<Source>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_podcast_from_source_id(sid: i32) -> Result<Show, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
shows
|
||||
.filter(source_id.eq(sid))
|
||||
.get_result::<Show>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub fn get_episode_from_pk(title_: &str, pid: i32) -> Result<Episode, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<Episode>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn get_episode_minimal_from_pk(
|
||||
title_: &str,
|
||||
pid: i32,
|
||||
) -> Result<EpisodeMinimal, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, title, uri, epoch, length, duration, guid, show_id))
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<EpisodeMinimal>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) fn get_episode_cleaner_from_pk(
|
||||
title_: &str,
|
||||
pid: i32,
|
||||
) -> Result<EpisodeCleanerModel, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
episodes
|
||||
.select((rowid, local_uri, played))
|
||||
.filter(title.eq(title_))
|
||||
.filter(show_id.eq(pid))
|
||||
.get_result::<EpisodeCleanerModel>(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn remove_feed(pd: &Show) -> Result<(), DataError> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
con.transaction(|| {
|
||||
delete_source(&con, pd.source_id())?;
|
||||
delete_podcast(&con, pd.id())?;
|
||||
delete_podcast_episodes(&con, pd.id())?;
|
||||
info!("Feed removed from the Database.");
|
||||
Ok(())
|
||||
})
|
||||
}
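A hedged usage sketch of the transactional removal above (the source id 42 is a placeholder):

// Sketch: look a show up by its source id and remove it, its source row,
// and all of its episodes inside a single transaction.
let show = get_podcast_from_source_id(42)?;
remove_feed(&show)?;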
|
||||
|
||||
fn delete_source(con: &SqliteConnection, source_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
diesel::delete(source.filter(id.eq(source_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast(con: &SqliteConnection, show_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
diesel::delete(shows.filter(id.eq(show_id))).execute(con)
|
||||
}
|
||||
|
||||
fn delete_podcast_episodes(con: &SqliteConnection, parent_id: i32) -> QueryResult<usize> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
diesel::delete(episodes.filter(show_id.eq(parent_id))).execute(con)
|
||||
}
|
||||
|
||||
pub fn source_exists(url: &str) -> Result<bool, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(source.filter(uri.eq(url))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn podcast_exists(source_id_: i32) -> Result<bool, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(shows.filter(source_id.eq(source_id_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
#[cfg_attr(rustfmt, rustfmt_skip)]
|
||||
pub(crate) fn episode_exists(title_: &str, show_id_: i32) -> Result<bool, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(episodes.filter(show_id.eq(show_id_)).filter(title.eq(title_))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
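A sketch of the typical caller pattern (this mirrors what feed.rs does; the title and show id are placeholders):

// Sketch: only insert when the (title, show_id) pair is not already indexed.
if !episode_exists("Some episode title", 42)? {
    // insert the new episode here
}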
|
||||
|
||||
/// Check if the `episodes` table contains any rows
|
||||
///
|
||||
/// Return true if `episodes` table is populated.
|
||||
pub fn is_episodes_populated(filter_show_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(episodes.filter(show_id.ne_all(filter_show_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
/// Check if the `shows` table contains any rows
|
||||
///
|
||||
/// Return true if `shows` table is populated.
|
||||
pub fn is_podcasts_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(shows.filter(id.ne_all(filter_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
/// Check if the `source` table contains any rows
|
||||
///
|
||||
/// Return true if `source` table is populated.
|
||||
pub fn is_source_populated(filter_ids: &[i32]) -> Result<bool, DataError> {
|
||||
use crate::schema::source::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
select(exists(source.filter(id.ne_all(filter_ids))))
|
||||
.get_result(&con)
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
pub(crate) fn index_new_episodes(eps: &[NewEpisode]) -> Result<(), DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
diesel::insert_into(episodes)
|
||||
.values(eps)
|
||||
.execute(&*con)
|
||||
.map_err(From::from)
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
pub fn update_none_to_played_now(parent: &Show) -> Result<usize, DataError> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
let epoch_now = Utc::now().timestamp() as i32;
|
||||
con.transaction(|| {
|
||||
diesel::update(Episode::belonging_to(parent).filter(played.is_null()))
|
||||
.set(played.eq(Some(epoch_now)))
|
||||
.execute(&con)
|
||||
.map_err(From::from)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::database::*;
|
||||
use crate::pipeline;
|
||||
use failure::Error;
|
||||
|
||||
#[test]
|
||||
fn test_update_none_to_played_now() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill";
|
||||
let source = Source::from_url(url)?;
|
||||
let id = source.id();
|
||||
pipeline::run(vec![source])?;
|
||||
let pd = get_podcast_from_source_id(id)?;
|
||||
|
||||
let eps_num = get_pd_unplayed_episodes(&pd)?.len();
|
||||
assert_ne!(eps_num, 0);
|
||||
|
||||
update_none_to_played_now(&pd)?;
|
||||
let eps_num2 = get_pd_unplayed_episodes(&pd)?.len();
|
||||
assert_eq!(eps_num2, 0);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
podcasts-data/src/errors.rs (new file, 125 lines)
@ -0,0 +1,125 @@
|
||||
// errors.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel;
|
||||
use diesel::r2d2;
|
||||
use diesel_migrations::RunMigrationsError;
|
||||
use http;
|
||||
use hyper;
|
||||
use native_tls;
|
||||
use rss;
|
||||
use url;
|
||||
use xml;
|
||||
|
||||
use std::io;
|
||||
|
||||
use crate::models::Source;
|
||||
|
||||
#[fail(
|
||||
display = "Request to {} returned {}. Context: {}",
|
||||
url, status_code, context
|
||||
)]
|
||||
#[derive(Fail, Debug)]
|
||||
pub struct HttpStatusError {
|
||||
url: String,
|
||||
status_code: hyper::StatusCode,
|
||||
context: String,
|
||||
}
|
||||
|
||||
impl HttpStatusError {
|
||||
pub fn new(url: String, code: hyper::StatusCode, context: String) -> Self {
|
||||
HttpStatusError {
|
||||
url,
|
||||
status_code: code,
|
||||
context,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Fail, Debug)]
|
||||
pub enum DataError {
|
||||
#[fail(display = "SQL Query failed: {}", _0)]
|
||||
DieselResultError(#[cause] diesel::result::Error),
|
||||
#[fail(display = "Database Migration error: {}", _0)]
|
||||
DieselMigrationError(#[cause] RunMigrationsError),
|
||||
#[fail(display = "R2D2 error: {}", _0)]
|
||||
R2D2Error(#[cause] r2d2::Error),
|
||||
#[fail(display = "R2D2 Pool error: {}", _0)]
|
||||
R2D2PoolError(#[cause] r2d2::PoolError),
|
||||
#[fail(display = "Hyper Error: {}", _0)]
|
||||
HyperError(#[cause] hyper::Error),
|
||||
#[fail(display = "ToStr Error: {}", _0)]
|
||||
HttpToStr(#[cause] http::header::ToStrError),
|
||||
#[fail(display = "Failed to parse a url: {}", _0)]
|
||||
UrlError(#[cause] url::ParseError),
|
||||
#[fail(display = "TLS Error: {}", _0)]
|
||||
TLSError(#[cause] native_tls::Error),
|
||||
#[fail(display = "IO Error: {}", _0)]
|
||||
IOError(#[cause] io::Error),
|
||||
#[fail(display = "RSS Error: {}", _0)]
|
||||
RssError(#[cause] rss::Error),
|
||||
#[fail(display = "XML Reader Error: {}", _0)]
|
||||
XmlReaderError(#[cause] xml::reader::Error),
|
||||
#[fail(display = "Error: {}", _0)]
|
||||
Bail(String),
|
||||
#[fail(display = "{}", _0)]
|
||||
HttpStatusGeneral(HttpStatusError),
|
||||
#[fail(display = "Source redirects to a new url")]
|
||||
FeedRedirect(Source),
|
||||
#[fail(display = "Feed is up to date")]
|
||||
FeedNotModified(Source),
|
||||
#[fail(
|
||||
display = "Error occurred while Parsing an Episode. Reason: {}",
|
||||
reason
|
||||
)]
|
||||
ParseEpisodeError { reason: String, parent_id: i32 },
|
||||
#[fail(display = "Episode was not changed and thus skipped.")]
|
||||
EpisodeNotChanged,
|
||||
}
|
||||
|
||||
// Maps a type to a variant of the DataError enum
|
||||
macro_rules! easy_from_impl {
|
||||
($outer_type:ty, $from:ty => $to:expr) => (
|
||||
impl From<$from> for $outer_type {
|
||||
fn from(err: $from) -> Self {
|
||||
$to(err)
|
||||
}
|
||||
}
|
||||
);
|
||||
($outer_type:ty, $from:ty => $to:expr, $($f:ty => $t:expr),+) => (
|
||||
easy_from_impl!($outer_type, $from => $to);
|
||||
easy_from_impl!($outer_type, $($f => $t),+);
|
||||
);
|
||||
}
|
||||
|
||||
easy_from_impl!(
|
||||
DataError,
|
||||
RunMigrationsError => DataError::DieselMigrationError,
|
||||
diesel::result::Error => DataError::DieselResultError,
|
||||
r2d2::Error => DataError::R2D2Error,
|
||||
r2d2::PoolError => DataError::R2D2PoolError,
|
||||
hyper::Error => DataError::HyperError,
|
||||
http::header::ToStrError => DataError::HttpToStr,
|
||||
url::ParseError => DataError::UrlError,
|
||||
native_tls::Error => DataError::TLSError,
|
||||
io::Error => DataError::IOError,
|
||||
rss::Error => DataError::RssError,
|
||||
xml::reader::Error => DataError::XmlReaderError,
|
||||
String => DataError::Bail
|
||||
);
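For reference, a sketch of what one pair of the invocation above expands to; this is what lets the `?` operator convert these error types into `DataError` throughout the crate:

// Roughly equivalent hand-written impl for the first pair above:
impl From<RunMigrationsError> for DataError {
    fn from(err: RunMigrationsError) -> Self {
        DataError::DieselMigrationError(err)
    }
}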
|
||||
podcasts-data/src/feed.rs (new file, 240 lines)
@ -0,0 +1,240 @@
|
||||
// feed.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
|
||||
#![allow(clippy::unit_arg)]
|
||||
//! Index Feeds.
|
||||
|
||||
use futures::future::*;
|
||||
use futures::prelude::*;
|
||||
use futures::stream;
|
||||
use rss;
|
||||
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Index, IndexState, Update};
|
||||
use crate::models::{NewEpisode, NewEpisodeMinimal, NewShow, Show};
|
||||
|
||||
/// Wrapper struct that holds a `Source` id and the `rss::Channel`
|
||||
/// that corresponds to the `Source.uri` field.
|
||||
#[derive(Debug, Clone, Builder, PartialEq)]
|
||||
#[builder(derive(Debug))]
|
||||
#[builder(setter(into))]
|
||||
pub struct Feed {
|
||||
/// The `rss::Channel` parsed from the `Source` uri.
|
||||
channel: rss::Channel,
|
||||
/// The `Source` id where the xml `rss::Channel` came from.
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Feed {
|
||||
/// Index the contents of the RSS `Feed` into the database.
|
||||
pub fn index(self) -> impl Future<Item = (), Error = DataError> + Send {
|
||||
ok(self.parse_podcast())
|
||||
.and_then(|pd| pd.to_podcast())
|
||||
.and_then(move |pd| self.index_channel_items(pd))
|
||||
}
|
||||
|
||||
fn parse_podcast(&self) -> NewShow {
|
||||
NewShow::new(&self.channel, self.source_id)
|
||||
}
|
||||
|
||||
fn index_channel_items(self, pd: Show) -> impl Future<Item = (), Error = DataError> + Send {
|
||||
let stream = stream::iter_ok::<_, DataError>(self.channel.into_items());
|
||||
|
||||
// Parse the episodes
|
||||
let episodes = stream.filter_map(move |item| {
|
||||
NewEpisodeMinimal::new(&item, pd.id())
|
||||
.and_then(move |ep| determine_ep_state(ep, &item))
|
||||
.map_err(|err| error!("Failed to parse an episode: {}", err))
|
||||
.ok()
|
||||
});
|
||||
|
||||
// Filter errors, Index updatable episodes, return insertables.
|
||||
filter_episodes(episodes)
|
||||
// Batch index insertable episodes.
|
||||
.and_then(|eps| ok(batch_insert_episodes(&eps)))
|
||||
}
|
||||
}
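A minimal sketch of driving the returned future (the `feed` value is hypothetical, and the `tokio` 0.1 crate is assumed to be available as it is in the tests at the bottom of this file):

// Sketch: run the indexing future on a tokio runtime,
// logging and discarding any DataError.
fn index_one(feed: Feed) {
    tokio::run(feed.index().map_err(|err| error!("{}", err)));
}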
|
||||
|
||||
fn determine_ep_state(
|
||||
ep: NewEpisodeMinimal,
|
||||
item: &rss::Item,
|
||||
) -> Result<IndexState<NewEpisode>, DataError> {
|
||||
// Check if the episode already exists
|
||||
let exists = dbqueries::episode_exists(ep.title(), ep.show_id())?;
|
||||
|
||||
if !exists {
|
||||
Ok(IndexState::Index(ep.into_new_episode(item)))
|
||||
} else {
|
||||
let old = dbqueries::get_episode_minimal_from_pk(ep.title(), ep.show_id())?;
|
||||
let rowid = old.rowid();
|
||||
|
||||
if ep != old {
|
||||
Ok(IndexState::Update((ep.into_new_episode(item), rowid)))
|
||||
} else {
|
||||
Ok(IndexState::NotChanged)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn filter_episodes<'a, S>(
|
||||
stream: S,
|
||||
) -> impl Future<Item = Vec<NewEpisode>, Error = DataError> + Send + 'a
|
||||
where
|
||||
S: Stream<Item = IndexState<NewEpisode>, Error = DataError> + Send + 'a,
|
||||
{
|
||||
stream
|
||||
.filter_map(|state| match state {
|
||||
IndexState::NotChanged => None,
|
||||
// Update individual rows, and filter them
|
||||
IndexState::Update((ref ep, rowid)) => {
|
||||
ep.update(rowid)
|
||||
.map_err(|err| error!("{}", err))
|
||||
.map_err(|_| error!("Failed to index episode: {:?}.", ep.title()))
|
||||
.ok();
|
||||
|
||||
None
|
||||
}
|
||||
IndexState::Index(s) => Some(s),
|
||||
})
|
||||
// Only `Index` variants are left; collect them for batch indexing
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn batch_insert_episodes(episodes: &[NewEpisode]) {
|
||||
if episodes.is_empty() {
|
||||
return;
|
||||
};
|
||||
|
||||
info!("Indexing {} episodes.", episodes.len());
|
||||
dbqueries::index_new_episodes(episodes)
|
||||
.map_err(|err| {
|
||||
error!("Failed batch indexing: {}", err);
|
||||
info!("Falling back to individual indexing.");
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
episodes.iter().for_each(|ep| {
|
||||
ep.index()
|
||||
.map_err(|err| error!("Error: {}.", err))
|
||||
.map_err(|_| error!("Failed to index episode: {:?}.", ep.title()))
|
||||
.ok();
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use failure::Error;
|
||||
use rss::Channel;
|
||||
use tokio::{self, prelude::*};
|
||||
|
||||
use crate::database::truncate_db;
|
||||
use crate::dbqueries;
|
||||
use crate::utils::get_feed;
|
||||
use crate::Source;
|
||||
|
||||
use std::fs;
|
||||
use std::io::BufReader;
|
||||
|
||||
use super::*;
|
||||
|
||||
// (path, url) tuples.
|
||||
const URLS: &[(&str, &str)] = {
|
||||
&[
|
||||
(
|
||||
"tests/feeds/2018-01-20-Intercepted.xml",
|
||||
"https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-LinuxUnplugged.xml",
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
|
||||
com/linuxunplugged",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-TheTipOff.xml",
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-StealTheStars.xml",
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.\
|
||||
com/steal-the-stars",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-GreaterThanCode.xml",
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
|
||||
com/feed/podcast",
|
||||
),
|
||||
]
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_complete_index() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let feeds: Vec<_> = URLS
|
||||
.iter()
|
||||
.map(|&(path, url)| {
|
||||
// Create and insert a Source into db
|
||||
let s = Source::from_url(url).unwrap();
|
||||
get_feed(path, s.id())
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Index the channels
|
||||
let stream_ = stream::iter_ok(feeds).for_each(|x| x.index());
|
||||
tokio::run(stream_.map_err(|_| ()));
|
||||
|
||||
// Assert the index rows equal the controlled results
|
||||
assert_eq!(dbqueries::get_sources()?.len(), 5);
|
||||
assert_eq!(dbqueries::get_podcasts()?.len(), 5);
|
||||
assert_eq!(dbqueries::get_episodes()?.len(), 354);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_feed_parse_podcast() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let path = "tests/feeds/2018-01-20-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
|
||||
let file = fs::File::open(path)?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(feed.parse_podcast(), pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_feed_index_channel_items() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let path = "tests/feeds/2018-01-20-Intercepted.xml";
|
||||
let feed = get_feed(path, 42);
|
||||
let pd = feed.parse_podcast().to_podcast()?;
|
||||
|
||||
feed.index_channel_items(pd).wait()?;
|
||||
assert_eq!(dbqueries::get_podcasts()?.len(), 1);
|
||||
assert_eq!(dbqueries::get_episodes()?.len(), 43);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
podcasts-data/src/lib.rs (new file, 145 lines)
@ -0,0 +1,145 @@
|
||||
// lib.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
#![recursion_limit = "1024"]
|
||||
#![allow(unknown_lints)]
|
||||
#![cfg_attr(
|
||||
all(test, feature = "clippy"),
|
||||
allow(option_unwrap_used, result_unwrap_used)
|
||||
)]
|
||||
#![cfg_attr(
|
||||
feature = "clippy",
|
||||
warn(
|
||||
option_unwrap_used,
|
||||
result_unwrap_used,
|
||||
print_stdout,
|
||||
wrong_pub_self_convention,
|
||||
mut_mut,
|
||||
non_ascii_literal,
|
||||
similar_names,
|
||||
unicode_not_nfc,
|
||||
enum_glob_use,
|
||||
if_not_else,
|
||||
items_after_statements,
|
||||
used_underscore_binding
|
||||
)
|
||||
)]
|
||||
// Enable lint group collections
|
||||
#![warn(nonstandard_style, bad_style, unused)]
|
||||
#![warn(edition_2018, rust_2018_idioms)]
|
||||
// standalone lints
|
||||
#![warn(
|
||||
const_err,
|
||||
improper_ctypes,
|
||||
non_shorthand_field_patterns,
|
||||
no_mangle_generic_items,
|
||||
overflowing_literals,
|
||||
plugin_as_library,
|
||||
unconditional_recursion,
|
||||
unions_with_drop_fields,
|
||||
while_true,
|
||||
missing_debug_implementations,
|
||||
missing_docs,
|
||||
trivial_casts,
|
||||
trivial_numeric_casts,
|
||||
elided_lifetime_in_paths,
|
||||
missing_copy_implementations
|
||||
)]
|
||||
#![allow(proc_macro_derive_resolution_fallback)]
|
||||
|
||||
//! FIXME: Docs
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate pretty_assertions;
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_use]
|
||||
extern crate maplit;
|
||||
|
||||
#[macro_use]
|
||||
extern crate derive_builder;
|
||||
#[macro_use]
|
||||
extern crate diesel;
|
||||
#[macro_use]
|
||||
extern crate diesel_migrations;
|
||||
// #[macro_use]
|
||||
// extern crate failure;
|
||||
#[macro_use]
|
||||
extern crate failure_derive;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
pub mod database;
|
||||
#[allow(missing_docs)]
|
||||
pub mod dbqueries;
|
||||
#[allow(missing_docs)]
|
||||
pub mod errors;
|
||||
mod feed;
|
||||
pub(crate) mod models;
|
||||
pub mod opml;
|
||||
mod parser;
|
||||
pub mod pipeline;
|
||||
mod schema;
|
||||
pub mod utils;
|
||||
|
||||
pub use crate::feed::{Feed, FeedBuilder};
|
||||
pub use crate::models::Save;
|
||||
pub use crate::models::{Episode, EpisodeWidgetModel, Show, ShowCoverModel, Source};
|
||||
|
||||
// Set the user agent, see #53 for more
|
||||
// Keep this in sync with Tor-browser releases
|
||||
/// The user-agent to be used for all the requests.
|
||||
/// It originates from the Tor-browser UA.
|
||||
pub const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0";
|
||||
|
||||
/// [XDG Base Directory](https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html) Paths.
|
||||
#[allow(missing_debug_implementations)]
|
||||
pub mod xdg_dirs {
|
||||
use std::path::PathBuf;
|
||||
use xdg;
|
||||
|
||||
lazy_static! {
|
||||
pub(crate) static ref PODCASTS_XDG: xdg::BaseDirectories = {
|
||||
xdg::BaseDirectories::with_prefix("gnome-podcasts").unwrap()
|
||||
};
|
||||
|
||||
/// XDG_DATA Directory `PathBuf`.
|
||||
pub static ref PODCASTS_DATA: PathBuf = {
|
||||
PODCASTS_XDG.create_data_directory(PODCASTS_XDG.get_data_home()).unwrap()
|
||||
};
|
||||
|
||||
/// XDG_CONFIG Directory `PathBuf`.
|
||||
pub static ref PODCASTS_CONFIG: PathBuf = {
|
||||
PODCASTS_XDG.create_config_directory(PODCASTS_XDG.get_config_home()).unwrap()
|
||||
};
|
||||
|
||||
/// XDG_CACHE Directory `PathBuf`.
|
||||
pub static ref PODCASTS_CACHE: PathBuf = {
|
||||
PODCASTS_XDG.create_cache_directory(PODCASTS_XDG.get_cache_home()).unwrap()
|
||||
};
|
||||
|
||||
/// GNOME Podcasts Download Directory `PathBuf`.
|
||||
pub static ref DL_DIR: PathBuf = {
|
||||
PODCASTS_XDG.create_data_directory("Downloads").unwrap()
|
||||
};
|
||||
}
|
||||
}
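A small sketch of how the lazy_static paths above are meant to be used ("MyShow" is a placeholder directory name):

// Sketch: build a per-show download destination under the XDG data dir.
fn example_download_dir() -> std::path::PathBuf {
    xdg_dirs::DL_DIR.join("MyShow")
}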
|
||||
podcasts-data/src/meson.build (new file, 19 lines)
@ -0,0 +1,19 @@
|
||||
data_sources = files(
|
||||
'models/episode.rs',
|
||||
'models/mod.rs',
|
||||
'models/new_episode.rs',
|
||||
'models/new_show.rs',
|
||||
'models/new_source.rs',
|
||||
'models/show.rs',
|
||||
'models/source.rs',
|
||||
'database.rs',
|
||||
'dbqueries.rs',
|
||||
'errors.rs',
|
||||
'feed.rs',
|
||||
'lib.rs',
|
||||
'opml.rs',
|
||||
'parser.rs',
|
||||
'pipeline.rs',
|
||||
'schema.rs',
|
||||
'utils.rs',
|
||||
)
|
||||
@ -1,18 +1,37 @@
|
||||
// episode.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use chrono::prelude::*;
|
||||
use diesel;
|
||||
use diesel::SaveChangesDsl;
|
||||
use diesel::prelude::*;
|
||||
use diesel::SaveChangesDsl;
|
||||
|
||||
use database::connection;
|
||||
use errors::DataError;
|
||||
use models::{Podcast, Save};
|
||||
use schema::episode;
|
||||
use crate::database::connection;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Save, Show};
|
||||
use crate::schema::episodes;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[primary_key(title, podcast_id)]
|
||||
#[belongs_to(Podcast, foreign_key = "podcast_id")]
|
||||
#[primary_key(title, show_id)]
|
||||
#[belongs_to(Show, foreign_key = "show_id")]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the episode table.
|
||||
pub struct Episode {
|
||||
@ -26,14 +45,15 @@ pub struct Episode {
|
||||
duration: Option<i32>,
|
||||
guid: Option<String>,
|
||||
played: Option<i32>,
|
||||
favorite: bool,
|
||||
archive: bool,
|
||||
podcast_id: i32,
|
||||
show_id: i32,
|
||||
}
|
||||
|
||||
impl Save<Episode, DataError> for Episode {
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
fn save(&self) -> Result<Episode, DataError> {
|
||||
impl Save<Episode> for Episode {
|
||||
type Error = DataError;
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the
|
||||
/// Database.
|
||||
fn save(&self) -> Result<Episode, Self::Error> {
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
@ -52,11 +72,6 @@ impl Episode {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Set the `title`.
|
||||
pub fn set_title(&mut self, value: &str) {
|
||||
self.title = value.to_string();
|
||||
}
|
||||
|
||||
/// Get the value of the `uri`.
|
||||
///
|
||||
/// Represents the url (usually) that the media file will be located at.
|
||||
@ -64,11 +79,6 @@ impl Episode {
|
||||
self.uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `uri`.
|
||||
pub fn set_uri(&mut self, value: Option<&str>) {
|
||||
self.uri = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the value of the `local_uri`.
|
||||
///
|
||||
/// Represents the local uri, usually a filesystem path,
|
||||
@ -77,31 +87,16 @@ impl Episode {
|
||||
self.local_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `local_uri`.
|
||||
pub fn set_local_uri(&mut self, value: Option<&str>) {
|
||||
self.local_uri = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> Option<&str> {
|
||||
self.description.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `description`.
|
||||
pub fn set_description(&mut self, value: Option<&str>) {
|
||||
self.description = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the Episode's `guid`.
|
||||
pub fn guid(&self) -> Option<&str> {
|
||||
self.guid.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set the `guid`.
|
||||
pub fn set_guid(&mut self, value: Option<&str>) {
|
||||
self.guid = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Get the `epoch` value.
|
||||
///
|
||||
/// Retrieved from the rss Item publish date.
|
||||
@ -110,11 +105,6 @@ impl Episode {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
/// Set the `epoch`.
|
||||
pub fn set_epoch(&mut self, value: i32) {
|
||||
self.epoch = value;
|
||||
}
|
||||
|
||||
/// Get the `length`.
|
||||
///
|
||||
/// The number represents the size of the file in bytes.
|
||||
@ -122,11 +112,6 @@ impl Episode {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Set the `length`.
|
||||
pub fn set_length(&mut self, value: Option<i32>) {
|
||||
self.length = value;
|
||||
}
|
||||
|
||||
/// Get the `duration` value.
|
||||
///
|
||||
/// The number represents the duration of the item/episode in seconds.
|
||||
@ -134,11 +119,6 @@ impl Episode {
|
||||
self.duration
|
||||
}
|
||||
|
||||
/// Set the `duration`.
|
||||
pub fn set_duration(&mut self, value: Option<i32>) {
|
||||
self.duration = value;
|
||||
}
|
||||
|
||||
/// Epoch representation of the last time the episode was played.
|
||||
///
|
||||
/// None/Null for unplayed.
|
||||
@ -146,54 +126,19 @@ impl Episode {
|
||||
self.played
|
||||
}
|
||||
|
||||
/// Set the `played` value.
|
||||
pub fn set_played(&mut self, value: Option<i32>) {
|
||||
self.played = value;
|
||||
}
|
||||
|
||||
/// Represents the archiving policy for the episode.
|
||||
pub fn archive(&self) -> bool {
|
||||
self.archive
|
||||
}
|
||||
|
||||
/// Set the `archive` policy.
|
||||
///
|
||||
/// If true, the download cleaner will ignore the episode
|
||||
/// and the corresponding media value will never be automatically deleted.
|
||||
pub fn set_archive(&mut self, b: bool) {
|
||||
self.archive = b
|
||||
}
|
||||
|
||||
/// Get the `favorite` status of the `Episode`.
|
||||
pub fn favorite(&self) -> bool {
|
||||
self.favorite
|
||||
}
|
||||
|
||||
/// Set `favorite` status.
|
||||
pub fn set_favorite(&mut self, b: bool) {
|
||||
self.favorite = b
|
||||
}
|
||||
|
||||
/// `Podcast` table foreign key.
|
||||
pub fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
}
|
||||
|
||||
/// Sets the `played` value with the current `epoch` timestamp and saves it.
|
||||
pub fn set_played_now(&mut self) -> Result<(), DataError> {
|
||||
let epoch = Utc::now().timestamp() as i32;
|
||||
self.set_played(Some(epoch));
|
||||
self.save().map(|_| ())
|
||||
/// `Show` table foreign key.
|
||||
pub fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[primary_key(title, podcast_id)]
|
||||
#[primary_key(title, show_id)]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model to be used for constructing `EpisodeWidgets`.
|
||||
pub struct EpisodeWidgetQuery {
|
||||
pub struct EpisodeWidgetModel {
|
||||
rowid: i32,
|
||||
title: String,
|
||||
uri: Option<String>,
|
||||
@ -202,14 +147,12 @@ pub struct EpisodeWidgetQuery {
|
||||
length: Option<i32>,
|
||||
duration: Option<i32>,
|
||||
played: Option<i32>,
|
||||
// favorite: bool,
|
||||
// archive: bool,
|
||||
podcast_id: i32,
|
||||
show_id: i32,
|
||||
}
|
||||
|
||||
impl From<Episode> for EpisodeWidgetQuery {
|
||||
fn from(e: Episode) -> EpisodeWidgetQuery {
|
||||
EpisodeWidgetQuery {
|
||||
impl From<Episode> for EpisodeWidgetModel {
|
||||
fn from(e: Episode) -> EpisodeWidgetModel {
|
||||
EpisodeWidgetModel {
|
||||
rowid: e.rowid,
|
||||
title: e.title,
|
||||
uri: e.uri,
|
||||
@ -218,27 +161,30 @@ impl From<Episode> for EpisodeWidgetQuery {
|
||||
length: e.length,
|
||||
duration: e.duration,
|
||||
played: e.played,
|
||||
podcast_id: e.podcast_id,
|
||||
show_id: e.show_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Save<usize, DataError> for EpisodeWidgetQuery {
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
fn save(&self) -> Result<usize, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
impl Save<usize> for EpisodeWidgetModel {
|
||||
type Error = DataError;
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the
|
||||
/// Database.
|
||||
fn save(&self) -> Result<usize, Self::Error> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
diesel::update(episode.filter(rowid.eq(self.rowid)))
|
||||
diesel::update(episodes.filter(rowid.eq(self.rowid)))
|
||||
.set(self)
|
||||
.execute(&*tempdb)
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl EpisodeWidgetQuery {
|
||||
impl EpisodeWidgetModel {
|
||||
/// Get the value of the sqlite's `ROW_ID`
|
||||
pub fn rowid(&self) -> i32 {
|
||||
self.rowid
|
||||
@ -296,11 +242,6 @@ impl EpisodeWidgetQuery {
|
||||
self.duration
|
||||
}
|
||||
|
||||
/// Set the `duration`.
|
||||
pub fn set_duration(&mut self, value: Option<i32>) {
|
||||
self.duration = value;
|
||||
}
|
||||
|
||||
/// Epoch representation of the last time the episode was played.
|
||||
///
|
||||
/// None/Null for unplayed.
|
||||
@ -309,36 +250,13 @@ impl EpisodeWidgetQuery {
|
||||
}
|
||||
|
||||
/// Set the `played` value.
|
||||
pub fn set_played(&mut self, value: Option<i32>) {
|
||||
fn set_played(&mut self, value: Option<i32>) {
|
||||
self.played = value;
|
||||
}
|
||||
|
||||
// /// Represents the archiving policy for the episode.
|
||||
// pub fn archive(&self) -> bool {
|
||||
// self.archive
|
||||
// }
|
||||
|
||||
// /// Set the `archive` policy.
|
||||
// ///
|
||||
// /// If true, the download cleaner will ignore the episode
|
||||
// /// and the corresponding media value will never be automatically deleted.
|
||||
// pub fn set_archive(&mut self, b: bool) {
|
||||
// self.archive = b
|
||||
// }
|
||||
|
||||
// /// Get the `favorite` status of the `Episode`.
|
||||
// pub fn favorite(&self) -> bool {
|
||||
// self.favorite
|
||||
// }
|
||||
|
||||
// /// Set `favorite` status.
|
||||
// pub fn set_favorite(&mut self, b: bool) {
|
||||
// self.favorite = b
|
||||
// }
|
||||
|
||||
/// `Podcast` table foreign key.
|
||||
pub fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
/// `Show` table foreign key.
|
||||
pub fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
|
||||
/// Sets the `played` value with the current `epoch` timestamp and saves it.
|
||||
@ -350,35 +268,38 @@ impl EpisodeWidgetQuery {
|
||||
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[primary_key(title, podcast_id)]
|
||||
#[primary_key(title, show_id)]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model to be used internal with the `utils::checkup` function.
|
||||
pub struct EpisodeCleanerQuery {
|
||||
pub struct EpisodeCleanerModel {
|
||||
rowid: i32,
|
||||
local_uri: Option<String>,
|
||||
played: Option<i32>,
|
||||
}
|
||||
|
||||
impl Save<usize, DataError> for EpisodeCleanerQuery {
|
||||
/// Helper method to easily save/"sync" current state of self to the Database.
|
||||
fn save(&self) -> Result<usize, DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
impl Save<usize> for EpisodeCleanerModel {
|
||||
type Error = DataError;
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the
|
||||
/// Database.
|
||||
fn save(&self) -> Result<usize, Self::Error> {
|
||||
use crate::schema::episodes::dsl::*;
|
||||
|
||||
let db = connection();
|
||||
let tempdb = db.get()?;
|
||||
|
||||
diesel::update(episode.filter(rowid.eq(self.rowid)))
|
||||
diesel::update(episodes.filter(rowid.eq(self.rowid)))
|
||||
.set(self)
|
||||
.execute(&*tempdb)
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Episode> for EpisodeCleanerQuery {
|
||||
fn from(e: Episode) -> EpisodeCleanerQuery {
|
||||
EpisodeCleanerQuery {
|
||||
impl From<Episode> for EpisodeCleanerModel {
|
||||
fn from(e: Episode) -> EpisodeCleanerModel {
|
||||
EpisodeCleanerModel {
|
||||
rowid: e.rowid(),
|
||||
local_uri: e.local_uri,
|
||||
played: e.played,
|
||||
@ -386,7 +307,7 @@ impl From<Episode> for EpisodeCleanerQuery {
|
||||
}
|
||||
}
|
||||
|
||||
impl EpisodeCleanerQuery {
|
||||
impl EpisodeCleanerModel {
|
||||
/// Get the value of the sqlite's `ROW_ID`
|
||||
pub fn rowid(&self) -> i32 {
|
||||
self.rowid
|
||||
@ -419,9 +340,9 @@ impl EpisodeCleanerQuery {
|
||||
}
|
||||
|
||||
#[derive(Queryable, AsChangeset, PartialEq)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[primary_key(title, podcast_id)]
|
||||
#[primary_key(title, show_id)]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model to be used for FIXME.
|
||||
pub struct EpisodeMinimal {
|
||||
@ -429,9 +350,10 @@ pub struct EpisodeMinimal {
|
||||
title: String,
|
||||
uri: Option<String>,
|
||||
epoch: i32,
|
||||
length: Option<i32>,
|
||||
duration: Option<i32>,
|
||||
guid: Option<String>,
|
||||
podcast_id: i32,
|
||||
show_id: i32,
|
||||
}
|
||||
|
||||
impl From<Episode> for EpisodeMinimal {
|
||||
@ -440,10 +362,11 @@ impl From<Episode> for EpisodeMinimal {
|
||||
rowid: e.rowid,
|
||||
title: e.title,
|
||||
uri: e.uri,
|
||||
length: e.length,
|
||||
guid: e.guid,
|
||||
epoch: e.epoch,
|
||||
duration: e.duration,
|
||||
podcast_id: e.podcast_id,
|
||||
show_id: e.show_id,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -479,6 +402,13 @@ impl EpisodeMinimal {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
/// Get the `length`.
|
||||
///
|
||||
/// The number represents the size of the file in bytes.
|
||||
pub fn length(&self) -> Option<i32> {
|
||||
self.length
|
||||
}
|
||||
|
||||
/// Get the `duration` value.
|
||||
///
|
||||
/// The number represents the duration of the item/episode in seconds.
|
||||
@ -486,8 +416,8 @@ impl EpisodeMinimal {
|
||||
self.duration
|
||||
}
|
||||
|
||||
/// `Podcast` table foreign key.
|
||||
pub fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
/// `Show` table foreign key.
|
||||
pub fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
podcasts-data/src/models/mod.rs (new file, 78 lines)
@ -0,0 +1,78 @@
|
||||
// mod.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
mod new_episode;
|
||||
mod new_show;
|
||||
mod new_source;
|
||||
|
||||
mod episode;
|
||||
mod show;
|
||||
mod source;
|
||||
|
||||
// use futures::prelude::*;
|
||||
// use futures::future::*;
|
||||
|
||||
pub(crate) use self::episode::EpisodeCleanerModel;
|
||||
pub(crate) use self::new_episode::{NewEpisode, NewEpisodeMinimal};
|
||||
pub(crate) use self::new_show::NewShow;
|
||||
pub(crate) use self::new_source::NewSource;
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_episode::NewEpisodeBuilder;
|
||||
#[cfg(test)]
|
||||
pub(crate) use self::new_show::NewShowBuilder;
|
||||
|
||||
pub use self::episode::{Episode, EpisodeMinimal, EpisodeWidgetModel};
|
||||
pub use self::show::{Show, ShowCoverModel};
|
||||
pub use self::source::Source;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum IndexState<T> {
|
||||
Index(T),
|
||||
Update((T, i32)),
|
||||
NotChanged,
|
||||
}
|
||||
|
||||
pub trait Insert<T> {
|
||||
type Error;
|
||||
|
||||
fn insert(&self) -> Result<T, Self::Error>;
|
||||
}
|
||||
|
||||
pub trait Update<T> {
|
||||
type Error;
|
||||
|
||||
fn update(&self, _: i32) -> Result<T, Self::Error>;
|
||||
}
|
||||
|
||||
// This might need to change in the future
|
||||
pub trait Index<T>: Insert<T> + Update<T> {
|
||||
type Error;
|
||||
|
||||
fn index(&self) -> Result<T, <Self as Index<T>>::Error>;
|
||||
}
|
||||
|
||||
/// FIXME: DOCS
|
||||
pub trait Save<T> {
|
||||
/// The Error type to be returned.
|
||||
type Error;
|
||||
/// Helper method to easily save/"sync" current state of a diesel model to
|
||||
/// the Database.
|
||||
fn save(&self) -> Result<T, Self::Error>;
|
||||
}
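A small generic sketch showing how the trait is consumed; the concrete impls for the episode models live in episode.rs:

// Sketch: anything implementing `Save` can be persisted through one helper.
fn persist<T, M: Save<T>>(model: &M) -> Result<T, M::Error> {
    model.save()
}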
|
||||
@ -1,19 +1,38 @@
|
||||
// new_episode.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use ammonia;
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use rfc822_sanitizer::parse_from_rfc2822_with_fallback as parse_rfc822;
|
||||
use rss;
|
||||
|
||||
use database::connection;
|
||||
use dbqueries;
|
||||
use errors::DataError;
|
||||
use models::{Episode, EpisodeMinimal, Index, Insert, Update};
|
||||
use parser;
|
||||
use schema::episode;
|
||||
use utils::{replace_extra_spaces, url_cleaner};
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{Episode, EpisodeMinimal, Index, Insert, Update};
|
||||
use crate::parser;
|
||||
use crate::schema::episodes;
|
||||
use crate::utils::url_cleaner;
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[derive(Debug, Clone, Default, Builder, PartialEq)]
|
||||
#[builder(default)]
|
||||
#[builder(derive(Debug))]
|
||||
@ -26,7 +45,7 @@ pub(crate) struct NewEpisode {
|
||||
duration: Option<i32>,
|
||||
guid: Option<String>,
|
||||
epoch: i32,
|
||||
podcast_id: i32,
|
||||
show_id: i32,
|
||||
}
|
||||
|
||||
impl From<NewEpisodeMinimal> for NewEpisode {
|
||||
@ -36,21 +55,23 @@ impl From<NewEpisodeMinimal> for NewEpisode {
|
||||
.uri(e.uri)
|
||||
.duration(e.duration)
|
||||
.epoch(e.epoch)
|
||||
.podcast_id(e.podcast_id)
|
||||
.show_id(e.show_id)
|
||||
.guid(e.guid)
|
||||
.build()
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl Insert<(), DataError> for NewEpisode {
|
||||
impl Insert<()> for NewEpisode {
|
||||
type Error = DataError;
|
||||
|
||||
fn insert(&self) -> Result<(), DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
info!("Inserting {:?}", self.title);
|
||||
diesel::insert_into(episode)
|
||||
diesel::insert_into(episodes)
|
||||
.values(self)
|
||||
.execute(&con)
|
||||
.map_err(From::from)
|
||||
@ -58,14 +79,16 @@ impl Insert<(), DataError> for NewEpisode {
|
||||
}
|
||||
}
|
||||
|
||||
impl Update<(), DataError> for NewEpisode {
|
||||
impl Update<()> for NewEpisode {
|
||||
type Error = DataError;
|
||||
|
||||
fn update(&self, episode_id: i32) -> Result<(), DataError> {
|
||||
use schema::episode::dsl::*;
|
||||
use crate::schema::episodes::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
info!("Updating {:?}", self.title);
|
||||
diesel::update(episode.filter(rowid.eq(episode_id)))
|
||||
diesel::update(episodes.filter(rowid.eq(episode_id)))
|
||||
.set(self)
|
||||
.execute(&con)
|
||||
.map_err(From::from)
|
||||
@ -73,13 +96,16 @@ impl Update<(), DataError> for NewEpisode {
|
||||
}
|
||||
}
|
||||
|
||||
impl Index<(), DataError> for NewEpisode {
|
||||
// Does not update the episode description if it's the only thing that has changed.
|
||||
impl Index<()> for NewEpisode {
|
||||
type Error = DataError;
|
||||
|
||||
// Does not update the episode description if it's the only thing that has
|
||||
// changed.
|
||||
fn index(&self) -> Result<(), DataError> {
|
||||
let exists = dbqueries::episode_exists(self.title(), self.podcast_id())?;
|
||||
let exists = dbqueries::episode_exists(self.title(), self.show_id())?;
|
||||
|
||||
if exists {
|
||||
let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.podcast_id())?;
|
||||
let other = dbqueries::get_episode_minimal_from_pk(self.title(), self.show_id())?;
|
||||
|
||||
if self != &other {
|
||||
self.update(other.rowid())
|
||||
@ -94,17 +120,23 @@ impl Index<(), DataError> for NewEpisode {
|
||||
|
||||
impl PartialEq<EpisodeMinimal> for NewEpisode {
|
||||
fn eq(&self, other: &EpisodeMinimal) -> bool {
|
||||
(self.title() == other.title()) && (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration()) && (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid()) && (self.podcast_id() == other.podcast_id())
|
||||
(self.title() == other.title())
|
||||
&& (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration())
|
||||
&& (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid())
|
||||
&& (self.show_id() == other.show_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Episode> for NewEpisode {
|
||||
fn eq(&self, other: &Episode) -> bool {
|
||||
(self.title() == other.title()) && (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration()) && (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid()) && (self.podcast_id() == other.podcast_id())
|
||||
(self.title() == other.title())
|
||||
&& (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration())
|
||||
&& (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid())
|
||||
&& (self.show_id() == other.show_id())
|
||||
&& (self.description() == other.description())
|
||||
&& (self.length() == other.length())
|
||||
}
|
||||
@ -113,14 +145,14 @@ impl PartialEq<Episode> for NewEpisode {
|
||||
impl NewEpisode {
|
||||
/// Parses an `rss::Item` into a `NewEpisode` Struct.
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn new(item: &rss::Item, podcast_id: i32) -> Result<Self, DataError> {
|
||||
NewEpisodeMinimal::new(item, podcast_id).map(|ep| ep.into_new_episode(item))
|
||||
pub(crate) fn new(item: &rss::Item, show_id: i32) -> Result<Self, DataError> {
|
||||
NewEpisodeMinimal::new(item, show_id).map(|ep| ep.into_new_episode(item))
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn to_episode(&self) -> Result<Episode, DataError> {
|
||||
self.index()?;
|
||||
dbqueries::get_episode_from_pk(&self.title, self.podcast_id).map_err(From::from)
|
||||
dbqueries::get_episode_from_pk(&self.title, self.show_id).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
@ -154,30 +186,34 @@ impl NewEpisode {
|
||||
self.length
|
||||
}
|
||||
|
||||
pub(crate) fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
pub(crate) fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "episode"]
|
||||
#[table_name = "episodes"]
|
||||
#[derive(Debug, Clone, Builder, PartialEq)]
|
||||
#[builder(derive(Debug))]
|
||||
#[builder(setter(into))]
|
||||
pub(crate) struct NewEpisodeMinimal {
|
||||
title: String,
|
||||
uri: Option<String>,
|
||||
length: Option<i32>,
|
||||
duration: Option<i32>,
|
||||
epoch: i32,
|
||||
guid: Option<String>,
|
||||
podcast_id: i32,
|
||||
show_id: i32,
|
||||
}
|
||||
|
||||
impl PartialEq<EpisodeMinimal> for NewEpisodeMinimal {
|
||||
fn eq(&self, other: &EpisodeMinimal) -> bool {
|
||||
(self.title() == other.title()) && (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration()) && (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid()) && (self.podcast_id() == other.podcast_id())
|
||||
(self.title() == other.title())
|
||||
&& (self.uri() == other.uri())
|
||||
&& (self.duration() == other.duration())
|
||||
&& (self.epoch() == other.epoch())
|
||||
&& (self.guid() == other.guid())
|
||||
&& (self.show_id() == other.show_id())
|
||||
}
|
||||
}
|
||||
|
||||
@ -185,7 +221,7 @@ impl NewEpisodeMinimal {
|
||||
pub(crate) fn new(item: &rss::Item, parent_id: i32) -> Result<Self, DataError> {
|
||||
if item.title().is_none() {
|
||||
let err = DataError::ParseEpisodeError {
|
||||
reason: format!("No title specified for this Episode."),
|
||||
reason: "No title specified for this Episode.".into(),
|
||||
parent_id,
|
||||
};
|
||||
|
||||
@ -195,23 +231,34 @@ impl NewEpisodeMinimal {
|
||||
let title = item.title().unwrap().trim().to_owned();
|
||||
let guid = item.guid().map(|s| s.value().trim().to_owned());
|
||||
|
||||
let uri = if let Some(url) = item.enclosure().map(|s| url_cleaner(s.url())) {
|
||||
Some(url)
|
||||
} else if item.link().is_some() {
|
||||
item.link().map(|s| url_cleaner(s))
|
||||
} else {
|
||||
// Get the mime type, the `http` url and the length from the enclosure
|
||||
// http://www.rssboard.org/rss-specification#ltenclosuregtSubelementOfLtitemgt
|
||||
let enc = item.enclosure();
|
||||
|
||||
// Get the url
|
||||
let uri = enc
|
||||
.map(|s| url_cleaner(s.url().trim()))
|
||||
// Fallback to Rss.Item.link if enclosure is None.
|
||||
.or_else(|| item.link().map(|s| url_cleaner(s.trim())));
|
||||
|
||||
// Get the size of the content, it should be in bytes
|
||||
let length = enc.and_then(|x| x.length().parse().ok());
|
||||
|
||||
// If url is still None return an Error as this behaviour is not
|
||||
// compliant with the RSS Spec.
|
||||
if uri.is_none() {
|
||||
let err = DataError::ParseEpisodeError {
|
||||
reason: format!("No url specified for the item."),
|
||||
reason: "No url specified for the item.".into(),
|
||||
parent_id,
|
||||
};
|
||||
|
||||
return Err(err);
|
||||
};
|
||||
|
||||
// Default to rfc2822 represantation of epoch 0.
|
||||
// Default to rfc2822 representation of epoch 0.
|
||||
let date = parse_rfc822(item.pub_date().unwrap_or("Thu, 1 Jan 1970 00:00:00 +0000"));
|
||||
// Should treat information from the rss feeds as invalid by default.
|
||||
// Case: Thu, 05 Aug 2016 06:00:00 -0400 <-- Actually that was friday.
|
||||
// Case: "Thu, 05 Aug 2016 06:00:00 -0400" <-- Actually that was friday.
|
||||
let epoch = date.map(|x| x.timestamp() as i32).unwrap_or(0);
|
||||
|
||||
let duration = parser::parse_itunes_duration(item.itunes_ext());
|
||||
@ -219,35 +266,35 @@ impl NewEpisodeMinimal {
|
||||
NewEpisodeMinimalBuilder::default()
|
||||
.title(title)
|
||||
.uri(uri)
|
||||
.length(length)
|
||||
.duration(duration)
|
||||
.epoch(epoch)
|
||||
.guid(guid)
|
||||
.podcast_id(parent_id)
|
||||
.show_id(parent_id)
|
||||
.build()
|
||||
.map_err(From::from)
|
||||
}
|
||||
|
||||
// TODO: TryInto is stabilizing in rustc v1.26!
|
||||
// ^ Jokes on you past self!
|
||||
pub(crate) fn into_new_episode(self, item: &rss::Item) -> NewEpisode {
|
||||
let length = || -> Option<i32> { item.enclosure().map(|x| x.length().parse().ok())? }();
|
||||
|
||||
// Prefer itunes summary over rss.description since many feeds put html into
|
||||
// rss.description.
|
||||
let summary = item.itunes_ext().map(|s| s.summary()).and_then(|s| s);
|
||||
let description = if summary.is_some() {
|
||||
summary.map(|s| replace_extra_spaces(&ammonia::clean(s)))
|
||||
} else {
|
||||
item.description()
|
||||
.map(|s| replace_extra_spaces(&ammonia::clean(s)))
|
||||
};
|
||||
let description = item.description().and_then(|s| {
|
||||
let sanitized_html = ammonia::Builder::new()
|
||||
// Remove `rel` attributes from `<a>` tags
|
||||
.link_rel(None)
|
||||
.clean(s.trim())
|
||||
.to_string();
|
||||
Some(sanitized_html)
|
||||
});
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title(self.title)
|
||||
.uri(self.uri)
|
||||
.duration(self.duration)
|
||||
.epoch(self.epoch)
|
||||
.podcast_id(self.podcast_id)
|
||||
.show_id(self.show_id)
|
||||
.guid(self.guid)
|
||||
.length(length)
|
||||
.length(self.length)
|
||||
.description(description)
|
||||
.build()
|
||||
.unwrap()
|
||||
@ -276,16 +323,18 @@ impl NewEpisodeMinimal {
|
||||
self.epoch
|
||||
}
|
||||
|
||||
pub(crate) fn podcast_id(&self) -> i32 {
|
||||
self.podcast_id
|
||||
pub(crate) fn show_id(&self) -> i32 {
|
||||
self.show_id
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use database::truncate_db;
|
||||
use dbqueries;
|
||||
use models::*;
|
||||
use models::new_episode::{NewEpisodeMinimal, NewEpisodeMinimalBuilder};
|
||||
use crate::database::truncate_db;
|
||||
use crate::dbqueries;
|
||||
use crate::models::new_episode::{NewEpisodeMinimal, NewEpisodeMinimalBuilder};
|
||||
use crate::models::*;
|
||||
use failure::Error;
|
||||
|
||||
use rss::Channel;
|
||||
|
||||
@ -293,7 +342,7 @@ mod tests {
|
||||
use std::io::BufReader;
|
||||
|
||||
// TODO: Add tests for other feeds too.
|
||||
// Especially if you find an *intresting* generated feed.
|
||||
// Especially if you find an *interesting* generated feed.
|
||||
|
||||
// Known prebuilt expected objects.
|
||||
lazy_static! {
|
||||
@@ -305,12 +354,12 @@ mod tests {
|
||||
)))
|
||||
.guid(Some(String::from("7df4070a-9832-11e7-adac-cb37b05d5e24")))
|
||||
.epoch(1505296800)
|
||||
.length(Some(66738886))
|
||||
.duration(Some(4171))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_MINIMAL_INTERCEPTED_2: NewEpisodeMinimal = {
|
||||
NewEpisodeMinimalBuilder::default()
|
||||
.title("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
|
||||
@@ -319,18 +368,18 @@ mod tests {
|
||||
)))
|
||||
.guid(Some(String::from("7c207a24-e33f-11e6-9438-eb45dcf36a1d")))
|
||||
.epoch(1502272800)
|
||||
.length(Some(67527575))
|
||||
.duration(Some(4415))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_INTERCEPTED_1: NewEpisode = {
|
||||
let descr = "NSA whistleblower Edward Snowden discusses the massive Equifax data breach \
|
||||
and allegations of Russian interference in the US election. Commentator \
|
||||
Shaun King explains his call for a boycott of the NFL and talks about his \
|
||||
campaign to bring violent neo-Nazis to justice. Rapper Open Mike Eagle \
|
||||
performs.";
|
||||
let descr = "NSA whistleblower Edward Snowden discusses the massive Equifax data \
|
||||
breach and allegations of Russian interference in the US election. \
|
||||
Commentator Shaun King explains his call for a boycott of the NFL and \
|
||||
talks about his campaign to bring violent neo-Nazis to justice. Rapper \
|
||||
Open Mike Eagle performs.";
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title("The Super Bowl of Racism")
|
||||
@@ -342,20 +391,19 @@ mod tests {
|
||||
.length(Some(66738886))
|
||||
.epoch(1505296800)
|
||||
.duration(Some(4171))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_INTERCEPTED_2: NewEpisode = {
|
||||
let descr = "This week on Intercepted: Jeremy gives an update on the aftermath of \
|
||||
Blackwater’s 2007 massacre of Iraqi civilians. Intercept reporter Lee Fang \
|
||||
lays out how a network of libertarian think tanks called the Atlas Network \
|
||||
is insidiously shaping political infrastructure in Latin America. We speak \
|
||||
with attorney and former Hugo Chavez adviser Eva Golinger about the \
|
||||
Venezuela\'s political turmoil.And we hear Claudia Lizardo of the \
|
||||
Caracas-based band, La Pequeña Revancha, talk about her music and hopes for \
|
||||
Venezuela.";
|
||||
Blackwater’s 2007 massacre of Iraqi civilians. Intercept reporter Lee \
|
||||
Fang lays out how a network of libertarian think tanks called the Atlas \
|
||||
Network is insidiously shaping political infrastructure in Latin \
|
||||
America. We speak with attorney and former Hugo Chavez adviser Eva \
|
||||
Golinger about the Venezuela\'s political turmoil.And we hear Claudia \
|
||||
Lizardo of the Caracas-based band, La Pequeña Revancha, talk about her \
|
||||
music and hopes for Venezuela.";
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title("Atlas Golfed — U.S.-Backed Think Tanks Target Latin America")
|
||||
@@ -367,11 +415,10 @@ mod tests {
|
||||
.length(Some(67527575))
|
||||
.epoch(1502272800)
|
||||
.duration(Some(4415))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref UPDATED_DURATION_INTERCEPTED_1: NewEpisode = {
|
||||
NewEpisodeBuilder::default()
|
||||
.title("The Super Bowl of Racism")
|
||||
@@ -383,11 +430,10 @@ mod tests {
|
||||
.length(Some(66738886))
|
||||
.epoch(1505296800)
|
||||
.duration(Some(424242))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_MINIMAL_LUP_1: NewEpisodeMinimal = {
|
||||
NewEpisodeMinimalBuilder::default()
|
||||
.title("Hacking Devices with Kali Linux | LUP 214")
|
||||
@@ -395,13 +441,13 @@ mod tests {
|
||||
"http://www.podtrac.com/pts/redirect.mp3/traffic.libsyn.com/jnite/lup-0214.mp3",
|
||||
)))
|
||||
.guid(Some(String::from("78A682B4-73E8-47B8-88C0-1BE62DD4EF9D")))
|
||||
.length(Some(46479789))
|
||||
.epoch(1505280282)
|
||||
.duration(Some(5733))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_MINIMAL_LUP_2: NewEpisodeMinimal = {
|
||||
NewEpisodeMinimalBuilder::default()
|
||||
.title("Gnome Does it Again | LUP 213")
|
||||
@@ -410,17 +456,17 @@ mod tests {
|
||||
)))
|
||||
.guid(Some(String::from("1CE57548-B36C-4F14-832A-5D5E0A24E35B")))
|
||||
.epoch(1504670247)
|
||||
.length(Some(36544272))
|
||||
.duration(Some(4491))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_LUP_1: NewEpisode = {
|
||||
let descr = "Audit your network with a couple of easy commands on Kali Linux. Chris \
|
||||
decides to blow off a little steam by attacking his IoT devices, Wes has the \
|
||||
scope on Equifax blaming open source & the Beard just saved the show. \
|
||||
It’s a really packed episode!";
|
||||
decides to blow off a little steam by attacking his IoT devices, Wes has \
|
||||
the scope on Equifax blaming open source & the Beard just saved the \
|
||||
show. It’s a really packed episode!";
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title("Hacking Devices with Kali Linux | LUP 214")
|
||||
@@ -432,17 +478,17 @@ mod tests {
|
||||
.length(Some(46479789))
|
||||
.epoch(1505280282)
|
||||
.duration(Some(5733))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
static ref EXPECTED_LUP_2: NewEpisode = {
|
||||
let descr = "The Gnome project is about to solve one of our audience's biggest Wayland’s \
|
||||
concerns. But as the project takes on a new level of relevance, decisions for the \
|
||||
next version of Gnome have us worried about the future.\nPlus we chat with Wimpy \
|
||||
about the Ubuntu Rally in NYC, Microsoft’s sneaky move to turn Windows 10 into the \
|
||||
“ULTIMATE LINUX RUNTIME”, community news & more!";
|
||||
let descr =
|
||||
"<p>The Gnome project is about to solve one of our audience's biggest Wayland’s \
|
||||
concerns. But as the project takes on a new level of relevance, decisions for \
|
||||
the next version of Gnome have us worried about the future.</p>\n\n<p>Plus we \
|
||||
chat with Wimpy about the Ubuntu Rally in NYC, Microsoft’s sneaky move to turn \
|
||||
Windows 10 into the “ULTIMATE LINUX RUNTIME”, community news & more!</p>";
|
||||
|
||||
NewEpisodeBuilder::default()
|
||||
.title("Gnome Does it Again | LUP 213")
|
||||
@@ -454,80 +500,88 @@ mod tests {
|
||||
.length(Some(36544272))
|
||||
.epoch(1504670247)
|
||||
.duration(Some(4491))
|
||||
.podcast_id(42)
|
||||
.show_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_minimal_intercepted() {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
fn test_new_episode_minimal_intercepted() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_1);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_intercepted() {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
fn test_new_episode_intercepted() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_minimal_lup() {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
fn test_new_episode_minimal_lup() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(18).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_1);
|
||||
|
||||
let episode = channel.items().iter().nth(19).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisodeMinimal::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_MINIMAL_LUP_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_lup() {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
fn test_new_episode_lup() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(18).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_LUP_1);
|
||||
|
||||
let episode = channel.items().iter().nth(19).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
let ep = NewEpisode::new(&episode, 42)?;
|
||||
assert_eq!(ep, *EXPECTED_LUP_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_minimal_into_new_episode() {
|
||||
truncate_db().unwrap();
|
||||
fn test_minimal_into_new_episode() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let item = channel.items().iter().nth(14).unwrap();
|
||||
let ep = EXPECTED_MINIMAL_INTERCEPTED_1
|
||||
.clone()
|
||||
.into_new_episode(&item);
|
||||
println!(
|
||||
"EPISODE: {:#?}\nEXPECTED: {:#?}",
|
||||
ep, *EXPECTED_INTERCEPTED_1
|
||||
);
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_1);
|
||||
|
||||
let item = channel.items().iter().nth(15).unwrap();
|
||||
@@ -535,61 +589,58 @@ mod tests {
|
||||
.clone()
|
||||
.into_new_episode(&item);
|
||||
assert_eq!(ep, *EXPECTED_INTERCEPTED_2);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_insert() {
|
||||
truncate_db().unwrap();
|
||||
fn test_new_episode_insert() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml").unwrap();
|
||||
let channel = Channel::read_from(BufReader::new(file)).unwrap();
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let episode = channel.items().iter().nth(14).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
new_ep.insert().unwrap();
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42)?;
|
||||
new_ep.insert()?;
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
|
||||
|
||||
assert_eq!(new_ep, ep);
|
||||
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_1);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED_1, &ep);
|
||||
|
||||
let episode = channel.items().iter().nth(15).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42).unwrap();
|
||||
new_ep.insert().unwrap();
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.podcast_id()).unwrap();
|
||||
let new_ep = NewEpisode::new(&episode, 42)?;
|
||||
new_ep.insert()?;
|
||||
let ep = dbqueries::get_episode_from_pk(new_ep.title(), new_ep.show_id())?;
|
||||
|
||||
assert_eq!(new_ep, ep);
|
||||
assert_eq!(&new_ep, &*EXPECTED_INTERCEPTED_2);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED_2, &ep);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_update() {
|
||||
truncate_db().unwrap();
|
||||
let old = EXPECTED_INTERCEPTED_1.clone().to_episode().unwrap();
|
||||
fn test_new_episode_update() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
let old = EXPECTED_INTERCEPTED_1.clone().to_episode()?;
|
||||
|
||||
let updated = &*UPDATED_DURATION_INTERCEPTED_1;
|
||||
updated.update(old.rowid()).unwrap();
|
||||
let mut new = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
|
||||
updated.update(old.rowid())?;
|
||||
let new = dbqueries::get_episode_from_pk(old.title(), old.show_id())?;
|
||||
|
||||
// Assert that updating does not change the rowid and podcast_id
|
||||
// Assert that updating does not change the rowid and show_id
|
||||
assert_ne!(old, new);
|
||||
assert_eq!(old.rowid(), new.rowid());
|
||||
assert_eq!(old.podcast_id(), new.podcast_id());
|
||||
assert_eq!(old.show_id(), new.show_id());
|
||||
|
||||
assert_eq!(updated, &new);
|
||||
assert_ne!(updated, &old);
|
||||
|
||||
new.set_archive(true);
|
||||
new.save().unwrap();
|
||||
|
||||
let new2 = dbqueries::get_episode_from_pk(old.title(), old.podcast_id()).unwrap();
|
||||
assert_eq!(true, new2.archive());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_index() {
|
||||
truncate_db().unwrap();
|
||||
fn test_new_episode_index() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
let expected = &*EXPECTED_INTERCEPTED_1;
|
||||
|
||||
// First insert
|
||||
@@ -597,7 +648,7 @@ mod tests {
|
||||
// Second identical, this should take the early return path
|
||||
assert!(expected.index().is_ok());
|
||||
// Get the episode
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
|
||||
// Assert that NewPodcast is equal to the Indexed one
|
||||
assert_eq!(*expected, old);
|
||||
|
||||
@@ -606,42 +657,33 @@ mod tests {
|
||||
// Update the podcast
|
||||
assert!(updated.index().is_ok());
|
||||
// Get the new Podcast
|
||||
let new = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
||||
let new = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
|
||||
// Assert it's diff from the old one.
|
||||
assert_ne!(new, old);
|
||||
assert_eq!(*updated, new);
|
||||
assert_eq!(new.rowid(), old.rowid());
|
||||
assert_eq!(new.podcast_id(), old.podcast_id());
|
||||
assert_eq!(new.show_id(), old.show_id());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_episode_to_episode() {
|
||||
fn test_new_episode_to_episode() -> Result<(), Error> {
|
||||
let expected = &*EXPECTED_INTERCEPTED_1;
|
||||
let updated = &*UPDATED_DURATION_INTERCEPTED_1;
|
||||
|
||||
// Assert insert() produces the same result that you would get with to_podcast()
|
||||
truncate_db().unwrap();
|
||||
expected.insert().unwrap();
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
||||
let ep = expected.to_episode().unwrap();
|
||||
truncate_db()?;
|
||||
expected.insert()?;
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
|
||||
let ep = expected.to_episode()?;
|
||||
assert_eq!(old, ep);
|
||||
|
||||
// Same as above, diff order
|
||||
truncate_db().unwrap();
|
||||
let ep = expected.to_episode().unwrap();
|
||||
truncate_db()?;
|
||||
let ep = expected.to_episode()?;
|
||||
// This should error as a unique constraint violation
|
||||
assert!(expected.insert().is_err());
|
||||
let mut old =
|
||||
dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.show_id())?;
|
||||
assert_eq!(old, ep);
|
||||
|
||||
old.set_archive(true);
|
||||
old.save().unwrap();
|
||||
|
||||
// Assert that it does not mess with user preferences
|
||||
let ep = updated.to_episode().unwrap();
|
||||
let old = dbqueries::get_episode_from_pk(expected.title(), expected.podcast_id()).unwrap();
|
||||
assert_eq!(old, ep);
|
||||
assert_eq!(old.archive(), true);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
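The test changes in new_episode.rs above all apply the same refactor: the `#[test]` functions now return `Result<(), Error>` so that `?` can replace `.unwrap()`. A minimal, self-contained sketch of that pattern (the feed path and test name here are only illustrative):

use failure::Error;
use std::fs::File;
use std::io::BufReader;

#[test]
fn example_result_returning_test() -> Result<(), Error> {
    // `?` propagates any I/O error and fails the test with that error,
    // instead of panicking through `.unwrap()`.
    let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
    let _reader = BufReader::new(file);
    // Returning Ok(()) marks the test as passed.
    Ok(())
}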
453
podcasts-data/src/models/new_show.rs
Normal file
@@ -0,0 +1,453 @@
|
||||
// new_show.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use ammonia;
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use rss;
|
||||
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Show;
|
||||
use crate::models::{Index, Insert, Update};
|
||||
use crate::schema::shows;
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
use crate::utils::url_cleaner;
|
||||
|
||||
#[derive(Insertable, AsChangeset)]
|
||||
#[table_name = "shows"]
|
||||
#[derive(Debug, Clone, Default, Builder, PartialEq)]
|
||||
#[builder(default)]
|
||||
#[builder(derive(Debug))]
|
||||
#[builder(setter(into))]
|
||||
pub(crate) struct NewShow {
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Insert<()> for NewShow {
|
||||
type Error = DataError;
|
||||
|
||||
fn insert(&self) -> Result<(), Self::Error> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
diesel::insert_into(shows)
|
||||
.values(self)
|
||||
.execute(&con)
|
||||
.map(|_| ())
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Update<()> for NewShow {
|
||||
type Error = DataError;
|
||||
|
||||
fn update(&self, show_id: i32) -> Result<(), Self::Error> {
|
||||
use crate::schema::shows::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
info!("Updating {}", self.title);
|
||||
diesel::update(shows.filter(id.eq(show_id)))
|
||||
.set(self)
|
||||
.execute(&con)
|
||||
.map(|_| ())
|
||||
.map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Maybe return an Enum<Action(Result)> instead.
// It would make unit testing better too.
|
||||
impl Index<()> for NewShow {
|
||||
type Error = DataError;
|
||||
|
||||
fn index(&self) -> Result<(), DataError> {
|
||||
let exists = dbqueries::podcast_exists(self.source_id)?;
|
||||
|
||||
if exists {
|
||||
let other = dbqueries::get_podcast_from_source_id(self.source_id)?;
|
||||
|
||||
if self != &other {
|
||||
self.update(other.id())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
} else {
|
||||
self.insert()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Show> for NewShow {
|
||||
fn eq(&self, other: &Show) -> bool {
|
||||
(self.link() == other.link())
|
||||
&& (self.title() == other.title())
|
||||
&& (self.image_uri() == other.image_uri())
|
||||
&& (self.description() == other.description())
|
||||
&& (self.source_id() == other.source_id())
|
||||
}
|
||||
}
|
||||
|
||||
impl NewShow {
|
||||
/// Parses a `rss::Channel` into a `NewShow` Struct.
|
||||
pub(crate) fn new(chan: &rss::Channel, source_id: i32) -> NewShow {
|
||||
let title = chan.title().trim();
|
||||
let link = url_cleaner(chan.link().trim());
|
||||
|
||||
let description = ammonia::Builder::new()
|
||||
// Remove `rel` attributes from `<a>` tags
|
||||
.link_rel(None)
|
||||
.clean(chan.description().trim())
|
||||
.to_string();
|
||||
|
||||
// Try to get the itunes img first
|
||||
let itunes_img = chan
|
||||
.itunes_ext()
|
||||
.and_then(|s| s.image().map(|url| url.trim()))
|
||||
.map(|s| s.to_owned());
|
||||
// If itunes is None, try to get the channel.image from the rss spec
|
||||
let image_uri = itunes_img.or_else(|| chan.image().map(|s| s.url().trim().to_owned()));
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title(title)
|
||||
.description(description)
|
||||
.link(link)
|
||||
.image_uri(image_uri)
|
||||
.source_id(source_id)
|
||||
.build()
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
// Look out for when tryinto lands into stable.
|
||||
pub(crate) fn to_podcast(&self) -> Result<Show, DataError> {
|
||||
self.index()?;
|
||||
dbqueries::get_podcast_from_source_id(self.source_id).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore the following getters. They are mainly used in unit tests.
|
||||
impl NewShow {
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
|
||||
pub(crate) fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
pub(crate) fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
pub(crate) fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
pub(crate) fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
// use tokio_core::reactor::Core;
|
||||
|
||||
use failure::Error;
|
||||
use rss::Channel;
|
||||
|
||||
use crate::database::truncate_db;
|
||||
use crate::models::NewShowBuilder;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::BufReader;
|
||||
|
||||
// Pre-built expected NewShow structs.
|
||||
lazy_static! {
|
||||
static ref EXPECTED_INTERCEPTED: NewShow = {
|
||||
let descr = "The people behind The Intercept’s fearless reporting and incisive \
|
||||
commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and \
|
||||
others—discuss the crucial issues of our time: national security, civil \
|
||||
liberties, foreign policy, and criminal justice. Plus interviews with \
|
||||
artists, thinkers, and newsmakers who challenge our preconceptions about \
|
||||
the world we live in.";
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title("Intercepted with Jeremy Scahill")
|
||||
.link("https://theintercept.com/podcasts")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
|
||||
uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
|
||||
2FIntercepted_COVER%2B_281_29.png")
|
||||
))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref EXPECTED_LUP: NewShow = {
|
||||
let descr = "An open show powered by community LINUX Unplugged takes the best \
|
||||
attributes of open collaboration and focuses them into a weekly \
|
||||
lifestyle show about Linux.";
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title("LINUX Unplugged Podcast")
|
||||
.link("http://www.jupiterbroadcasting.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://www.jupiterbroadcasting.com/images/LASUN-Badge1400.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref EXPECTED_TIPOFF: NewShow = {
|
||||
let desc = "<p>Welcome to The Tip Off- the podcast where we take you behind the \
|
||||
scenes of some of the best investigative journalism from recent years. \
|
||||
Each episode we’ll be digging into an investigative scoop- hearing from \
|
||||
the journalists behind the work as they tell us about the leads, the \
|
||||
dead-ends and of course, the tip offs. There’ll be car chases, slammed \
|
||||
doors, terrorist cells, meetings in dimly lit bars and cafes, wrangling \
|
||||
with despotic regimes and much more. So if you’re curious about the fun, \
|
||||
complicated detective work that goes into doing great investigative \
|
||||
journalism- then this is the podcast for you.</p>";
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title("The Tip Off")
|
||||
.link("http://www.acast.com/thetipoff")
|
||||
.description(desc)
|
||||
.image_uri(Some(String::from(
|
||||
"https://imagecdn.acast.com/image?h=1500&w=1500&source=http%3A%2F%2Fi1.sndcdn.\
|
||||
com%2Favatars-000317856075-a2coqz-original.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref EXPECTED_STARS: NewShow = {
|
||||
let descr = "<p>The first audio drama from Tor Labs and Gideon Media, Steal the Stars \
|
||||
is a gripping noir science fiction thriller in 14 episodes: Forbidden \
|
||||
love, a crashed UFO, an alien body, and an impossible heist unlike any \
|
||||
ever attempted - scripted by Mac Rogers, the award-winning playwright \
|
||||
and writer of the multi-million download The Message and LifeAfter.</p>";
|
||||
let img = "https://dfkfj8j276wwv.cloudfront.net/images/2c/5f/a0/1a/2c5fa01a-ae78-4a8c-\
|
||||
b183-7311d2e436c3/b3a4aa57a576bb662191f2a6bc2a436c8c4ae256ecffaff5c4c54fd42e\
|
||||
923914941c264d01efb1833234b52c9530e67d28a8cebbe3d11a4bc0fbbdf13ecdf1c3.jpeg";
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title("Steal the Stars")
|
||||
.link("http://tor-labs.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(img)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref EXPECTED_CODE: NewShow = {
|
||||
let descr = "A podcast about humans and technology. Panelists: Coraline Ada Ehmke, \
|
||||
David Brady, Jessica Kerr, Jay Bobo, Astrid Countee and Sam \
|
||||
Livingston-Gray. Brought to you by @therubyrep.";
|
||||
|
||||
NewShowBuilder::default()
|
||||
.title("Greater Than Code")
|
||||
.link("https://www.greaterthancode.com/")
|
||||
.description(descr)
|
||||
.image_uri(Some(String::from(
|
||||
"http://www.greaterthancode.com/wp-content/uploads/2016/10/code1400-4.jpg",
|
||||
)))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref EXPECTED_ELLINOFRENEIA: NewShow = {
|
||||
NewShowBuilder::default()
|
||||
.title("Ελληνοφρένεια")
|
||||
.link("https://ellinofreneia.sealabs.net/feed.rss")
|
||||
.description("Ανεπίσημο feed της Ελληνοφρένειας")
|
||||
.image_uri(Some("https://ellinofreneia.sealabs.net/logo.png".into()))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
static ref UPDATED_DESC_INTERCEPTED: NewShow = {
|
||||
NewShowBuilder::default()
|
||||
.title("Intercepted with Jeremy Scahill")
|
||||
.link("https://theintercept.com/podcasts")
|
||||
.description("New Description")
|
||||
.image_uri(Some(String::from(
|
||||
"http://static.megaphone.fm/podcasts/d5735a50-d904-11e6-8532-73c7de466ea6/image/\
|
||||
uploads_2F1484252190700-qhn5krasklbce3dh-a797539282700ea0298a3a26f7e49b0b_\
|
||||
2FIntercepted_COVER%2B_281_29.png")
|
||||
))
|
||||
.source_id(42)
|
||||
.build()
|
||||
.unwrap()
|
||||
};
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_intercepted() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_INTERCEPTED, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_lup() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-LinuxUnplugged.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_LUP, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_thetipoff() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-TheTipOff.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_TIPOFF, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_steal_the_stars() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-StealTheStars.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_STARS, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_greater_than_code() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-01-20-GreaterThanCode.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_CODE, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_ellinofreneia() -> Result<(), Error> {
|
||||
let file = File::open("tests/feeds/2018-03-28-Ellinofreneia.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let pd = NewShow::new(&channel, 42);
|
||||
assert_eq!(*EXPECTED_ELLINOFRENEIA, pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This maybe could be a doc test on insert.
|
||||
fn test_new_podcast_insert() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
let file = File::open("tests/feeds/2018-01-20-Intercepted.xml")?;
|
||||
let channel = Channel::read_from(BufReader::new(file))?;
|
||||
|
||||
let npd = NewShow::new(&channel, 42);
|
||||
npd.insert()?;
|
||||
let pd = dbqueries::get_podcast_from_source_id(42)?;
|
||||
|
||||
assert_eq!(npd, pd);
|
||||
assert_eq!(*EXPECTED_INTERCEPTED, npd);
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED, &pd);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
// TODO: Add more test/checks
|
||||
// Currently there's a test that only checks new description or title.
|
||||
// If you have time and want to help, implement the test for the other fields
|
||||
// too.
|
||||
fn test_new_podcast_update() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
let old = EXPECTED_INTERCEPTED.to_podcast()?;
|
||||
|
||||
let updated = &*UPDATED_DESC_INTERCEPTED;
|
||||
updated.update(old.id())?;
|
||||
let new = dbqueries::get_podcast_from_source_id(42)?;
|
||||
|
||||
assert_ne!(old, new);
|
||||
assert_eq!(old.id(), new.id());
|
||||
assert_eq!(old.source_id(), new.source_id());
|
||||
assert_eq!(updated, &new);
|
||||
assert_ne!(updated, &old);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_new_podcast_index() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
// First insert
|
||||
assert!(EXPECTED_INTERCEPTED.index().is_ok());
|
||||
// Second identical, this should take the early return path
|
||||
assert!(EXPECTED_INTERCEPTED.index().is_ok());
|
||||
// Get the podcast
|
||||
let old = dbqueries::get_podcast_from_source_id(42)?;
|
||||
// Assert that NewShow is equal to the Indexed one
|
||||
assert_eq!(&*EXPECTED_INTERCEPTED, &old);
|
||||
|
||||
let updated = &*UPDATED_DESC_INTERCEPTED;
|
||||
|
||||
// Update the podcast
|
||||
assert!(updated.index().is_ok());
|
||||
// Get the new Show
|
||||
let new = dbqueries::get_podcast_from_source_id(42)?;
|
||||
// Assert it's diff from the old one.
|
||||
assert_ne!(new, old);
|
||||
assert_eq!(new.id(), old.id());
|
||||
assert_eq!(new.source_id(), old.source_id());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_podcast() -> Result<(), Error> {
|
||||
// Assert insert() produces the same result that you would get with to_podcast()
|
||||
truncate_db()?;
|
||||
EXPECTED_INTERCEPTED.insert()?;
|
||||
let old = dbqueries::get_podcast_from_source_id(42)?;
|
||||
let pd = EXPECTED_INTERCEPTED.to_podcast()?;
|
||||
assert_eq!(old, pd);
|
||||
|
||||
// Same as above, diff order
|
||||
truncate_db()?;
|
||||
let pd = EXPECTED_INTERCEPTED.to_podcast()?;
|
||||
// This should error as a unique constraint violation
|
||||
assert!(EXPECTED_INTERCEPTED.insert().is_err());
|
||||
let old = dbqueries::get_podcast_from_source_id(42)?;
|
||||
assert_eq!(old, pd);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1,15 +1,32 @@
|
||||
#![allow(unused_mut)]
|
||||
// new_source.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel;
|
||||
use diesel::prelude::*;
|
||||
use url::Url;
|
||||
|
||||
use database::connection;
|
||||
use dbqueries;
|
||||
use crate::database::connection;
|
||||
use crate::dbqueries;
|
||||
// use models::{Insert, Update};
|
||||
use errors::DataError;
|
||||
use models::Source;
|
||||
use schema::source;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Source;
|
||||
use crate::schema::source;
|
||||
|
||||
#[derive(Insertable)]
|
||||
#[table_name = "source"]
|
||||
@@ -33,7 +50,7 @@ impl NewSource {
|
||||
}
|
||||
|
||||
pub(crate) fn insert_or_ignore(&self) -> Result<(), DataError> {
|
||||
use schema::source::dsl::*;
|
||||
use crate::schema::source::dsl::*;
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
110
podcasts-data/src/models/show.rs
Normal file
@@ -0,0 +1,110 @@
|
||||
// show.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use crate::models::Source;
|
||||
use crate::schema::shows;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, Associations, PartialEq)]
|
||||
#[belongs_to(Source, foreign_key = "source_id")]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[table_name = "shows"]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the shows table.
|
||||
pub struct Show {
|
||||
id: i32,
|
||||
title: String,
|
||||
link: String,
|
||||
description: String,
|
||||
image_uri: Option<String>,
|
||||
source_id: i32,
|
||||
}
|
||||
|
||||
impl Show {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the Feed `link`.
|
||||
///
|
||||
/// Usually the website/homepage of the content creator.
|
||||
pub fn link(&self) -> &str {
|
||||
&self.link
|
||||
}
|
||||
|
||||
/// Get the `description`.
|
||||
pub fn description(&self) -> &str {
|
||||
&self.description
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri(url usually) that the Feed cover image is located at.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// `Source` table foreign key.
|
||||
pub fn source_id(&self) -> i32 {
|
||||
self.source_id
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Queryable, Debug, Clone)]
|
||||
/// Diesel Model of the Show cover query.
|
||||
/// Used for fetching information about a Show's cover.
|
||||
pub struct ShowCoverModel {
|
||||
id: i32,
|
||||
title: String,
|
||||
image_uri: Option<String>,
|
||||
}
|
||||
|
||||
impl From<Show> for ShowCoverModel {
|
||||
fn from(p: Show) -> ShowCoverModel {
|
||||
ShowCoverModel {
|
||||
id: p.id(),
|
||||
title: p.title,
|
||||
image_uri: p.image_uri,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShowCoverModel {
|
||||
/// Get the Feed `id`.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Get the Feed `title`.
|
||||
pub fn title(&self) -> &str {
|
||||
&self.title
|
||||
}
|
||||
|
||||
/// Get the `image_uri`.
|
||||
///
|
||||
/// Represents the uri(url usually) that the Feed cover image is located at.
|
||||
pub fn image_uri(&self) -> Option<&str> {
|
||||
self.image_uri.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
}
|
||||
358
podcasts-data/src/models/source.rs
Normal file
@@ -0,0 +1,358 @@
|
||||
// source.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use diesel::SaveChangesDsl;
|
||||
// use failure::ResultExt;
|
||||
use rss::Channel;
|
||||
use url::Url;
|
||||
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::{Body, Client};
|
||||
use hyper_tls::HttpsConnector;
|
||||
|
||||
use http::header::{
|
||||
HeaderValue, AUTHORIZATION, ETAG, IF_MODIFIED_SINCE, IF_NONE_MATCH, LAST_MODIFIED, LOCATION,
|
||||
USER_AGENT as USER_AGENT_HEADER,
|
||||
};
|
||||
use http::{Request, Response, StatusCode, Uri};
|
||||
// use futures::future::ok;
|
||||
use futures::future::{loop_fn, Future, Loop};
|
||||
use futures::prelude::*;
|
||||
|
||||
use base64::{encode_config, URL_SAFE};
|
||||
|
||||
use crate::database::connection;
|
||||
use crate::errors::*;
|
||||
use crate::feed::{Feed, FeedBuilder};
|
||||
use crate::models::{NewSource, Save};
|
||||
use crate::schema::source;
|
||||
use crate::USER_AGENT;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Queryable, Identifiable, AsChangeset, PartialEq)]
|
||||
#[table_name = "source"]
|
||||
#[changeset_options(treat_none_as_null = "true")]
|
||||
#[derive(Debug, Clone)]
|
||||
/// Diesel Model of the source table.
|
||||
pub struct Source {
|
||||
id: i32,
|
||||
uri: String,
|
||||
last_modified: Option<String>,
|
||||
http_etag: Option<String>,
|
||||
}
|
||||
|
||||
impl Save<Source> for Source {
|
||||
type Error = DataError;
|
||||
|
||||
/// Helper method to easily save/"sync" current state of self to the
|
||||
/// Database.
|
||||
fn save(&self) -> Result<Source, Self::Error> {
|
||||
let db = connection();
|
||||
let con = db.get()?;
|
||||
|
||||
self.save_changes::<Source>(&*con).map_err(From::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl Source {
|
||||
/// Get the source `id` column.
|
||||
pub fn id(&self) -> i32 {
|
||||
self.id
|
||||
}
|
||||
|
||||
/// Represents the location(usually url) of the Feed xml file.
|
||||
pub fn uri(&self) -> &str {
|
||||
&self.uri
|
||||
}
|
||||
|
||||
/// Set the `uri` field value.
|
||||
pub fn set_uri(&mut self, uri: String) {
|
||||
self.uri = uri;
|
||||
}
|
||||
|
||||
/// Represents the Http Last-Modified Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn last_modified(&self) -> Option<&str> {
|
||||
self.last_modified.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `last_modified` value.
|
||||
pub fn set_last_modified(&mut self, value: Option<String>) {
|
||||
// self.last_modified = value.map(|x| x.to_string());
|
||||
self.last_modified = value;
|
||||
}
|
||||
|
||||
/// Represents the Http Etag Header field.
|
||||
///
|
||||
/// See [RFC 7231](https://tools.ietf.org/html/rfc7231#section-7.2) for more.
|
||||
pub fn http_etag(&self) -> Option<&str> {
|
||||
self.http_etag.as_ref().map(|s| s.as_str())
|
||||
}
|
||||
|
||||
/// Set `http_etag` value.
|
||||
pub fn set_http_etag(&mut self, value: Option<&str>) {
|
||||
self.http_etag = value.map(|x| x.to_string());
|
||||
}
|
||||
|
||||
/// Extract the Etag and Last-Modified from res, and update self and the
/// corresponding db row.
|
||||
fn update_etag(mut self, res: &Response<Body>) -> Result<Self, DataError> {
|
||||
let headers = res.headers();
|
||||
|
||||
let etag = headers
|
||||
.get(ETAG)
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.map(From::from);
|
||||
let lmod = headers
|
||||
.get(LAST_MODIFIED)
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.map(From::from);
|
||||
|
||||
if (self.http_etag() != etag) || (self.last_modified != lmod) {
|
||||
self.set_http_etag(etag);
|
||||
self.set_last_modified(lmod);
|
||||
self = self.save()?;
|
||||
}
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Clear the `HTTP` `Etag` and `Last-modified` headers.
|
||||
/// This method does not sync the state of self in the database, call
|
||||
/// .save() method explicitly
|
||||
fn clear_etags(&mut self) {
|
||||
debug!("Source etags before clear: {:#?}", &self);
|
||||
self.http_etag = None;
|
||||
self.last_modified = None;
|
||||
}
|
||||
|
||||
fn make_err(self, context: &str, code: StatusCode) -> DataError {
|
||||
DataError::HttpStatusGeneral(HttpStatusError::new(self.uri, code, context.into()))
|
||||
}
|
||||
|
||||
// TODO match on more stuff
|
||||
// 301: Moved Permanently
|
||||
// 304: Up to date Feed, checked with the Etag
|
||||
// 307: Temporary redirect of the url
|
||||
// 308: Permanent redirect of the url
|
||||
// 401: Unauthorized
|
||||
// 403: Forbidden
|
||||
// 408: Timeout
|
||||
// 410: Feed deleted
|
||||
// TODO: Rethink this API.
|
||||
fn match_status(mut self, res: Response<Body>) -> Result<Response<Body>, DataError> {
|
||||
let code = res.status();
|
||||
|
||||
if code.is_success() {
|
||||
// If the request is successful, save the etag
|
||||
self = self.update_etag(&res)?
|
||||
} else {
|
||||
match code.as_u16() {
|
||||
// Save etags if it returns NotModified
|
||||
304 => self = self.update_etag(&res)?,
|
||||
// Clear the Etag/lmod else
|
||||
_ => {
|
||||
self.clear_etags();
|
||||
self = self.save()?;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
match code.as_u16() {
|
||||
304 => {
|
||||
info!("304: Source, (id: {}), is up to date", self.id());
|
||||
return Err(DataError::FeedNotModified(self));
|
||||
}
|
||||
301 | 302 | 308 => {
|
||||
warn!("Feed was moved permanently.");
|
||||
self = self.update_url(&res)?;
|
||||
return Err(DataError::FeedRedirect(self));
|
||||
}
|
||||
307 => {
|
||||
warn!("307: Temporary Redirect.");
|
||||
// FIXME: How is it actually handling the redirect?
|
||||
return Err(DataError::FeedRedirect(self));
|
||||
}
|
||||
401 => return Err(self.make_err("401: Unauthorized.", code)),
|
||||
403 => return Err(self.make_err("403: Forbidden.", code)),
|
||||
404 => return Err(self.make_err("404: Not found.", code)),
|
||||
408 => return Err(self.make_err("408: Request Timeout.", code)),
|
||||
410 => return Err(self.make_err("410: Feed was deleted..", code)),
|
||||
_ => info!("HTTP StatusCode: {}", code),
|
||||
};
|
||||
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
fn update_url(mut self, res: &Response<Body>) -> Result<Self, DataError> {
|
||||
let code = res.status();
|
||||
let headers = res.headers();
|
||||
info!("HTTP StatusCode: {}", code);
|
||||
debug!("Headers {:#?}", headers);
|
||||
|
||||
if let Some(url) = headers.get(LOCATION) {
|
||||
debug!("Previous Source: {:#?}", &self);
|
||||
|
||||
self.set_uri(url.to_str()?.into());
|
||||
self.clear_etags();
|
||||
self = self.save()?;
|
||||
|
||||
debug!("Updated Source: {:#?}", &self);
|
||||
info!(
|
||||
"Feed url of Source {}, was updated successfully.",
|
||||
self.id()
|
||||
);
|
||||
}
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Construct a new `Source` with the given `uri` and index it.
|
||||
///
|
||||
/// This only indexes the `Source` struct, not the Podcast Feed.
|
||||
pub fn from_url(uri: &str) -> Result<Source, DataError> {
|
||||
let url = Url::parse(uri)?;
|
||||
|
||||
NewSource::new(&url).to_source()
|
||||
}
|
||||
|
||||
/// `Feed` constructor.
|
||||
///
|
||||
/// Fetches the latest xml Feed.
|
||||
///
|
||||
/// Updates the validator Http Headers.
|
||||
///
|
||||
/// Consumes `self` and Returns the corresponding `Feed` Object.
|
||||
// Refactor into TryInto once it lands on stable.
|
||||
pub fn into_feed(
|
||||
self,
|
||||
client: Client<HttpsConnector<HttpConnector>>,
|
||||
) -> impl Future<Item = Feed, Error = DataError> {
|
||||
let id = self.id();
|
||||
let response = loop_fn(self, move |source| {
|
||||
source
|
||||
.request_constructor(&client.clone())
|
||||
.then(|res| match res {
|
||||
Ok(response) => Ok(Loop::Break(response)),
|
||||
Err(err) => match err {
|
||||
DataError::FeedRedirect(s) => {
|
||||
info!("Following redirect...");
|
||||
Ok(Loop::Continue(s))
|
||||
}
|
||||
e => Err(e),
|
||||
},
|
||||
})
|
||||
});
|
||||
|
||||
response
|
||||
.and_then(response_to_channel)
|
||||
.and_then(move |chan| {
|
||||
FeedBuilder::default()
|
||||
.channel(chan)
|
||||
.source_id(id)
|
||||
.build()
|
||||
.map_err(From::from)
|
||||
})
|
||||
}
|
||||
|
||||
fn request_constructor(
|
||||
self,
|
||||
client: &Client<HttpsConnector<HttpConnector>>,
|
||||
) -> impl Future<Item = Response<Body>, Error = DataError> {
|
||||
// FIXME: remove unwrap somehow
|
||||
let uri = Uri::from_str(self.uri()).unwrap();
|
||||
let mut req = Request::get(uri).body(Body::empty()).unwrap();
|
||||
|
||||
if let Ok(url) = Url::parse(self.uri()) {
|
||||
if let Some(password) = url.password() {
|
||||
let mut auth = "Basic ".to_owned();
|
||||
auth.push_str(&encode_config(
|
||||
&format!("{}:{}", url.username(), password),
|
||||
URL_SAFE,
|
||||
));
|
||||
req.headers_mut()
|
||||
.insert(AUTHORIZATION, HeaderValue::from_str(&auth).unwrap());
|
||||
}
|
||||
}
|
||||
|
||||
// Set the User-Agent, because some servers still seem to check it for some reason...
|
||||
req.headers_mut()
|
||||
.insert(USER_AGENT_HEADER, HeaderValue::from_static(USER_AGENT));
|
||||
|
||||
if let Some(etag) = self.http_etag() {
|
||||
req.headers_mut()
|
||||
.insert(IF_NONE_MATCH, HeaderValue::from_str(etag).unwrap());
|
||||
}
|
||||
|
||||
if let Some(lmod) = self.last_modified() {
|
||||
req.headers_mut()
|
||||
.insert(IF_MODIFIED_SINCE, HeaderValue::from_str(lmod).unwrap());
|
||||
}
|
||||
|
||||
client
|
||||
.request(req)
|
||||
.map_err(From::from)
|
||||
.and_then(move |res| self.match_status(res))
|
||||
}
|
||||
}
|
||||
|
||||
fn response_to_channel(
|
||||
res: Response<Body>,
|
||||
) -> impl Future<Item = Channel, Error = DataError> + Send {
|
||||
res.into_body()
|
||||
.concat2()
|
||||
.map(|x| x.into_iter())
|
||||
.map_err(From::from)
|
||||
.map(|iter| iter.collect::<Vec<u8>>())
|
||||
.map(|utf_8_bytes| String::from_utf8_lossy(&utf_8_bytes).into_owned())
|
||||
.and_then(|buf| Channel::from_str(&buf).map_err(From::from))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use failure::Error;
|
||||
use num_cpus;
|
||||
use tokio;
|
||||
|
||||
use crate::database::truncate_db;
|
||||
use crate::utils::get_feed;
|
||||
|
||||
#[test]
|
||||
fn test_into_feed() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
let mut rt = tokio::runtime::Runtime::new()?;
|
||||
let https = HttpsConnector::new(num_cpus::get())?;
|
||||
let client = Client::builder().build::<_, Body>(https);
|
||||
|
||||
let url = "https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill";
|
||||
let source = Source::from_url(url)?;
|
||||
let id = source.id();
|
||||
let feed = source.into_feed(client);
|
||||
let feed = rt.block_on(feed)?;
|
||||
|
||||
let expected = get_feed("tests/feeds/2018-01-20-Intercepted.xml", id);
|
||||
assert_eq!(expected, feed);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
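The Source methods above implement HTTP conditional requests: the stored `Etag` is sent back as `If-None-Match`, the stored `Last-Modified` as `If-Modified-Since`, and a `304` reply makes `match_status` return `DataError::FeedNotModified`. A standalone sketch of just that header handling, using the same `http` crate types (the function name and signature are illustrative, not part of the crate):

use http::header::{HeaderValue, IF_MODIFIED_SINCE, IF_NONE_MATCH};
use http::Request;

// Build a conditional GET for a feed, attaching the cache validators when known.
fn conditional_get(
    uri: &str,
    etag: Option<&str>,
    last_modified: Option<&str>,
) -> http::Result<Request<()>> {
    let mut req = Request::get(uri).body(())?;

    if let Some(etag) = etag {
        if let Ok(value) = HeaderValue::from_str(etag) {
            req.headers_mut().insert(IF_NONE_MATCH, value);
        }
    }
    if let Some(lmod) = last_modified {
        if let Ok(value) = HeaderValue::from_str(lmod) {
            req.headers_mut().insert(IF_MODIFIED_SINCE, value);
        }
    }
    Ok(req)
}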
355
podcasts-data/src/opml.rs
Normal file
@@ -0,0 +1,355 @@
|
||||
// opml.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! FIXME: Docs
|
||||
|
||||
// #![allow(unused)]
|
||||
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::Source;
|
||||
use xml::{
|
||||
common::XmlVersion,
|
||||
reader,
|
||||
writer::{events::XmlEvent, EmitterConfig},
|
||||
};
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::Path;
|
||||
|
||||
use std::fs::File;
|
||||
// use std::io::BufReader;
|
||||
|
||||
use failure::Error;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
// FIXME: Make it a Diesel model
|
||||
/// Represents an `outline` xml element as per the `OPML` [specification][spec]
|
||||
/// Sub-elements not related to `RSS` are omitted.
|
||||
///
|
||||
/// [spec]: http://dev.opml.org/spec2.html
|
||||
pub struct Opml {
|
||||
title: String,
|
||||
description: String,
|
||||
url: String,
|
||||
}
|
||||
|
||||
/// Import feed urls from a reader `R` into the `Source` table.
|
||||
// TODO: Write test
|
||||
pub fn import_to_db<R: Read>(reader: R) -> Result<Vec<Source>, reader::Error> {
|
||||
let feeds = extract_sources(reader)?
|
||||
.iter()
|
||||
.map(|opml| Source::from_url(&opml.url))
|
||||
.filter_map(|s| {
|
||||
if let Err(ref err) = s {
|
||||
let txt = "If you think this might be a bug please consider filling a report over \
|
||||
at https://gitlab.gnome.org/World/podcasts/issues/new";
|
||||
|
||||
error!("Failed to import a Show: {}", err);
|
||||
error!("{}", txt);
|
||||
}
|
||||
|
||||
s.ok()
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(feeds)
|
||||
}
|
||||
|
||||
/// Open a File from `P`, try to parse the OPML, then insert the Feeds in the database and
/// return the new `Source`s.
|
||||
// TODO: Write test
|
||||
pub fn import_from_file<P: AsRef<Path>>(path: P) -> Result<Vec<Source>, DataError> {
|
||||
let content = fs::read(path)?;
|
||||
import_to_db(content.as_slice()).map_err(From::from)
|
||||
}
|
||||
|
||||
/// Export a file to `P`, taking the feeds from the database and outputting
|
||||
/// them in opml format.
|
||||
pub fn export_from_db<P: AsRef<Path>>(path: P, export_title: &str) -> Result<(), Error> {
|
||||
let file = File::create(path)?;
|
||||
export_to_file(&file, export_title)
|
||||
}
|
||||
|
||||
/// Export from `Source`s and `Show`s into `F` in OPML format
|
||||
pub fn export_to_file<F: Write>(file: F, export_title: &str) -> Result<(), Error> {
|
||||
let config = EmitterConfig::new().perform_indent(true);
|
||||
|
||||
let mut writer = config.create_writer(file);
|
||||
|
||||
let mut events: Vec<XmlEvent<'_>> = Vec::new();
|
||||
|
||||
// Set up headers
|
||||
let doc = XmlEvent::StartDocument {
|
||||
version: XmlVersion::Version10,
|
||||
encoding: Some("UTF-8"),
|
||||
standalone: Some(false),
|
||||
};
|
||||
events.push(doc);
|
||||
|
||||
let opml: XmlEvent<'_> = XmlEvent::start_element("opml")
|
||||
.attr("version", "2.0")
|
||||
.into();
|
||||
events.push(opml);
|
||||
|
||||
let head: XmlEvent<'_> = XmlEvent::start_element("head").into();
|
||||
events.push(head);
|
||||
|
||||
let title_ev: XmlEvent<'_> = XmlEvent::start_element("title").into();
|
||||
events.push(title_ev);
|
||||
|
||||
let title_chars: XmlEvent<'_> = XmlEvent::characters(export_title).into();
|
||||
events.push(title_chars);
|
||||
|
||||
// Close <title> & <head>
|
||||
events.push(XmlEvent::end_element().into());
|
||||
events.push(XmlEvent::end_element().into());
|
||||
|
||||
let body: XmlEvent<'_> = XmlEvent::start_element("body").into();
|
||||
events.push(body);
|
||||
|
||||
for event in events {
|
||||
writer.write(event)?;
|
||||
}
|
||||
|
||||
// FIXME: Make this a model of a joined query (http://docs.diesel.rs/diesel/macro.joinable.html)
|
||||
let shows = dbqueries::get_podcasts()?.into_iter().map(|show| {
|
||||
let source = dbqueries::get_source_from_id(show.source_id()).unwrap();
|
||||
(source, show)
|
||||
});
|
||||
|
||||
for (ref source, ref show) in shows {
|
||||
let title = show.title();
|
||||
let link = show.link();
|
||||
let xml_url = source.uri();
|
||||
|
||||
let s_ev: XmlEvent<'_> = XmlEvent::start_element("outline")
|
||||
.attr("text", title)
|
||||
.attr("title", title)
|
||||
.attr("type", "rss")
|
||||
.attr("xmlUrl", xml_url)
|
||||
.attr("htmlUrl", link)
|
||||
.into();
|
||||
|
||||
let end_ev: XmlEvent<'_> = XmlEvent::end_element().into();
|
||||
writer.write(s_ev)?;
|
||||
writer.write(end_ev)?;
|
||||
}
|
||||
|
||||
// Close <body> and <opml>
|
||||
let end_bod: XmlEvent<'_> = XmlEvent::end_element().into();
|
||||
writer.write(end_bod)?;
|
||||
let end_opml: XmlEvent<'_> = XmlEvent::end_element().into();
|
||||
writer.write(end_opml)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Extracts the `outline` elements from a reader `R` and returns a `HashSet` of `Opml` structs.
|
||||
pub fn extract_sources<R: Read>(reader: R) -> Result<HashSet<Opml>, reader::Error> {
|
||||
let mut list = HashSet::new();
|
||||
let parser = reader::EventReader::new(reader);
|
||||
|
||||
parser
|
||||
.into_iter()
|
||||
.map(|e| match e {
|
||||
Ok(reader::XmlEvent::StartElement {
|
||||
name, attributes, ..
|
||||
}) => {
|
||||
if name.local_name == "outline" {
|
||||
let mut title = String::new();
|
||||
let mut url = String::new();
|
||||
let mut description = String::new();
|
||||
|
||||
attributes.into_iter().for_each(|attribute| {
|
||||
match attribute.name.local_name.as_str() {
|
||||
"title" => title = attribute.value,
|
||||
"xmlUrl" => url = attribute.value,
|
||||
"description" => description = attribute.value,
|
||||
_ => {}
|
||||
}
|
||||
});
|
||||
|
||||
let feed = Opml {
|
||||
title,
|
||||
description,
|
||||
url,
|
||||
};
|
||||
list.insert(feed);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Err(err) => Err(err),
|
||||
_ => Ok(()),
|
||||
})
|
||||
.collect::<Result<Vec<_>, reader::Error>>()?;
|
||||
|
||||
Ok(list)
|
||||
}
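// Illustrative sketch (not part of the original source): extract feeds from a
// small OPML document held in memory. Any `Read` implementor works, so a byte
// slice is enough here; the feed below is made up.
#[allow(dead_code)]
fn example_extract() -> Result<HashSet<Opml>, reader::Error> {
    let opml = r#"<?xml version="1.0" encoding="UTF-8"?>
        <opml version="2.0"><body>
            <outline type="rss" title="Example Show"
                     description="An example feed"
                     xmlUrl="https://example.com/feed.xml"/>
        </body></opml>"#;
    extract_sources(opml.as_bytes())
}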
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::Local;
|
||||
use failure::Error;
|
||||
use futures::Future;
|
||||
|
||||
use crate::database::{truncate_db, TEMPDIR};
|
||||
use crate::utils::get_feed;
|
||||
|
||||
const URLS: &[(&str, &str)] = {
|
||||
&[
|
||||
(
|
||||
"tests/feeds/2018-01-20-Intercepted.xml",
|
||||
"https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-LinuxUnplugged.xml",
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.\
|
||||
com/linuxunplugged",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-TheTipOff.xml",
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-StealTheStars.xml",
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.\
|
||||
com/steal-the-stars",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2018-01-20-GreaterThanCode.xml",
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
|
||||
com/feed/podcast",
|
||||
),
|
||||
(
|
||||
"tests/feeds/2019-01-27-ACC.xml",
|
||||
"https://web.archive.org/web/20190127005213if_/https://anticapitalistchronicles.libsyn.com/rss"
|
||||
),
|
||||
]
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_extract() -> Result<(), Error> {
|
||||
let int_title = String::from("Intercepted with Jeremy Scahill");
|
||||
let int_url = String::from("https://feeds.feedburner.com/InterceptedWithJeremyScahill");
|
||||
let int_desc = String::from(
|
||||
"The people behind The Intercept’s fearless reporting and incisive \
|
||||
commentary—Jeremy Scahill, Glenn Greenwald, Betsy Reed and others—discuss the \
|
||||
crucial issues of our time: national security, civil liberties, foreign policy, \
|
||||
and criminal justice. Plus interviews with artists, thinkers, and newsmakers \
|
||||
who challenge our preconceptions about the world we live in.",
|
||||
);
|
||||
|
||||
let dec_title = String::from("Deconstructed with Mehdi Hasan");
|
||||
let dec_url = String::from("https://rss.prod.firstlook.media/deconstructed/podcast.rss");
|
||||
let dec_desc = String::from(
|
||||
"Journalist Mehdi Hasan is known around the world for his televised takedowns of \
|
||||
presidents and prime ministers. In this new podcast from The Intercept, Mehdi \
|
||||
unpacks a game-changing news event of the week while challenging the conventional \
|
||||
wisdom. As a Brit, a Muslim and an immigrant based in Donald Trump's Washington \
|
||||
D.C., Mehdi gives a refreshingly provocative perspective on the ups and downs of \
|
||||
American—and global—politics.",
|
||||
);
|
||||
|
||||
#[cfg_attr(rustfmt, rustfmt_skip)]
|
||||
let sample1 = format!(
|
||||
"<?xml version=\"1.0\" encoding=\"UTF-8\"?> \
|
||||
<opml version=\"2.0\"> \
|
||||
<head> \
|
||||
<title>Test OPML File</title> \
|
||||
<dateCreated>{}</dateCreated> \
|
||||
<docs>http://www.opml.org/spec2</docs> \
|
||||
</head> \
|
||||
<body> \
|
||||
<outline type=\"rss\" title=\"{}\" description=\"{}\" xmlUrl=\"{}\"/> \
|
||||
<outline type=\"rss\" title=\"{}\" description=\"{}\" xmlUrl=\"{}\"/> \
|
||||
</body> \
|
||||
</opml>",
|
||||
Local::now().format("%a, %d %b %Y %T %Z"),
|
||||
int_title,
|
||||
int_desc,
|
||||
int_url,
|
||||
dec_title,
|
||||
dec_desc,
|
||||
dec_url,
|
||||
);
|
||||
|
||||
let map = hashset![
|
||||
Opml {
|
||||
title: int_title,
|
||||
description: int_desc,
|
||||
url: int_url
|
||||
},
|
||||
Opml {
|
||||
title: dec_title,
|
||||
description: dec_desc,
|
||||
url: dec_url
|
||||
},
|
||||
];
|
||||
assert_eq!(extract_sources(sample1.as_bytes())?, map);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_export() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
|
||||
URLS.iter().for_each(|&(path, url)| {
|
||||
// Create and insert a Source into db
|
||||
let s = Source::from_url(url).unwrap();
|
||||
let feed = get_feed(path, s.id());
|
||||
feed.index().wait().unwrap();
|
||||
});
|
||||
|
||||
let mut map: HashSet<Opml> = HashSet::new();
|
||||
let shows = dbqueries::get_podcasts()?.into_iter().map(|show| {
|
||||
let source = dbqueries::get_source_from_id(show.source_id()).unwrap();
|
||||
(source, show)
|
||||
});
|
||||
|
||||
for (ref source, ref show) in shows {
|
||||
let title = show.title().to_string();
|
||||
// description is an optional field that we don't export
|
||||
let description = String::new();
|
||||
let url = source.uri().to_string();
|
||||
|
||||
map.insert(Opml {
|
||||
title,
|
||||
description,
|
||||
url,
|
||||
});
|
||||
}
|
||||
|
||||
let opml_path = TEMPDIR.path().join("podcasts.opml");
|
||||
export_from_db(opml_path.as_path(), "GNOME Podcasts Subscriptions")?;
|
||||
let opml_file = File::open(opml_path.as_path())?;
|
||||
assert_eq!(extract_sources(&opml_file)?, map);
|
||||
|
||||
// extract_sources drains the reader it's passed
|
||||
let mut opml_file = File::open(opml_path.as_path())?;
|
||||
let mut opml_str = String::new();
|
||||
opml_file.read_to_string(&mut opml_str)?;
|
||||
assert_eq!(opml_str, include_str!("../tests/export_test.opml"));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -1,3 +1,22 @@
|
||||
// parser.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
use rss::extension::itunes::ITunesItemExtension;
|
||||
|
||||
/// Parses an Item Itunes extension and returns its duration value in seconds.
|
||||
@ -77,5 +96,4 @@ mod tests {
|
||||
let item = Some(&extension);
|
||||
assert_eq!(parse_itunes_duration(item), Some(6970));
|
||||
}
|
||||
|
||||
}
|
||||
133
podcasts-data/src/pipeline.rs
Normal file
@ -0,0 +1,133 @@
|
||||
// pipeline.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
// FIXME:
|
||||
//! Docs.
|
||||
|
||||
use futures::{future::ok, lazy, prelude::*, stream::FuturesUnordered};
|
||||
use tokio;
|
||||
|
||||
use hyper::client::HttpConnector;
|
||||
use hyper::{Body, Client};
|
||||
use hyper_tls::HttpsConnector;
|
||||
|
||||
use num_cpus;
|
||||
|
||||
use crate::errors::DataError;
|
||||
use crate::Source;
|
||||
|
||||
use std::iter::FromIterator;
|
||||
|
||||
type HttpsClient = Client<HttpsConnector<HttpConnector>>;
|
||||
|
||||
/// The pipeline to be run for indexing and updating a Podcast feed that originates from
|
||||
/// `Source.uri`.
|
||||
///
|
||||
/// Messy temp diagram:
|
||||
/// Source -> GET Request -> Update Etags -> Check Status -> Parse `xml/Rss` ->
|
||||
/// Convert `rss::Channel` into `Feed` -> Index Podcast -> Index Episodes.
|
||||
pub fn pipeline<'a, S>(sources: S, client: HttpsClient) -> impl Future<Item = (), Error = ()> + 'a
|
||||
where
|
||||
S: Stream<Item = Source, Error = DataError> + Send + 'a,
|
||||
{
|
||||
sources
|
||||
.and_then(move |s| s.into_feed(client.clone()))
|
||||
.map_err(|err| {
|
||||
match err {
|
||||
// Avoid spamming the stderr when it's not an actual error
|
||||
DataError::FeedNotModified(_) => (),
|
||||
_ => error!("Error: {}", err),
|
||||
}
|
||||
})
|
||||
.and_then(move |feed| {
|
||||
let fut = lazy(|| feed.index().map_err(|err| error!("Error: {}", err)));
|
||||
tokio::spawn(fut);
|
||||
Ok(())
|
||||
})
|
||||
// for_each terminates the stream at the first error, so we make sure
|
||||
// we pass good values regardless
|
||||
.then(move |_| ok(()))
|
||||
// Convert the stream into a Future to later execute as a tokio task
|
||||
.for_each(move |_| ok(()))
|
||||
}
|
||||
|
||||
/// Creates a tokio `reactor::Core` and a `hyper::Client`, and
|
||||
/// runs the pipeline to completion. The `reactor::Core` is dropped afterwards.
|
||||
pub fn run<S>(sources: S) -> Result<(), DataError>
|
||||
where
|
||||
S: IntoIterator<Item = Source>,
|
||||
{
|
||||
let https = HttpsConnector::new(num_cpus::get())?;
|
||||
let client = Client::builder().build::<_, Body>(https);
|
||||
|
||||
let foo = sources.into_iter().map(ok::<_, _>);
|
||||
let stream = FuturesUnordered::from_iter(foo);
|
||||
let p = pipeline(stream, client);
|
||||
tokio::run(p);
|
||||
|
||||
Ok(())
|
||||
}
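// Illustrative sketch (not part of the original source): the typical call
// site fetches every `Source` from the database and hands the collection to
// `run`, which drives the whole pipeline on a tokio runtime.
#[allow(dead_code)]
fn example_update_all() -> Result<(), DataError> {
    let sources = crate::dbqueries::get_sources()?;
    run(sources)
}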
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::database::truncate_db;
|
||||
use crate::dbqueries;
|
||||
use crate::Source;
|
||||
use failure::Error;
|
||||
|
||||
// Feed URLs.
|
||||
const URLS: &[&str] = &[
|
||||
"https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.\
|
||||
com/InterceptedWithJeremyScahill",
|
||||
"https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.com/linuxunplugged",
|
||||
"https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff",
|
||||
"https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars",
|
||||
"https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.\
|
||||
com/feed/podcast",
|
||||
];
|
||||
|
||||
#[test]
|
||||
/// Insert feeds and update/index them.
|
||||
fn test_pipeline() -> Result<(), Error> {
|
||||
truncate_db()?;
|
||||
let bad_url = "https://gitlab.gnome.org/World/podcasts.atom";
|
||||
// If a stream returns an error/None it stops,
|
||||
// but we want to parse all feeds regardless of whether one fails
|
||||
Source::from_url(bad_url)?;
|
||||
|
||||
URLS.iter().for_each(|url| {
|
||||
// Index the urls into the source table.
|
||||
Source::from_url(url).unwrap();
|
||||
});
|
||||
|
||||
let sources = dbqueries::get_sources()?;
|
||||
run(sources)?;
|
||||
|
||||
let sources = dbqueries::get_sources()?;
|
||||
// Run again to cover Unique constraint errors.
|
||||
run(sources)?;
|
||||
|
||||
// Assert the index rows equal the controlled results
|
||||
assert_eq!(dbqueries::get_sources()?.len(), 6);
|
||||
assert_eq!(dbqueries::get_podcasts()?.len(), 5);
|
||||
assert_eq!(dbqueries::get_episodes()?.len(), 354);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
29
podcasts-data/src/schema.patch
Normal file
@ -0,0 +1,29 @@
|
||||
diff --git a/podcasts-data/src/schema.rs b/podcasts-data/src/schema.rs
|
||||
index 03cbed0..88f1622 100644
|
||||
--- a/podcasts-data/src/schema.rs
|
||||
+++ b/podcasts-data/src/schema.rs
|
||||
@@ -1,8 +1,11 @@
|
||||
+#![allow(warnings)]
|
||||
+
|
||||
table! {
|
||||
episodes (title, show_id) {
|
||||
+ rowid -> Integer,
|
||||
title -> Text,
|
||||
uri -> Nullable<Text>,
|
||||
local_uri -> Nullable<Text>,
|
||||
description -> Nullable<Text>,
|
||||
epoch -> Integer,
|
||||
length -> Nullable<Integer>,
|
||||
@@ -30,11 +33,7 @@ table! {
|
||||
uri -> Text,
|
||||
last_modified -> Nullable<Text>,
|
||||
http_etag -> Nullable<Text>,
|
||||
}
|
||||
}
|
||||
|
||||
-allow_tables_to_appear_in_same_query!(
|
||||
- episodes,
|
||||
- shows,
|
||||
- source,
|
||||
-);
|
||||
+allow_tables_to_appear_in_same_query!(episodes, shows, source);
|
||||
@ -1,5 +1,7 @@
|
||||
#![allow(warnings)]
|
||||
|
||||
table! {
|
||||
episode (title, podcast_id) {
|
||||
episodes (title, show_id) {
|
||||
rowid -> Integer,
|
||||
title -> Text,
|
||||
uri -> Nullable<Text>,
|
||||
@ -10,22 +12,17 @@ table! {
|
||||
duration -> Nullable<Integer>,
|
||||
guid -> Nullable<Text>,
|
||||
played -> Nullable<Integer>,
|
||||
favorite -> Bool,
|
||||
archive -> Bool,
|
||||
podcast_id -> Integer,
|
||||
show_id -> Integer,
|
||||
}
|
||||
}
|
||||
|
||||
table! {
|
||||
podcast (id) {
|
||||
shows (id) {
|
||||
id -> Integer,
|
||||
title -> Text,
|
||||
link -> Text,
|
||||
description -> Text,
|
||||
image_uri -> Nullable<Text>,
|
||||
favorite -> Bool,
|
||||
archive -> Bool,
|
||||
always_dl -> Bool,
|
||||
source_id -> Integer,
|
||||
}
|
||||
}
|
||||
@ -39,4 +36,4 @@ table! {
|
||||
}
|
||||
}
|
||||
|
||||
allow_tables_to_appear_in_same_query!(episode, podcast, source,);
|
||||
allow_tables_to_appear_in_same_query!(episodes, shows, source);
|
||||
@ -1,20 +1,39 @@
|
||||
// utils.rs
|
||||
//
|
||||
// Copyright 2017 Jordan Petridis <jpetridis@gnome.org>
|
||||
//
|
||||
// This program is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
//
|
||||
// SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
//! Helper utilities for accomplishing various tasks.
|
||||
|
||||
use chrono::prelude::*;
|
||||
use rayon::prelude::*;
|
||||
|
||||
use itertools::Itertools;
|
||||
use url::{Position, Url};
|
||||
|
||||
use dbqueries;
|
||||
use errors::DataError;
|
||||
use models::{EpisodeCleanerQuery, Podcast, Save};
|
||||
use xdg_dirs::DL_DIR;
|
||||
use crate::dbqueries;
|
||||
use crate::errors::DataError;
|
||||
use crate::models::{EpisodeCleanerModel, Save, Show};
|
||||
use crate::xdg_dirs::DL_DIR;
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Scan downloaded `episode` entries that might have broken `local_uri`s and set them to `None`.
|
||||
/// Scan downloaded `episode` entries that might have broken `local_uri`s and
|
||||
/// set them to `None`.
|
||||
fn download_checker() -> Result<(), DataError> {
|
||||
let mut episodes = dbqueries::get_downloaded_episodes()?;
|
||||
|
||||
@ -28,41 +47,38 @@ fn download_checker() -> Result<(), DataError> {
|
||||
})
|
||||
.for_each(|ep| {
|
||||
ep.set_local_uri(None);
|
||||
if let Err(err) = ep.save() {
|
||||
error!("Error while trying to update episode: {:#?}", ep);
|
||||
error!("{}", err);
|
||||
};
|
||||
ep.save()
|
||||
.map_err(|err| error!("{}", err))
|
||||
.map_err(|_| error!("Error while trying to update episode: {:#?}", ep))
|
||||
.ok();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Delete watched `episodes` that have exceded their liftime after played.
|
||||
fn played_cleaner() -> Result<(), DataError> {
|
||||
/// Delete watched `episodes` that have exceeded their lifetime after played.
|
||||
fn played_cleaner(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
|
||||
let mut episodes = dbqueries::get_played_cleaner_episodes()?;
|
||||
let now_utc = cleanup_date.timestamp() as i32;
|
||||
|
||||
let now_utc = Utc::now().timestamp() as i32;
|
||||
episodes
|
||||
.par_iter_mut()
|
||||
.filter(|ep| ep.local_uri().is_some() && ep.played().is_some())
|
||||
.for_each(|ep| {
|
||||
// TODO: expose a config and a user set option.
|
||||
// Change the test too when exposed
|
||||
let limit = ep.played().unwrap() + 172_800; // add 2days in seconds
|
||||
let limit = ep.played().unwrap();
|
||||
if now_utc > limit {
|
||||
if let Err(err) = delete_local_content(ep) {
|
||||
error!("Error while trying to delete file: {:?}", ep.local_uri());
|
||||
error!("{}", err);
|
||||
} else {
|
||||
info!("Episode {:?} was deleted succesfully.", ep.local_uri());
|
||||
};
|
||||
delete_local_content(ep)
|
||||
.map(|_| info!("Episode {:?} was deleted successfully.", ep.local_uri()))
|
||||
.map_err(|err| error!("Error: {}", err))
|
||||
.map_err(|_| error!("Failed to delete file: {:?}", ep.local_uri()))
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check `ep.local_uri` field and delete the file it points to.
|
||||
fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> {
|
||||
fn delete_local_content(ep: &mut EpisodeCleanerModel) -> Result<(), DataError> {
|
||||
if ep.local_uri().is_some() {
|
||||
let uri = ep.local_uri().unwrap().to_owned();
|
||||
if Path::new(&uri).exists() {
|
||||
@ -91,10 +107,10 @@ fn delete_local_content(ep: &mut EpisodeCleanerQuery) -> Result<(), DataError> {
|
||||
///
|
||||
/// Runs a cleaner for played Episodes that are past the lifetime limit and
|
||||
/// scheduled for removal.
|
||||
pub fn checkup() -> Result<(), DataError> {
|
||||
pub fn checkup(cleanup_date: DateTime<Utc>) -> Result<(), DataError> {
|
||||
info!("Running database checks.");
|
||||
download_checker()?;
|
||||
played_cleaner()?;
|
||||
played_cleaner(cleanup_date)?;
|
||||
info!("Checks completed.");
|
||||
Ok(())
|
||||
}
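// Illustrative sketch (not part of the original source): a caller that keeps
// played episodes around for two days before they become eligible for
// cleanup. The two-day window is an arbitrary example value.
#[allow(dead_code)]
fn example_checkup() -> Result<(), DataError> {
    let cleanup_date = Utc::now() - chrono::Duration::days(2);
    checkup(cleanup_date)
}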
|
||||
@ -106,30 +122,14 @@ pub fn url_cleaner(s: &str) -> String {
|
||||
// https://rust-lang-nursery.github.io/rust-cookbook/net.html
|
||||
// #remove-fragment-identifiers-and-query-pairs-from-a-url
|
||||
match Url::parse(s) {
|
||||
Ok(parsed) => parsed[..Position::AfterPath].to_owned(),
|
||||
Ok(parsed) => parsed[..Position::AfterQuery].to_owned(),
|
||||
_ => s.trim().to_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function that strips extra spaces and newlines and ignores tabs.
|
||||
#[allow(match_same_arms)]
|
||||
pub fn replace_extra_spaces(s: &str) -> String {
|
||||
s.trim()
|
||||
.chars()
|
||||
.filter(|ch| *ch != '\t')
|
||||
.coalesce(|current, next| match (current, next) {
|
||||
('\n', '\n') => Ok('\n'),
|
||||
('\n', ' ') => Ok('\n'),
|
||||
(' ', '\n') => Ok('\n'),
|
||||
(' ', ' ') => Ok(' '),
|
||||
(_, _) => Err((current, next)),
|
||||
})
|
||||
.collect::<String>()
|
||||
}
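// Illustrative sketch (not part of the original source): how the two text
// helpers are expected to behave on typical feed data. The inputs are made up.
#[allow(dead_code)]
fn example_text_cleanup() {
    // Query strings are kept, fragments are dropped.
    let uri = url_cleaner("https://example.com/ep.mp3?x=1#frag");
    assert_eq!(uri, "https://example.com/ep.mp3?x=1");

    // Runs of spaces and newlines collapse, tabs are ignored.
    assert_eq!(replace_extra_spaces("a  b \n\n c"), "a b\nc");
}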
|
||||
|
||||
/// Returns the URI of a Podcast Downloads given it's title.
|
||||
/// Returns the URI of a Show Downloads given its title.
|
||||
pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
|
||||
// It might be better to make it a hash of the title or the podcast rowid
|
||||
// It might be better to make it a hash of the title or the Show rowid
|
||||
let download_fold = format!("{}/{}", DL_DIR.to_str().unwrap(), pd_title);
|
||||
|
||||
// Create the folder
|
||||
@ -142,26 +142,25 @@ pub fn get_download_folder(pd_title: &str) -> Result<String, DataError> {
|
||||
/// Removes all the entries associated with the given show from the database,
|
||||
/// and deletes all of the downloaded content.
|
||||
// TODO: Write Tests
|
||||
pub fn delete_show(pd: &Podcast) -> Result<(), DataError> {
|
||||
pub fn delete_show(pd: &Show) -> Result<(), DataError> {
|
||||
dbqueries::remove_feed(pd)?;
|
||||
info!("{} was removed succesfully.", pd.title());
|
||||
info!("{} was removed successfully.", pd.title());
|
||||
|
||||
let fold = get_download_folder(pd.title())?;
|
||||
fs::remove_dir_all(&fold)?;
|
||||
info!("All the content at, {} was removed succesfully", &fold);
|
||||
info!("All the content at, {} was removed successfully", &fold);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
use Feed;
|
||||
use crate::Feed;
|
||||
|
||||
#[cfg(test)]
|
||||
/// Helper function that open a local file, parse the rss::Channel and gives back a Feed object.
|
||||
/// Alternative Feed constructor to be used for tests.
|
||||
pub fn get_feed(file_path: &str, id: i32) -> Feed {
|
||||
use feed::FeedBuilder;
|
||||
use crate::feed::FeedBuilder;
|
||||
use rss::Channel;
|
||||
use std::fs;
|
||||
use std::io::BufReader;
|
||||
|
||||
// open the xml file
|
||||
@ -177,60 +176,58 @@ pub fn get_feed(file_path: &str, id: i32) -> Feed {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
extern crate tempdir;
|
||||
|
||||
use self::tempdir::TempDir;
|
||||
use super::*;
|
||||
use chrono::Duration;
|
||||
use failure::Error;
|
||||
use tempdir::TempDir;
|
||||
|
||||
use database::truncate_db;
|
||||
use models::NewEpisodeBuilder;
|
||||
use crate::database::truncate_db;
|
||||
use crate::models::NewEpisodeBuilder;
|
||||
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
|
||||
fn helper_db() -> TempDir {
|
||||
fn helper_db() -> Result<TempDir, Error> {
|
||||
// Clean the db
|
||||
truncate_db().unwrap();
|
||||
truncate_db()?;
|
||||
// Setup tmp file stuff
|
||||
let tmp_dir = TempDir::new("hammond_test").unwrap();
|
||||
let tmp_dir = TempDir::new("podcasts_test")?;
|
||||
let valid_path = tmp_dir.path().join("virtual_dl.mp3");
|
||||
let bad_path = tmp_dir.path().join("invalid_thing.mp3");
|
||||
let mut tmp_file = File::create(&valid_path).unwrap();
|
||||
writeln!(tmp_file, "Foooo").unwrap();
|
||||
let mut tmp_file = File::create(&valid_path)?;
|
||||
writeln!(tmp_file, "Foooo")?;
|
||||
|
||||
// Setup episodes
|
||||
let n1 = NewEpisodeBuilder::default()
|
||||
.title("foo_bar".to_string())
|
||||
.podcast_id(0)
|
||||
.show_id(0)
|
||||
.build()
|
||||
.unwrap()
|
||||
.to_episode()
|
||||
.unwrap();
|
||||
.to_episode()?;
|
||||
|
||||
let n2 = NewEpisodeBuilder::default()
|
||||
.title("bar_baz".to_string())
|
||||
.podcast_id(1)
|
||||
.show_id(1)
|
||||
.build()
|
||||
.unwrap()
|
||||
.to_episode()
|
||||
.unwrap();
|
||||
.to_episode()?;
|
||||
|
||||
let mut ep1 = dbqueries::get_episode_from_pk(n1.title(), n1.podcast_id()).unwrap();
|
||||
let mut ep2 = dbqueries::get_episode_from_pk(n2.title(), n2.podcast_id()).unwrap();
|
||||
let mut ep1 = dbqueries::get_episode_cleaner_from_pk(n1.title(), n1.show_id())?;
|
||||
let mut ep2 = dbqueries::get_episode_cleaner_from_pk(n2.title(), n2.show_id())?;
|
||||
ep1.set_local_uri(Some(valid_path.to_str().unwrap()));
|
||||
ep2.set_local_uri(Some(bad_path.to_str().unwrap()));
|
||||
|
||||
ep1.save().unwrap();
|
||||
ep2.save().unwrap();
|
||||
ep1.save()?;
|
||||
ep2.save()?;
|
||||
|
||||
tmp_dir
|
||||
Ok(tmp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_download_checker() {
|
||||
let tmp_dir = helper_db();
|
||||
download_checker().unwrap();
|
||||
let episodes = dbqueries::get_downloaded_episodes().unwrap();
|
||||
fn test_download_checker() -> Result<(), Error> {
|
||||
let tmp_dir = helper_db()?;
|
||||
download_checker()?;
|
||||
let episodes = dbqueries::get_downloaded_episodes()?;
|
||||
let valid_path = tmp_dir.path().join("virtual_dl.mp3");
|
||||
|
||||
assert_eq!(episodes.len(), 1);
|
||||
@ -239,87 +236,75 @@ mod tests {
|
||||
episodes.first().unwrap().local_uri()
|
||||
);
|
||||
|
||||
let _tmp_dir = helper_db();
|
||||
download_checker().unwrap();
|
||||
let episode = dbqueries::get_episode_from_pk("bar_baz", 1).unwrap();
|
||||
let _tmp_dir = helper_db()?;
|
||||
download_checker()?;
|
||||
let episode = dbqueries::get_episode_cleaner_from_pk("bar_baz", 1)?;
|
||||
assert!(episode.local_uri().is_none());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_download_cleaner() {
|
||||
let _tmp_dir = helper_db();
|
||||
let mut episode: EpisodeCleanerQuery =
|
||||
dbqueries::get_episode_from_pk("foo_bar", 0).unwrap().into();
|
||||
fn test_download_cleaner() -> Result<(), Error> {
|
||||
let _tmp_dir = helper_db()?;
|
||||
let mut episode: EpisodeCleanerModel =
|
||||
dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?.into();
|
||||
|
||||
let valid_path = episode.local_uri().unwrap().to_owned();
|
||||
delete_local_content(&mut episode).unwrap();
|
||||
delete_local_content(&mut episode)?;
|
||||
assert_eq!(Path::new(&valid_path).exists(), false);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_played_cleaner_expired() {
|
||||
let _tmp_dir = helper_db();
|
||||
let mut episode = dbqueries::get_episode_from_pk("foo_bar", 0).unwrap();
|
||||
let now_utc = Utc::now().timestamp() as i32;
|
||||
// let limit = now_utc - 172_800;
|
||||
let epoch = now_utc - 200_000;
|
||||
fn test_played_cleaner_expired() -> Result<(), Error> {
|
||||
let _tmp_dir = helper_db()?;
|
||||
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
|
||||
let cleanup_date = Utc::now() - Duration::seconds(1000);
|
||||
let epoch = cleanup_date.timestamp() as i32 - 1;
|
||||
episode.set_played(Some(epoch));
|
||||
episode.save().unwrap();
|
||||
episode.save()?;
|
||||
let valid_path = episode.local_uri().unwrap().to_owned();
|
||||
|
||||
// This should delete the file
|
||||
played_cleaner().unwrap();
|
||||
played_cleaner(cleanup_date)?;
|
||||
assert_eq!(Path::new(&valid_path).exists(), false);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_played_cleaner_none() {
|
||||
let _tmp_dir = helper_db();
|
||||
let mut episode = dbqueries::get_episode_from_pk("foo_bar", 0).unwrap();
|
||||
let now_utc = Utc::now().timestamp() as i32;
|
||||
// limit = 172_800;
|
||||
let epoch = now_utc - 20_000;
|
||||
fn test_played_cleaner_none() -> Result<(), Error> {
|
||||
let _tmp_dir = helper_db()?;
|
||||
let mut episode = dbqueries::get_episode_cleaner_from_pk("foo_bar", 0)?;
|
||||
let cleanup_date = Utc::now() - Duration::seconds(1000);
|
||||
let epoch = cleanup_date.timestamp() as i32 + 1;
|
||||
episode.set_played(Some(epoch));
|
||||
episode.save().unwrap();
|
||||
episode.save()?;
|
||||
let valid_path = episode.local_uri().unwrap().to_owned();
|
||||
|
||||
// This should not delete the file
|
||||
played_cleaner().unwrap();
|
||||
played_cleaner(cleanup_date)?;
|
||||
assert_eq!(Path::new(&valid_path).exists(), true);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_url_cleaner() {
|
||||
let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3";
|
||||
let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
|
||||
fn test_url_cleaner() -> Result<(), Error> {
|
||||
let good_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184";
|
||||
let bad_url = "http://traffic.megaphone.fm/FL8608731318.mp3?updated=1484685184#foobar";
|
||||
|
||||
assert_eq!(url_cleaner(bad_url), good_url);
|
||||
assert_eq!(url_cleaner(good_url), good_url);
|
||||
assert_eq!(url_cleaner(&format!(" {}\t\n", bad_url)), good_url);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_whitespace() {
|
||||
let bad_txt = "1 2 3 4 5";
|
||||
let valid_txt = "1 2 3 4 5";
|
||||
|
||||
assert_eq!(replace_extra_spaces(&bad_txt), valid_txt);
|
||||
|
||||
let bad_txt = "1 2 3 \n 4 5\n";
|
||||
let valid_txt = "1 2 3\n4 5";
|
||||
|
||||
assert_eq!(replace_extra_spaces(&bad_txt), valid_txt);
|
||||
|
||||
let bad_txt = "1 2 3 \n\n\n \n 4 5\n";
|
||||
let valid_txt = "1 2 3\n4 5";
|
||||
|
||||
assert_eq!(replace_extra_spaces(&bad_txt), valid_txt);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_dl_folder() {
|
||||
// This test needs access to the local system so we ignore it by default.
|
||||
#[ignore]
|
||||
fn test_get_dl_folder() -> Result<(), Error> {
|
||||
let foo_ = format!("{}/{}", DL_DIR.to_str().unwrap(), "foo");
|
||||
assert_eq!(get_download_folder("foo").unwrap(), foo_);
|
||||
assert_eq!(get_download_folder("foo")?, foo_);
|
||||
let _ = fs::remove_dir_all(foo_);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
14
podcasts-data/tests/export_test.opml
Normal file
@ -0,0 +1,14 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||
<opml version="2.0">
|
||||
<head>
|
||||
<title>GNOME Podcasts Subscriptions</title>
|
||||
</head>
|
||||
<body>
|
||||
<outline text="David Harvey's Anti-Capitalist Chronicles" title="David Harvey's Anti-Capitalist Chronicles" type="rss" xmlUrl="https://web.archive.org/web/20190127005213if_/https://anticapitalistchronicles.libsyn.com/rss" htmlUrl="https://www.democracyatwork.info/acc" />
|
||||
<outline text="Greater Than Code" title="Greater Than Code" type="rss" xmlUrl="https://web.archive.org/web/20180120104741if_/https://www.greaterthancode.com/feed/podcast" htmlUrl="https://www.greaterthancode.com/" />
|
||||
<outline text="Intercepted with Jeremy Scahill" title="Intercepted with Jeremy Scahill" type="rss" xmlUrl="https://web.archive.org/web/20180120083840if_/https://feeds.feedburner.com/InterceptedWithJeremyScahill" htmlUrl="https://theintercept.com/podcasts" />
|
||||
<outline text="LINUX Unplugged Podcast" title="LINUX Unplugged Podcast" type="rss" xmlUrl="https://web.archive.org/web/20180120110314if_/https://feeds.feedburner.com/linuxunplugged" htmlUrl="http://www.jupiterbroadcasting.com/" />
|
||||
<outline text="Steal the Stars" title="Steal the Stars" type="rss" xmlUrl="https://web.archive.org/web/20180120104957if_/https://rss.art19.com/steal-the-stars" htmlUrl="http://tor-labs.com/" />
|
||||
<outline text="The Tip Off" title="The Tip Off" type="rss" xmlUrl="https://web.archive.org/web/20180120110727if_/https://rss.acast.com/thetipoff" htmlUrl="http://www.acast.com/thetipoff" />
|
||||
</body>
|
||||
</opml>
|
||||
Some files were not shown because too many files have changed in this diff.