Compare commits

...

423 Commits

Author SHA1 Message Date
8191bf6066 Release 0.4.3 2019-06-29 10:44:15 +01:00
29b6bfd463 support different update types 2019-06-29 10:31:27 +01:00
2f3d23bc34 fixes 2019-06-29 10:12:50 +01:00
98dd80c4b8 fix 2019-06-29 10:03:58 +01:00
d9edb2e128 ability to download updates automatically 2019-06-29 09:23:27 +01:00
de04b40b86 Release 0.4.2 2019-06-29 07:17:45 +01:00
7206a3d926 more i2p metrics 2019-06-29 07:07:48 +01:00
98b98d8938 I2P status panel 2019-06-29 06:33:53 +01:00
294b8fcc2f MW status window 2019-06-29 05:58:46 +01:00
32f601a1b1 add ability to change i2p port 2019-06-28 23:53:22 +01:00
8e3a398080 Release 0.4.1 2019-06-28 16:42:37 +01:00
720b9688b4 Add unsharing of directories 2019-06-28 16:08:04 +01:00
e3066161c5 do not perform filesystem operations in the UI thread 2019-06-27 23:29:48 +01:00
a9aa3a524f disable i2cp interface on embedded router 2019-06-27 09:56:18 +01:00
92848e818a on empty properties source from java props 2019-06-27 03:47:56 +01:00
a7aa3008c0 bandwidth settings 2019-06-27 00:42:27 +01:00
485325e824 embedded router except for logs 2019-06-26 23:25:22 +01:00
0df2a0e039 start work on embedded router 2019-06-26 22:39:25 +01:00
fb7b4466c2 update readme 2019-06-26 22:05:04 +01:00
53105245f4 Release 0.4.0 2019-06-26 21:59:28 +01:00
b68eab91e0 Release 0.3.10 2019-06-25 22:39:43 +01:00
f72cf91462 wait for files to be loaded before sharing watched directories 2019-06-25 22:24:32 +01:00
a655c4ef50 add toString 2019-06-25 22:24:15 +01:00
5d46e9b796 switch 4_ to INFO 2019-06-25 21:50:15 +01:00
642e6e67b3 wait for all files loaded before watching dirs 2019-06-25 21:43:07 +01:00
2b6b86f903 show how many pieces the remote side already has 2019-06-25 17:44:05 +01:00
f2706a4426 clarify upload column 2019-06-25 17:24:42 +01:00
1af75413aa update for brackets 2019-06-25 16:27:02 +01:00
adc4077b1a filter asterix 2019-06-25 15:54:30 +01:00
01f4e2453b limit search length to 128 characters 2019-06-25 15:53:53 +01:00
61267374dd move button around 2019-06-25 08:10:20 +01:00
970f814685 make mesh expiration configurable 2019-06-25 08:04:57 +01:00
4fd9fc1991 add option to change download location 2019-06-25 07:59:30 +01:00
26207ffd1b add constructor 2019-06-25 07:53:24 +01:00
2614cfbe5f make host clear interval configurable 2019-06-25 07:41:20 +01:00
f11d461ec0 make download sequential ratio a property 2019-06-25 07:34:26 +01:00
b2eb2d2755 show hidden files in file choosers 2019-06-24 23:09:20 +01:00
ea46a54f19 enable AA by default 2019-06-24 22:55:26 +01:00
627add45ad remove griffon icons 2019-06-24 22:51:43 +01:00
d364855459 logo 2019-06-24 22:13:03 +01:00
14ee35e77a Release 0.3.9 2019-06-24 18:39:59 +01:00
8773eb4ee0 fix piece size calculation 2019-06-24 18:29:00 +01:00
51425bbfd9 Release 0.3.8 2019-06-24 07:38:39 +01:00
6a4879bc0b always save pieces 2019-06-24 07:29:49 +01:00
e7fe56439b persist X-Have, fix flickering bug 2019-06-24 07:20:53 +01:00
2886feab4a do not modify the set of available pieces 2019-06-23 17:08:07 +01:00
fb91194026 even noisier log 2019-06-23 16:39:38 +01:00
4527478b0d even noisier 4_ 2019-06-23 12:42:44 +01:00
b0062f146e log roots of download exceptions 2019-06-23 12:10:19 +01:00
bf16561170 Release 0.3.7 2019-06-23 11:25:19 +01:00
3b23dc29c4 if all sources are expired forget mesh 2019-06-23 11:21:39 +01:00
c0645b670e no split on list 2019-06-23 10:50:19 +01:00
30613fe530 update todo 2019-06-23 09:56:51 +01:00
e7822f6edc expire sources, fix compilation 2019-06-23 09:43:56 +01:00
7e5c9ba115 actually save 2019-06-23 09:41:20 +01:00
647fa3a481 persist download mesh 2019-06-23 09:38:42 +01:00
538eca9297 Release 0.3.6 2019-06-23 08:54:28 +01:00
e73a23d4a4 fix space not showing 2019-06-23 08:44:51 +01:00
76e41a0383 fix restoring paused downloads 2019-06-23 08:42:45 +01:00
7045927666 hide monitor options from gui 2019-06-23 08:02:28 +01:00
5fb3086b42 update faq 2019-06-23 07:52:01 +01:00
2de18227c1 persist pause state 2019-06-23 07:48:49 +01:00
bd12a1de3d pause/resume downloads 2019-06-23 06:59:52 +01:00
a3a91050c8 update todo 2019-06-23 01:50:30 +01:00
6c1cc28e49 shutdown if connection to I2P router is lost 2019-06-22 17:32:12 +01:00
b6e5b54f05 do not show monitor by default 2019-06-22 14:51:26 +01:00
a6e559ec67 change some defaults 2019-06-22 06:54:49 +01:00
f11badb824 update todo 2019-06-21 22:43:46 +01:00
44da44ff6f Release 0.3.5 2019-06-21 22:35:54 +01:00
aae3fc29ca add logging.properties with various degree of noisiness 2019-06-21 22:28:57 +01:00
c30aa19d8b Merge branch 'download-mesh' 2019-06-21 22:26:17 +01:00
c79e8712d0 correctly determine if uploader has requested piece 2019-06-21 20:36:33 +01:00
ed12d78a48 clear pieces on cancel 2019-06-21 17:22:55 +01:00
d27872cc8b investigate StringIndexOutOfBounds 2019-06-21 16:29:52 +01:00
f794c39760 personas not destinations 2019-06-21 16:15:35 +01:00
2be9c425f7 compute which pieces are requested 2019-06-21 16:09:57 +01:00
ab5fea9216 416 if piece not downloaded 2019-06-21 16:03:20 +01:00
d1c8328080 do not send alts if there aren't any 2019-06-21 15:39:00 +01:00
89e761f53b write personas on the wire part1 2019-06-21 15:26:18 +01:00
40410eba63 fix constructor 2019-06-21 14:57:53 +01:00
85466a8e80 fix npe 2019-06-21 14:45:14 +01:00
c210af7870 source partial uploads from incompletes file 2019-06-21 14:39:20 +01:00
38ff49d28f downloaders get pieces from mesh manager 2019-06-21 14:17:10 +01:00
710f9f52a8 send X-Have and X-Alts from uploader 2019-06-21 13:58:21 +01:00
1b6eda5a40 skeleton of mesh manager 2019-06-21 13:34:00 +01:00
1ee9ccf098 parse X-Have on uploader side 2019-06-21 12:55:25 +01:00
0f07562de3 pass new sources to active downloaders 2019-06-21 12:39:16 +01:00
6eb1aa07f5 key downloaders by infohash 2019-06-21 12:29:32 +01:00
05b02834af parse X-Alt 2019-06-21 12:25:04 +01:00
56125f6df8 refactor X-Have decoding logic 2019-06-21 09:32:10 +01:00
8f9996848b send X-Have from downloader too 2019-06-21 09:25:28 +01:00
dd655ed60f test for re-requesting available pieces 2019-06-21 09:12:42 +01:00
8923c6ff7d exclude local results by default 2019-06-21 08:15:20 +01:00
807ab22f8e test parsing of X-Have 2019-06-21 06:43:48 +01:00
a26ad229ee more tests 2019-06-21 05:56:42 +01:00
5504dd2251 tighten conditions 2019-06-21 05:45:11 +01:00
f9777d29f4 get existing tests to pass 2019-06-21 05:41:49 +01:00
b23226e8c6 wip on parsing X-Have from uploader 2019-06-21 05:30:56 +01:00
1249ad29e0 claim pieces from list of available pieces 2019-06-21 04:42:02 +01:00
7bb5e5b632 Release 0.3.4 2019-06-20 21:07:50 +01:00
b2e43f9765 update split pattern and add unit test 2019-06-20 21:06:39 +01:00
2aa73c203a Release 0.3.3 2019-06-20 18:08:02 +01:00
18d2b56563 fix indexing 2019-06-20 17:57:36 +01:00
a455b4ad6e redirect exceptions in result sender to log 2019-06-20 17:22:59 +01:00
761b683a81 Release 0.3.2 2019-06-20 16:04:46 +01:00
1d41bcd825 prevent empty tokens in search index 2019-06-20 16:02:48 +01:00
f1ac038b55 update split pattern 2019-06-20 15:47:00 +01:00
396c636e42 prevent empty search terms 2019-06-20 15:29:27 +01:00
e32c858e90 update README with quick FAQ 2019-06-20 14:18:37 +01:00
821555f3f1 Release 0.3.1 2019-06-20 14:02:22 +01:00
089ab4f0d9 do not retry downloads if core is shut(ting) down 2019-06-20 13:40:04 +01:00
948b6292fe add shutdown hook to shutdown core on SIGTERM 2019-06-20 13:29:15 +01:00
4e2a530a13 Release 0.3.0 2019-06-20 07:04:45 +01:00
03646e2b90 Document download mesh 2019-06-20 01:19:15 +01:00
3dce228bbb always clean 2019-06-19 22:42:05 +01:00
15a49ad550 show git revision in title 2019-06-19 22:36:22 +01:00
3d91c0f4c7 increase default tunnel count 2019-06-19 22:24:04 +01:00
2825a8d9a4 Release 0.2.10 2019-06-19 17:18:30 +01:00
8dcce9bda6 Merge branch 'connection-logic' 2019-06-19 17:16:13 +01:00
d8d3e2cd58 update tests 2019-06-19 15:54:35 +01:00
51d5dbe47e Prevent rare exception on changing trust when result tabs are open 2019-06-19 12:23:18 +01:00
84cee0aa43 retry failed hosts after one hour 2019-06-19 08:35:31 +01:00
162844787f explicitly set java versions 2019-06-19 02:11:00 +01:00
d8a2b59055 tool to print out contents of files.json 2019-06-18 22:08:33 +01:00
67a0939de4 Release 0.2.9 2019-06-18 20:15:53 +01:00
37ca922a2c reduce default retry interval 2019-06-18 20:07:20 +01:00
1d6781819b ignore CWSE if shutting down 2019-06-18 19:44:22 +01:00
64d45da94a show version on title 2019-06-18 18:57:44 +01:00
59c84d8a5e Release 0.2.8 2019-06-18 17:48:07 +01:00
8b55021a4b fix 2019-06-18 17:23:18 +01:00
8bd3ebfaf5 timestamp entries 2019-06-18 17:17:03 +01:00
526ec45da3 Release 0.2.7 2019-06-18 15:53:54 +01:00
deb7c0b4b0 exclude files present locally from search results 2019-06-18 15:45:27 +01:00
e85a0c7b2c Merge branch 'source-tracking' 2019-06-18 12:22:46 +01:00
7b021a47eb fix detection of moving files into a watched dir on Linux 2019-06-18 12:20:10 +01:00
0c21d4d6c1 implement source tracking 2019-06-18 11:34:19 +01:00
8e9f79d404 update TODO 2019-06-18 09:43:22 +01:00
bf33a6ff61 Release 0.2.6 2019-06-18 09:07:27 +01:00
19c8d84afd Merge branch 'file-monitor' 2019-06-18 09:01:09 +01:00
6a40787863 fine log 2019-06-18 05:46:16 +01:00
c698cbd737 register created directories recursively 2019-06-18 05:43:41 +01:00
9c049b9301 special case mac 2019-06-18 05:26:41 +01:00
84a9bb9482 watch deleting of files 2019-06-18 04:15:44 +01:00
0c1008d6b3 update readme 2019-06-18 04:01:04 +01:00
c46f1b1ccd delay processing of files until after 1 second after the last MODIFY event 2019-06-17 23:08:16 +01:00
7e2c4d48c6 wait for UI to load before loading files 2019-06-17 22:34:19 +01:00
71a919e62b shut down watcher before connection manager 2019-06-17 22:15:50 +01:00
d5eb65bdc2 do not print stacktrace on clean shutdown 2019-06-17 21:58:44 +01:00
aef7533bd5 make watcher thread daemon 2019-06-17 19:58:57 +01:00
e78016ead4 ui panel for managing watched directories 2019-06-17 19:23:04 +01:00
52ced669dd basic watching of directories 2019-06-17 16:36:12 +01:00
b52fb38ede fix disabling of buttons on search tab close 2019-06-17 13:43:11 +01:00
5dcef3ca05 Release 0.2.5 2019-06-17 12:53:58 +01:00
eaa0e46ce5 Merge branch 'separate-incomplete-files' 2019-06-17 12:45:51 +01:00
c4f48c02b6 delete incomplete file on cancel 2019-06-17 12:33:44 +01:00
5c16335969 if no row is selected do not enable buttons 2019-06-17 12:26:28 +01:00
546eb4e9d3 only allow one download per infohash from gui 2019-06-17 11:25:21 +01:00
c3d9e852ba separate incomplete files 2019-06-17 07:49:06 +01:00
0db7077a45 Release 0.2.4 2019-06-17 03:22:52 +01:00
614ecc85fe new piece selection logic to avoid high cpu bug 2019-06-17 03:21:37 +01:00
af66a79376 fix sorting by progress 2019-06-17 00:56:16 +01:00
465171c81d prevent multiple identical shared files 2019-06-17 00:38:05 +01:00
b507361c58 close the file before marking pieces complete 2019-06-16 23:45:23 +01:00
4d001ae74b thread-safe access to the pieces file 2019-06-16 22:56:09 +01:00
36a6e2769f Release 0.2.3 2019-06-16 19:05:12 +01:00
69eeb7d77a fix 2019-06-16 18:58:52 +01:00
551982b72a batch results sent to the GUI to prevent freeze 2019-06-16 18:51:07 +01:00
8d808f0b8f Release 0.2.2 2019-06-16 13:30:11 +01:00
7833a83c87 mark hash queries for V2 results 2019-06-16 13:17:32 +01:00
3160c1a8f3 fix for silent uploader exceptions 2019-06-16 13:01:14 +01:00
e295aa67d5 proper log statement 2019-06-16 10:59:11 +01:00
a9f5625dc3 fix popup menu on failed downloads 2019-06-16 10:50:21 +01:00
cc0af5b9ed add context menu to downloads table 2019-06-16 10:29:28 +01:00
041fc3bef3 Release 0.2.1 2019-06-16 09:37:53 +01:00
03c3b1ebf1 fix copying of hash if search results are sorted 2019-06-16 09:30:52 +01:00
aece390daa right-click menu on the search results tab 2019-06-16 09:17:17 +01:00
cf63be68e8 copy search to clipboard 2019-06-16 08:38:47 +01:00
88ece4dc23 add option to show search hashes in monitor 2019-06-16 08:29:03 +01:00
13767d58f2 detect if a query is hash, get rid of radio buttons 2019-06-16 08:09:51 +01:00
05a1ccd3d8 update todo 2019-06-16 07:31:01 +01:00
6807c14a5f add copy hash to clipboard 2019-06-16 07:23:22 +01:00
684be0c50e start of work on directory watcher 2019-06-16 07:03:16 +01:00
6655c262c6 more todo items 2019-06-16 07:01:50 +01:00
b1ccd55030 more todo items 2019-06-16 06:26:03 +01:00
a3becd0f7e update TODO 2019-06-16 06:19:28 +01:00
af2f3e0ebf in/out direction done 2019-06-16 05:56:56 +01:00
e2b7ffa1db direction in monitor tab 2019-06-16 05:52:23 +01:00
0e0176acfc add web UI to TODO list 2019-06-16 05:35:05 +01:00
7f09bb079c Beginnings of a TODO list 2019-06-16 05:28:42 +01:00
77e48b01bb Release 0.2.0 2019-06-15 21:10:11 +01:00
12db6857c1 disable unshare files popup until implemented 2019-06-15 12:12:08 +01:00
acd67733a5 sort the downloads table on updates 2019-06-15 12:08:29 +01:00
8d3ce7aa8e use the same sorted row selection logic in downloads table 2019-06-15 09:57:12 +01:00
0eb5870e9b Release 0.1.13 2019-06-15 09:19:19 +01:00
051efbfaba prevent empty searches 2019-06-15 09:11:42 +01:00
6b38d7bffb fix sorting bug try 2 2019-06-15 08:58:51 +01:00
5778d537ce Release 0.1.12 2019-06-15 08:39:19 +01:00
93664a7985 update readme 2019-06-15 08:37:29 +01:00
edd58e0c90 allow cancelling of downloads while hashlist is being fetched 2019-06-15 08:35:23 +01:00
9ac52b61dc sort results table on update 2019-06-15 08:33:22 +01:00
0a4b9c7029 shut down connection manager last 2019-06-15 08:20:10 +01:00
87b366a205 add ability to cancel failed downloads 2019-06-14 22:49:56 +01:00
040248560a Release 0.1.11 2019-06-14 22:26:28 +01:00
77caaf83de reset instead of close 2019-06-14 22:08:25 +01:00
cc5ece5103 do not throw exception on shutdown 2019-06-14 21:36:50 +01:00
db7e21e343 close connections in parallel, more shutdown fixes 2019-06-14 21:25:22 +01:00
a388eaec1d shutdown all connections on shutdown 2019-06-14 20:53:54 +01:00
8ff39072c7 download file on double-clicking a result 2019-06-14 20:42:26 +01:00
55d2ac9b24 delete partial files and pieces file on cancel 2019-06-14 20:27:14 +01:00
6ebe492fd8 if nothing is enabled cancel and retry buttons are disabled 2019-06-14 18:37:18 +01:00
165cd542ec work around not having a selected row while cancelling a download 2019-06-14 18:28:00 +01:00
5ca0c8b00d wip on unshare selected files popup menu 2019-06-14 18:08:56 +01:00
b6a38e3f23 revert to default lnf if the desired one fails 2019-06-14 18:01:14 +01:00
34d9165bd5 Release 0.1.10 2019-06-14 16:43:28 +01:00
2e52dd5c49 fix overwriting of custom nickname 2019-06-14 16:20:21 +01:00
2a315dd734 add option to exclude local results from searches 2019-06-14 14:48:01 +01:00
6b661b99c5 fix sorting by size in shared files table 2019-06-14 13:47:35 +01:00
5dacd60bbb hook up cleaning up of cancelled/finished downloads 2019-06-14 13:11:20 +01:00
f8f7cfe836 UI options panel 2019-06-14 12:51:27 +01:00
0b4f261bc1 ability to not show monitor panel 2019-06-14 12:21:14 +01:00
042d67d784 fix selection of size column 2019-06-14 11:46:31 +01:00
800df88f14 proper sorting by size 2019-06-14 11:10:19 +01:00
4d1eac50a0 update readme for sorting bug 2019-06-14 10:39:58 +01:00
c48df7f14b Release 0.1.9 2019-06-13 22:57:08 +01:00
9d04148001 remember loaded downloads from previous sessions 2019-06-13 22:53:23 +01:00
bb4d522572 Release 0.1.8 2019-06-13 15:27:06 +01:00
8052501e52 increase persistence interval to 15 seconds 2019-06-13 15:25:30 +01:00
66cc6d8ab7 reduce piece size by factor of 8 2019-06-13 15:24:26 +01:00
a45e57f5ec Release 0.1.7 2019-06-13 10:28:44 +01:00
7d8ca55d87 fix emiting of download finished event 2019-06-13 10:27:18 +01:00
de22f3c6b9 use metal lnf on java 9 or newer 2019-06-13 05:02:11 +01:00
3b0eb5678d update wire protocol 2019-06-12 23:46:48 +01:00
5a1f32e40b Release 0.1.6 2019-06-12 22:42:34 +01:00
ca3f2513e1 sync persisting of hashlist or hashroot for active downloads 2019-06-12 22:39:00 +01:00
658d9cf5a8 serialize downloads that do not have a hashlist 2019-06-12 22:22:20 +01:00
e389090b7e download side of oob hashlist 2019-06-12 22:13:16 +01:00
04ceaba514 do not persist downloaders until they have a hashlist 2019-06-12 21:02:01 +01:00
6a01d97a8d enable oob infohash in queries; send V2 search results 2019-06-12 20:55:13 +01:00
747663e1dc fix pieece size of shared downloaded files 2019-06-12 18:22:53 +01:00
e426b3ccbd refactoring to enable hashlist uploads 2019-06-12 17:33:43 +01:00
5172e19627 font-ize more elements 2019-06-12 16:34:24 +01:00
e826cfd8d5 start work on ability to configure font 2019-06-12 16:26:40 +01:00
51004f6fe9 wip on adding UI options 2019-06-11 08:04:26 +01:00
08bb2b614d load some gui props from a separate config file 2019-06-11 02:17:58 +01:00
d0e5d0ce8a set default i2cp options if none present 2019-06-10 08:55:44 +01:00
9e05802d1b Merge pull request #4 from mikalv/master
Fixes i2cp bug while connecting to remote router
2019-06-10 08:48:27 +01:00
fb4f56eec9 Remove debug message 2019-06-10 09:40:32 +02:00
be2083d430 Fixes i2cp bug while connecting to remote router 2019-06-10 09:39:46 +02:00
af6275d0a3 prevent Cli from hanging if there are no shared files 2019-06-10 07:04:01 +01:00
5269815329 update readme 2019-06-10 04:49:09 +01:00
bd21cf65ea Release 0.1.5 2019-06-09 20:37:39 +01:00
dea592eb27 do not resume cancelled downloads on restart 2019-06-09 20:36:14 +01:00
c81f963e0a Release 0.1.4 2019-06-09 17:37:10 +01:00
dc6b1199f3 implement resume across restart 2019-06-09 17:35:32 +01:00
42621a2dfb wip on persisting downloads between restarts 2019-06-09 16:26:00 +01:00
a7125963a7 DownloadManager listens to events, not FileManager 2019-06-09 16:19:35 +01:00
f39d7f4fa8 emit an event when the UI loads 2019-06-09 15:44:06 +01:00
b88334f19a Release 0.1.3 for sorting fixes 2019-06-08 17:57:36 +01:00
81e186ad1f fix sorting by download status and trust, fix events on downloads table 2019-06-08 17:55:39 +01:00
33a45c3835 fix buttons when tables are sorted 2019-06-08 17:09:44 +01:00
32b7867e44 Release 0.1.2 for search index test 2019-06-08 13:09:28 +01:00
5b313276f4 fix tests broken by piece size change 2019-06-08 13:08:20 +01:00
abba4cc6fa fix a bug where multi-term search modifies the index 2019-06-08 12:55:47 +01:00
15b4804968 update wire protocol with originator and oobHashlist fields 2019-06-08 12:40:38 +01:00
942a01a501 forgot to commit 2019-06-08 09:33:16 +01:00
502a8d91da print only the root 2019-06-08 09:30:01 +01:00
5414e8679b update readme 2019-06-08 09:07:13 +01:00
14e42dd7c2 correct element 2019-06-08 08:46:28 +01:00
1299fb2512 Release 0.1.1 for fixes and reduced piece size 2019-06-08 08:04:35 +01:00
9bafdfe0b1 reduce piece size 2019-06-08 07:57:36 +01:00
36eb632756 do not set the flag until it is implemented 2019-06-08 07:53:33 +01:00
83ee620402 sort by columns 2019-06-08 07:45:07 +01:00
3fe40d317d update readme for custom host:port 2019-06-08 07:28:23 +01:00
e9703a2652 support for custom i2cp host:port 2019-06-08 07:23:14 +01:00
a3fe89851f OS-specific home dir 2019-06-08 07:10:24 +01:00
b9ea0128cd add oobInfohash flag, filter results by that flag 2019-06-08 02:44:49 +01:00
53c6db4ec8 de-hardcode piece sizes in results 2019-06-08 01:48:07 +01:00
60776829b9 fix disabling sharing of downloaded files 2019-06-08 01:35:03 +01:00
b5cb31c23d proposed infohash upgrade document 2019-06-08 01:04:56 +01:00
5052c0c993 note about downloads in progress 2019-06-07 21:52:38 +01:00
06de007866 update readme 2019-06-07 21:22:49 +01:00
7c8a0c9ad9 update readme for 0.1.0 2019-06-07 19:24:13 +01:00
cda81a89a2 Release 0.1.0 2019-06-07 18:39:39 +01:00
483773422c fix remaining tests 2019-06-07 18:23:16 +01:00
1e1e6d0bb0 fix test 2019-06-07 18:17:16 +01:00
668d6e087d fix test 2019-06-07 18:15:03 +01:00
49af412b96 status update and auto-retry 2019-06-07 16:13:35 +01:00
d5513021ed Release 0.0.14 for split search 2019-06-07 15:00:16 +01:00
c3154cf717 stray println 2019-06-07 14:58:03 +01:00
114940c4c1 fix searches with spaces 2019-06-07 14:51:09 +01:00
d4336e9b5d outbound nickname 2019-06-07 14:24:45 +01:00
2c1d5508ed outbound nickname 2019-06-07 14:21:03 +01:00
1cebf6c7bd cli downloader 2019-06-07 14:02:10 +01:00
e12924a207 shadow jar for cli 2019-06-07 14:01:28 +01:00
f3b11895e4 utility for hashing files 2019-06-07 12:10:18 +01:00
1e084820fb log tweak 2019-06-07 11:55:17 +01:00
2198b4846d change wording 2019-06-07 11:43:02 +01:00
a5d442d320 Release 0.0.13 for keyword search fix 2019-06-07 06:37:23 +01:00
3f9ee887d6 prevent NPE in toString 2019-06-07 06:31:29 +01:00
4a9e6d3b6b prevent npe in keyword searches 2019-06-07 06:14:40 +01:00
80f2cc5f99 logging and toString() 2019-06-07 06:07:02 +01:00
12283dba9d Release 0.0.12 for search by hash 2019-06-06 22:22:43 +01:00
5c959bc8b7 name update search tab 2019-06-06 22:07:20 +01:00
f3712fe7af delay initial update check a minute 2019-06-06 21:52:35 +01:00
3e49b0ec66 infohash may be null 2019-06-06 21:40:44 +01:00
f90beb8e3d encode infohash 2019-06-06 21:31:00 +01:00
fbad7b6c7e searchHash 2019-06-06 21:27:07 +01:00
ec2d89c18c serialize infohash 2019-06-06 21:21:40 +01:00
c27fc0a515 update from infohash 2019-06-06 21:08:58 +01:00
14681c2060 search by hash ui 2019-06-06 20:30:15 +01:00
1aeb230ea8 catch exceptions in event dispatch thread 2019-06-06 19:31:10 +01:00
d1dfc73f5a decode infohash 2019-06-06 19:28:29 +01:00
0cebe4119c update list of limitations 2019-06-06 14:19:43 +01:00
9f21120ec8 print periodic stats 2019-06-06 13:59:05 +01:00
7eea8be67d Release 0.0.11 for file loading bug 2019-06-06 09:22:16 +01:00
f114302bdb hopefully fix the shared file loss 2019-06-06 09:19:00 +01:00
05b9b37488 emit an event when all files are loaded 2019-06-06 09:10:09 +01:00
52f317a5b7 prevent division by zero 2019-06-06 07:09:54 +01:00
fb8227a1f3 prevent division by zero 2019-06-06 07:09:05 +01:00
5677d9f46a release 0.0.10 2019-06-06 00:23:59 +01:00
c5192e3845 update readme for fix 2019-06-06 00:21:41 +01:00
43c2a55cb8 0 not null 2019-06-06 00:03:22 +01:00
94f6de6bea do not create new objects because that clears the successes 2019-06-05 21:07:23 +01:00
6782849a12 retry hosts received from hostcache even if marked as failed 2019-06-05 20:58:28 +01:00
c07d351c5d switch to jul, reduce aging interval 2019-06-05 20:14:38 +01:00
dc2f675dd3 delete pieces file when download finishes 2019-06-05 19:52:50 +01:00
a8e795ec51 do not accept connections if already try to connect to them 2019-06-05 19:07:36 +01:00
33c5b3b18e option to disable sharing of downloaded files 2019-06-05 17:46:55 +01:00
581fce4643 share downloaded files 2019-06-05 17:33:34 +01:00
7fe78a0719 more clear name 2019-06-05 16:47:10 +01:00
cdb6e22522 ui option for allowing untrusted connections 2019-06-05 15:47:44 +01:00
2edeb046be drop neutral queries if configured 2019-06-05 15:38:39 +01:00
4021f3c244 fix jullog 2019-06-05 13:04:46 +01:00
9008fac24d shutdown cleanly on exit 2019-06-05 12:38:56 +01:00
e2f92c5c5e print reported version 2019-06-05 10:07:04 +01:00
7b33a16fd8 update list of known issues 2019-06-05 09:22:56 +01:00
9a2531b264 release 0.0.9 2019-06-05 09:04:52 +01:00
9a8dadff57 center the sources column 2019-06-05 08:43:58 +01:00
4a274010f9 fix close tab button not appearing on duplicate searches 2019-06-05 08:34:09 +01:00
1eb930435b fix hashing errors in large files 2019-06-05 00:34:38 +01:00
9df28552ad try to load persisted files before hashing new ones 2019-06-05 00:22:36 +01:00
ac0204dffc hopefully more accurate bandwidth gauge 2019-06-04 23:50:36 +01:00
e5c402a400 retry download workers on resume 2019-06-04 23:36:57 +01:00
7704c73b68 pass logging.properties to cli 2019-06-04 22:19:19 +01:00
a9aa8dd840 do not count finished downloaders towards bandwidth 2019-06-04 21:55:59 +01:00
de682a802a options panel for i2p tunnel options 2019-06-04 21:14:23 +01:00
5435518212 core-side i2cp options 2019-06-04 20:20:25 +01:00
bd01f983c9 break html in search results 2019-06-04 19:27:22 +01:00
8b63864b90 utility to share files in headless mode 2019-06-04 18:58:02 +01:00
ed3943c1af 0.0.8 for UI tweaks and sanitization 2019-06-04 18:01:08 +01:00
e195141a27 simpler sanitization 2019-06-04 17:58:19 +01:00
bb02fdbee9 do not use regex in sanitization 2019-06-04 17:46:41 +01:00
6e3a2c0d08 update split pattern 2019-06-04 17:30:55 +01:00
bd5fecc19d fix 2019-06-04 17:04:24 +01:00
d5db49fa79 initialize core 2019-06-04 16:56:58 +01:00
f2ea8619bb CLI project 2019-06-04 16:46:32 +01:00
b129e79196 do not count finished workers in total count 2019-06-04 16:22:48 +01:00
404d5b60bc format length in shared file stable an resize columns 2019-06-04 14:05:33 +01:00
de2753ac50 preferred sizes for download table columns 2019-06-04 13:35:18 +01:00
2d53999c8e only show download speed if downloading 2019-06-04 13:23:48 +01:00
5aecf72d6f format download speed 2019-06-04 13:19:14 +01:00
a574a67ec6 format file size 2019-06-04 13:15:24 +01:00
6b5ad969b7 pass logging properties 2019-06-04 13:00:10 +01:00
617209c4e4 column widths tweaks 2019-06-04 12:46:48 +01:00
16b475bd9a 0.0.7 for multi-source downloads 2019-06-04 04:17:29 +01:00
3cea1870cd multisource downloads, untested 2019-06-04 03:30:55 +01:00
e7240dcb6f keep track of claimed pieces in preparation for multi-source downloads 2019-06-04 02:18:30 +01:00
c91440cbfc config option for update check interval 2019-06-03 23:30:39 +01:00
294605f5c7 basic update notification 2019-06-03 23:23:07 +01:00
986caf3a75 backend for checking updates 2019-06-03 23:11:03 +01:00
8524d5309f typo 2019-06-03 21:53:51 +01:00
48b3ac2b4a wip on update server 2019-06-03 21:50:46 +01:00
18f21dc247 update server 2019-06-03 21:47:31 +01:00
e69a5eac18 0.0.6 2019-06-03 18:30:27 +01:00
6e0f1778b7 rudimentary speed gauge 2019-06-03 18:02:10 +01:00
abbb741d73 show the number of sources for a result, counted by infohash 2019-06-03 17:21:08 +01:00
07dfc0a1d1 destroy mvc group on options window close 2019-06-03 15:33:16 +01:00
00c12cfd49 hook up download retry logic 2019-06-03 15:02:04 +01:00
1ee389ff91 options dialog 2019-06-03 14:40:32 +01:00
3642736cfe options dialog, wip 2019-06-03 11:32:34 +01:00
b6f7f51476 verify X-Persona header if present 2019-06-03 08:12:33 +01:00
4c21f2d5ae show full persona in searches 2019-06-03 08:06:51 +01:00
9e0d52d548 show source in incoming searches 2019-06-03 07:43:28 +01:00
fad01603de fix replyTo field 2019-06-03 07:35:09 +01:00
da007795fb learn about new hosts from incoming connections too 2019-06-03 07:27:12 +01:00
881d755dd3 update test work with personas 2019-06-02 22:47:43 +01:00
bc3b6f500f 0.0.5 for trust panel 2019-06-02 12:18:44 +01:00
8f8710801c update any result tabs on trust events 2019-06-02 12:16:28 +01:00
43f3cf9b7a small ui tweak 2019-06-02 12:00:14 +01:00
6fe4155678 delete accidental commit 2019-06-02 11:57:15 +01:00
32f944a089 trust panel ui 2019-06-02 11:56:19 +01:00
b19b5ef315 Fix for java 9+ #1 2019-06-02 10:04:27 +01:00
5138935c20 add options for portable installation, issue #2 2019-06-02 09:33:28 +01:00
ba596af778 Trust panel, wip 2019-06-02 05:40:44 +01:00
0f4533c867 persist personas in trust files instead of destinations 2019-06-02 05:12:14 +01:00
727834390c slightly better looking message 2019-06-02 04:18:15 +01:00
c51e3874da show a message instead of search bar while disconnected 2019-06-02 04:12:11 +01:00
d18a618575 focus on the tab of the new search 2019-06-02 03:54:34 +01:00
15508f417d hack to add some horizontal space 2019-06-02 01:33:53 +01:00
44dad55178 update test 2019-06-02 01:28:00 +01:00
5c17e77190 change groovy version to match griffon 2019-06-02 01:20:55 +01:00
de856cd085 canonize search terms 2019-06-02 00:42:18 +01:00
d2533cc4d6 retry failed downloads, every 15 minutes by default 2019-06-02 00:22:33 +01:00
f41cc39659 show who is downloading 2019-06-01 21:53:14 +01:00
656b62fc2e 0.0.4 with download retry 2019-06-01 18:31:36 +01:00
13b3f0f63b retry implemented 2019-06-01 18:30:30 +01:00
98ea8154a5 store done pieces on disk to enable resume 2019-06-01 18:09:14 +01:00
82377aa9df hook up cancel button 2019-06-01 17:44:52 +01:00
bd2368e23a cancelled downloader state 2019-06-01 17:31:18 +01:00
70078c309b add cancel and retry buttons, not hooked up yet 2019-06-01 17:30:29 +01:00
15a0eda713 preserve selection in downloads table 2019-06-01 17:09:23 +01:00
9645716e18 prevent rare stacktraces on shutdown 2019-06-01 16:55:37 +01:00
03d6af39ed icon for closing tabs 2019-06-01 16:43:05 +01:00
9435cb003b Show warning if cannot find I2P router 2019-06-01 16:36:23 +01:00
63399803d5 ui tweaks 2019-06-01 15:59:55 +01:00
4d6541030f disable system l&f on osx 2019-06-01 14:55:17 +01:00
16c51e7cd6 add a failed download state 2019-06-01 14:14:20 +01:00
9d75550b6f do not show local searches in monitor 2019-06-01 13:48:12 +01:00
1996681677 incoming searches monitor 2019-06-01 13:44:46 +01:00
9dac1891b2 connection monitor 2019-06-01 13:32:40 +01:00
1255ac936b close connections on shutdown 2019-06-01 13:04:22 +01:00
2db3276b07 fix rare NPE on shutdown 2019-06-01 13:03:42 +01:00
7e3b0795af disable buttons if no row is selected 2019-06-01 12:23:20 +01:00
147 changed files with 5833 additions and 569 deletions

View File

@ -1,39 +1,56 @@
# MuWire - Easy Anonymous File-Sharing
MuWire is an easy to use file-sharing program which offers anonymity using [I2P technology](http://geti2p.net).
MuWire is an easy to use file-sharing program which offers anonymity using [I2P technology](http://geti2p.net). It works on any platform Java works on, including Windows, MacOS and Linux.
It is inspired by the LimeWire Gnutella client and developed by a former LimeWire developer.
The project is in development. You can find technical documentation in the "doc" folder.
The current stable release, 0.4.0, is available for download at https://muwire.com. You can find technical documentation in the "doc" folder.
### Building
You need JDK 8 or newer. After installing that and setting up the appropriate paths, just type
You need JRE 8 or newer. After installing that and setting up the appropriate paths, just type
```
./gradlew assemble
./gradlew clean assemble
```
If you want to run the unit tests, type
```
./gradlew build
./gradlew clean build
```
Some of the UI tests will fail because they haven't been written yet :-/
### Running
You need to have an I2P router up and running on the same machine. After you build the application, look inside "gui/build/distributions". Untar/unzip one of the "shadow" files and then run the jar contained inside.
You need to have an I2P router up and running on the same machine. After you build the application, look inside "gui/build/distributions". Untar/unzip one of the "shadow" files and then run the jar contained inside by typing "java -jar MuWire-x.y.z.jar" in a terminal or command prompt. If you use a custom I2CP host and port, create a file $HOME/.MuWire/i2p.properties and put "i2cp.tcp.host=<host>" and "i2cp.tcp.port=<port>" in there.
The first time you run MuWire it will ask you to select a nickname. This nickname will be displayed with search results, so that others can verify the file was shared by you.
At the moment there are very few nodes on the network, so you will see very few connections and search results. It is best to leave MuWire running all the time, just like I2P.
The first time you run MuWire it will ask you to select a nickname. This nickname will be displayed with search results, so that others can verify the file was shared by you. It is best to leave MuWire running all the time, just like I2P.
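For example, an i2p.properties pointing MuWire at a custom I2CP host and port could contain (illustrative values; 7654 is the default I2CP port):
```
i2cp.tcp.host=192.168.1.10
i2cp.tcp.port=7654
```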
### Known bugs and limitations
* Any shared files get re-hashed on startup
* Sometimes the list of shared files gets lost
* Many UI features you would expect are not there yet
### Quick FAQ
* Why is MuWire slow?
  - There may be too few sources for the file you are downloading.
  - You can increase the number of tunnels via Options->I2P Inbound/Outbound Quantity.
    The default is 4 and you can raise it as high as 16 (use caution) - see the example after this list.
* My search is not returning (enough) results!
  - Search is keyword or hash based.
  - Keywords and hashes are NOT matched as regexes or wildcards, so they have to be complete:
    searching for 'musi' will not return results containing 'music' - you have to search for 'music'.
  - ALL keywords have to match.
  - Only use spaces to separate keywords.
  - If you already have the file in question it is not displayed (this can be changed via Options).
* What is right click -> 'Copy hash to clipboard' for?
  - If you have a specific file you wish to share or download, you can use the hash as a unique identifier to make sure you have exactly the right file.
  - You can share this hash with others to ensure they are getting the right file.
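As an illustration of the tunnel-quantity answer above, and assuming the Options dialog maps to the inbound.quantity / outbound.quantity keys that Core reads from i2p.properties (see the Core.groovy diff further down), a hand-edited file could raise them like this (illustrative values; more tunnels means more load on your I2P router):
```
inbound.quantity=8
outbound.quantity=8
```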

TODO.md Normal file
View File

@ -0,0 +1,40 @@
# TODO List
Not in any particular order yet
### Big Items
##### Bloom Filters
This reduces query traffic by not sending last hop queries to peers that definitely do not have the file
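A minimal sketch of the idea (hypothetical, not MuWire code): each peer advertises a Bloom filter of the keywords it has indexed, and a last-hop query is forwarded only if the filter might contain the keyword; a negative answer is definite, so that peer can be skipped.
```groovy
// Hypothetical sketch only, not MuWire code: a peer advertises a Bloom filter
// of its indexed keywords; a last-hop query is forwarded only if the filter
// might contain the keyword. A negative answer is definite, so the peer is skipped.
class KeywordBloomFilter {
    private BitSet bits
    private int size
    private int hashes

    KeywordBloomFilter(int size = 1 << 16, int hashes = 3) {
        this.size = size
        this.hashes = hashes
        this.bits = new BitSet(size)
    }

    private List<Integer> positions(String keyword) {
        int h1 = keyword.hashCode()
        int h2 = keyword.reverse().hashCode()   // cheap second hash for double hashing
        (0..<hashes).collect { i -> Math.abs((h1 + i * h2) % size) }
    }

    void add(String keyword) {
        positions(keyword).each { bits.set(it) }
    }

    /** false means the keyword is definitely not indexed by this peer */
    boolean mightContain(String keyword) {
        positions(keyword).every { bits.get(it) }
    }
}

def remote = new KeywordBloomFilter()
remote.add("music")
assert remote.mightContain("music")      // possibly there, forward the query
if (!remote.mightContain("podcast"))     // definitely not there
    println "skip forwarding the last-hop query to this peer"
```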
##### Two-tier Topology
This helps with scalability
##### Trust List Sharing
For helping users make better decisions about whom to trust
##### Content Control Panel
To allow every user to avoid routing queries for content they do not like. This is mostly GUI work; the backend part is simple
##### Packaging With JRE, Embedded Router
For ease of deployment for new users, and so that users do not need to run a separate I2P router
##### Web UI, REST Interface, etc.
Basically any non-gui non-cli user interface
##### Metadata editing and search
To enable parsing metadata from known file types and letting the user edit it or add metadata manually
### Small Items
* Wrapper of some kind for in-place upgrades
* Download file sequentially
* Unsharing of files
* Multiple-selection download, Ctrl-A

View File

@ -3,7 +3,7 @@ subprojects {
dependencies {
compile 'net.i2p:i2p:0.9.40'
compile 'org.codehaus.groovy:groovy-all:2.5.7'
compile 'org.codehaus.groovy:groovy-all:2.4.15'
}
compileGroovy {

cli/build.gradle Normal file
View File

@ -0,0 +1,22 @@
buildscript {
repositories {
jcenter()
mavenLocal()
}
dependencies {
classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'
}
}
apply plugin : 'application'
mainClassName = 'com.muwire.cli.Cli'
apply plugin : 'com.github.johnrengelman.shadow'
applicationDefaultJvmArgs = ['-Djava.util.logging.config.file=logging.properties']
dependencies {
compile project(":core")
}

View File

@ -0,0 +1,144 @@
package com.muwire.cli
import java.util.concurrent.CountDownLatch
import com.muwire.core.Core
import com.muwire.core.MuWireSettings
import com.muwire.core.UILoadedEvent
import com.muwire.core.connection.ConnectionAttemptStatus
import com.muwire.core.connection.ConnectionEvent
import com.muwire.core.connection.DisconnectionEvent
import com.muwire.core.files.AllFilesLoadedEvent
import com.muwire.core.files.FileHashedEvent
import com.muwire.core.files.FileLoadedEvent
import com.muwire.core.files.FileSharedEvent
import com.muwire.core.upload.UploadEvent
import com.muwire.core.upload.UploadFinishedEvent
class Cli {
public static void main(String[] args) {
def home = System.getProperty("user.home") + File.separator + ".MuWire"
home = new File(home)
if (!home.exists())
home.mkdirs()
def propsFile = new File(home,"MuWire.properties")
if (!propsFile.exists()) {
println "create props file ${propsFile.getAbsoluteFile()} before launching MuWire"
System.exit(1)
}
def props = new Properties()
propsFile.withInputStream { props.load(it) }
props = new MuWireSettings(props)
Core core
try {
core = new Core(props, home, "0.4.3")
} catch (Exception bad) {
bad.printStackTrace(System.out)
println "Failed to initialize core, exiting"
System.exit(1)
}
def filesList
if (args.length == 0) {
println "Enter a file containing list of files to share"
def reader = new BufferedReader(new InputStreamReader(System.in))
filesList = reader.readLine()
} else
filesList = args[0]
Thread.sleep(1000)
println "loading shared files from $filesList"
// listener for shared files
def sharedListener = new SharedListener()
core.eventBus.register(FileHashedEvent.class, sharedListener)
core.eventBus.register(FileLoadedEvent.class, sharedListener)
// for connections
def connectionsListener = new ConnectionListener()
core.eventBus.register(ConnectionEvent.class, connectionsListener)
core.eventBus.register(DisconnectionEvent.class, connectionsListener)
// for uploads
def uploadsListener = new UploadsListener()
core.eventBus.register(UploadEvent.class, uploadsListener)
core.eventBus.register(UploadFinishedEvent.class, uploadsListener)
Timer timer = new Timer("status-printer", true)
timer.schedule({
println String.valueOf(new Date()) + " Connections $connectionsListener.connections Uploads $uploadsListener.uploads Shared $sharedListener.shared"
} as TimerTask, 60000, 60000)
def latch = new CountDownLatch(1)
def fileLoader = new Object() {
public void onAllFilesLoadedEvent(AllFilesLoadedEvent e) {
latch.countDown()
}
}
core.eventBus.register(AllFilesLoadedEvent.class, fileLoader)
core.startServices()
core.eventBus.publish(new UILoadedEvent())
println "waiting for files to load"
latch.await()
// now we begin
println "MuWire is ready"
filesList = new File(filesList)
filesList.eachLine { toShare ->
// share each path listed in the file, one per line
core.eventBus.publish(new FileSharedEvent(file : new File(toShare)))
}
Runtime.getRuntime().addShutdownHook({
println "shutting down.."
core.shutdown()
println "shutdown."
})
Thread.sleep(Integer.MAX_VALUE)
}
static class ConnectionListener {
volatile int connections
public void onConnectionEvent(ConnectionEvent e) {
if (e.status == ConnectionAttemptStatus.SUCCESSFUL)
connections++
}
public void onDisconnectionEvent(DisconnectionEvent e) {
connections--
}
}
static class UploadsListener {
volatile int uploads
public void onUploadEvent(UploadEvent e) {
uploads++
println String.valueOf(new Date()) + " Starting upload of ${e.uploader.file.getName()} to ${e.uploader.request.downloader.getHumanReadableName()}"
}
public void onUploadFinishedEvent(UploadFinishedEvent e) {
uploads--
println String.valueOf(new Date()) + " Finished upload of ${e.uploader.file.getName()} to ${e.uploader.request.downloader.getHumanReadableName()}"
}
}
static class SharedListener {
volatile int shared
void onFileHashedEvent(FileHashedEvent e) {
if (e.error != null)
println "ERROR $e.error"
else {
println "Shared file : $e.sharedFile.file"
shared++
}
}
void onFileLoadedEvent(FileLoadedEvent e) {
shared++
}
}
}
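A possible headless invocation, assuming ~/.MuWire/MuWire.properties already exists and the CLI was packaged with the Shadow plugin declared in cli/build.gradle above (the jar path and file names are illustrative):
```
# build a runnable jar with the Shadow plugin's standard task
./gradlew cli:shadowJar

# shared.txt lists the path(s) to share; logging.properties controls log verbosity
java -Djava.util.logging.config.file=logging.properties \
     -jar cli/build/libs/cli-all.jar shared.txt
```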

View File

@ -0,0 +1,166 @@
package com.muwire.cli
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.CountDownLatch
import com.muwire.core.Core
import com.muwire.core.MuWireSettings
import com.muwire.core.connection.ConnectionAttemptStatus
import com.muwire.core.connection.ConnectionEvent
import com.muwire.core.download.DownloadStartedEvent
import com.muwire.core.download.Downloader
import com.muwire.core.download.UIDownloadEvent
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.SearchEvent
import com.muwire.core.search.UIResultEvent
import net.i2p.data.Base64
class CliDownloader {
private static final List<Downloader> downloaders = Collections.synchronizedList(new ArrayList<>())
private static final Map<UUID,ResultsHolder> resultsListeners = new ConcurrentHashMap<>()
public static void main(String []args) {
def home = System.getProperty("user.home") + File.separator + ".MuWire"
home = new File(home)
if (!home.exists())
home.mkdirs()
def propsFile = new File(home,"MuWire.properties")
if (!propsFile.exists()) {
println "create props file ${propsFile.getAbsoluteFile()} before launching MuWire"
System.exit(1)
}
def props = new Properties()
propsFile.withInputStream { props.load(it) }
props = new MuWireSettings(props)
def filesList
int connections
int resultWait
if (args.length != 3) {
println "Enter a file containing list of hashes of files to download, " +
"how many connections you want before searching" +
"and how long to wait for results to arrive"
System.exit(1)
} else {
filesList = args[0]
connections = Integer.parseInt(args[1])
resultWait = Integer.parseInt(args[2])
}
Core core
try {
core = new Core(props, home, "0.4.3")
} catch (Exception bad) {
bad.printStackTrace(System.out)
println "Failed to initialize core, exiting"
System.exit(1)
}
def latch = new CountDownLatch(connections)
def connectionListener = new ConnectionWaiter(latch : latch)
core.eventBus.register(ConnectionEvent.class, connectionListener)
core.startServices()
println "starting to wait until there are $connections connections"
latch.await()
println "connected, searching for files"
def file = new File(filesList)
file.eachLine {
String[] split = it.split(",")
UUID uuid = UUID.randomUUID()
core.eventBus.register(UIResultEvent.class, new ResultsListener(fileName : split[1]))
def hash = Base64.decode(split[0])
def searchEvent = new SearchEvent(searchHash : hash, uuid : uuid)
core.eventBus.publish(new QueryEvent(searchEvent : searchEvent, firstHop:true,
replyTo: core.me.destination, receivedOn : core.me.destination, originator: core.me))
}
println "waiting for results to arrive"
Thread.sleep(resultWait * 1000)
core.eventBus.register(DownloadStartedEvent.class, new DownloadListener())
resultsListeners.each { uuid, resultsListener ->
println "starting download of $resultsListener.fileName from ${resultsListener.getResults().size()} hosts"
File target = new File(resultsListener.fileName)
core.eventBus.publish(new UIDownloadEvent(target : target, result : resultsListener.getResults()))
}
Thread.sleep(1000)
Timer timer = new Timer("stats-printer")
timer.schedule({
println "==== STATUS UPDATE ==="
downloaders.each {
int donePieces = it.donePieces()
int totalPieces = it.nPieces
int sources = it.activeWorkers.size()
def root = Base64.encode(it.infoHash.getRoot())
def state = it.getCurrentState()
println "file $it.file hash: $root progress: $donePieces/$totalPieces sources: $sources status: $state}"
it.resume()
}
println "==== END ==="
} as TimerTask, 60000, 60000)
println "waiting for downloads to finish"
while(true) {
boolean allFinished = true
for (Downloader d : downloaders) {
allFinished &= d.getCurrentState() == Downloader.DownloadState.FINISHED
}
if (allFinished)
break
Thread.sleep(1000)
}
println "all downloads finished"
}
static class ResultsHolder {
final List<UIResultEvent> results = Collections.synchronizedList(new ArrayList<>())
String fileName
void add(UIResultEvent e) {
results.add(e)
}
List getResults() {
results
}
}
static class ResultsListener {
UUID uuid
String fileName
public onUIResultEvent(UIResultEvent e) {
println "got a result for $fileName from ${e.sender.getHumanReadableName()}"
ResultsHolder listener = resultsListeners.get(e.uuid)
if (listener == null) {
listener = new ResultsHolder(fileName : fileName)
resultsListeners.put(e.uuid, listener)
}
listener.add(e)
}
}
static class ConnectionWaiter {
CountDownLatch latch
public void onConnectionEvent(ConnectionEvent e) {
if (e.status == ConnectionAttemptStatus.SUCCESSFUL)
latch.countDown()
}
}
static class DownloadListener {
public void onDownloadStartedEvent(DownloadStartedEvent e) {
downloaders.add(e.downloader)
}
}
}
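For reference, the three arguments are: a file with one `<Base64 infohash>,<output file name>` entry per line (the hash as printed by the FileList tool, for instance), the number of connections to wait for before searching, and how many seconds to wait for results. A hypothetical run (jar path illustrative):
```
java -cp cli/build/libs/cli-all.jar com.muwire.cli.CliDownloader wanted.txt 3 60
```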

View File

@ -0,0 +1,23 @@
package com.muwire.cli
import com.muwire.core.util.DataUtil
import groovy.json.JsonSlurper
import net.i2p.data.Base64
class FileList {
public static void main(String [] args) {
if (args.length < 1) {
println "pass files.json as argument"
System.exit(1)
}
def slurper = new JsonSlurper()
File filesJson = new File(args[0])
filesJson.eachLine {
def json = slurper.parseText(it)
String name = DataUtil.readi18nString(Base64.decode(json.file))
println "$name,$json.length,$json.pieceSize,$json.infoHash"
}
}
}
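A possible invocation (class path illustrative): point the tool at the files.json kept in the MuWire home directory and it prints one `name,length,pieceSize,infoHash` line per shared file.
```
java -cp cli/build/libs/cli-all.jar com.muwire.cli.FileList ~/.MuWire/files.json
```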

View File

@ -2,6 +2,7 @@ apply plugin : 'application'
mainClassName = 'com.muwire.core.Core'
applicationDefaultJvmArgs = ['-Djava.util.logging.config.file=logging.properties']
dependencies {
compile 'net.i2p:router:0.9.40'
compile 'net.i2p.client:mstreaming:0.9.40'
compile 'net.i2p.client:streaming:0.9.40'

View File

@ -9,5 +9,5 @@ class Constants {
public static final int MAX_HEADER_SIZE = 0x1 << 14
public static final int MAX_HEADERS = 16
public static final float DOWNLOAD_SEQUENTIAL_RATIO = 0.8f
public static final String SPLIT_PATTERN = "[\\*\\+\\-,\\.:;\\(\\)=_/\\\\\\!\\\"\\\'\\\$%\\|\\[\\]\\{\\}]"
}
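A quick illustration (hypothetical file name, not MuWire code) of how this pattern tokenizes names on punctuation only; the empty tokens it can produce are why the indexer filters them out (see the "prevent empty tokens in search index" commit above):
```groovy
// Illustrative only: tokenize a file name with the split pattern above.
def SPLIT_PATTERN = "[\\*\\+\\-,\\.:;\\(\\)=_/\\\\\\!\\\"\\\'\\\$%\\|\\[\\]\\{\\}]"
def tokens = "My_Song.Title(2019).mp3".split(SPLIT_PATTERN).toList()
println tokens                              // [My, Song, Title, 2019, , mp3] -- note the empty token
println tokens.findAll { it.length() > 0 }  // what an index would keep
```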

View File

@ -1,6 +1,7 @@
package com.muwire.core
import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicBoolean
import com.muwire.core.connection.ConnectionAcceptor
import com.muwire.core.connection.ConnectionEstablisher
@ -12,7 +13,11 @@ import com.muwire.core.connection.I2PConnector
import com.muwire.core.connection.LeafConnectionManager
import com.muwire.core.connection.UltrapeerConnectionManager
import com.muwire.core.download.DownloadManager
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.download.UIDownloadCancelledEvent
import com.muwire.core.download.UIDownloadEvent
import com.muwire.core.download.UIDownloadPausedEvent
import com.muwire.core.download.UIDownloadResumedEvent
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.files.FileHashedEvent
import com.muwire.core.files.FileHasher
@ -22,16 +27,22 @@ import com.muwire.core.files.FileSharedEvent
import com.muwire.core.files.FileUnsharedEvent
import com.muwire.core.files.HasherService
import com.muwire.core.files.PersisterService
import com.muwire.core.files.AllFilesLoadedEvent
import com.muwire.core.files.DirectoryUnsharedEvent
import com.muwire.core.files.DirectoryWatcher
import com.muwire.core.hostcache.CacheClient
import com.muwire.core.hostcache.HostCache
import com.muwire.core.hostcache.HostDiscoveredEvent
import com.muwire.core.mesh.MeshManager
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.ResultsEvent
import com.muwire.core.search.ResultsSender
import com.muwire.core.search.SearchEvent
import com.muwire.core.search.SearchManager
import com.muwire.core.search.UIResultBatchEvent
import com.muwire.core.trust.TrustEvent
import com.muwire.core.trust.TrustService
import com.muwire.core.update.UpdateClient
import com.muwire.core.upload.UploadManager
import com.muwire.core.util.MuWireLogManager
@ -42,6 +53,7 @@ import net.i2p.client.I2PSession
import net.i2p.client.streaming.I2PSocketManager
import net.i2p.client.streaming.I2PSocketManagerFactory
import net.i2p.client.streaming.I2PSocketOptions
import net.i2p.client.streaming.I2PSocketManager.DisconnectListener
import net.i2p.crypto.DSAEngine
import net.i2p.crypto.SigType
import net.i2p.data.Destination
@ -49,25 +61,86 @@ import net.i2p.data.PrivateKey
import net.i2p.data.Signature
import net.i2p.data.SigningPrivateKey
import net.i2p.router.Router
import net.i2p.router.RouterContext
@Log
public class Core {
final EventBus eventBus
final Persona me
final File home
final Properties i2pOptions
final MuWireSettings muOptions
private final TrustService trustService
private final PersisterService persisterService
private final HostCache hostCache
private final ConnectionManager connectionManager
private final CacheClient cacheClient
private final UpdateClient updateClient
private final ConnectionAcceptor connectionAcceptor
private final ConnectionEstablisher connectionEstablisher
private final HasherService hasherService
private final DownloadManager downloadManager
private final DirectoryWatcher directoryWatcher
final FileManager fileManager
final UploadManager uploadManager
private final Router router
final AtomicBoolean shutdown = new AtomicBoolean()
public Core(MuWireSettings props, File home) {
log.info "Initializing I2P context"
I2PAppContext.getGlobalContext().logManager()
I2PAppContext.getGlobalContext()._logManager = new MuWireLogManager()
public Core(MuWireSettings props, File home, String myVersion) {
this.home = home
this.muOptions = props
i2pOptions = new Properties()
def i2pOptionsFile = new File(home,"i2p.properties")
if (i2pOptionsFile.exists()) {
i2pOptionsFile.withInputStream { i2pOptions.load(it) }
if (!i2pOptions.containsKey("inbound.nickname"))
i2pOptions["inbound.nickname"] = "MuWire"
if (!i2pOptions.containsKey("outbound.nickname"))
i2pOptions["outbound.nickname"] = "MuWire"
} else {
i2pOptions["inbound.nickname"] = "MuWire"
i2pOptions["outbound.nickname"] = "MuWire"
i2pOptions["inbound.length"] = "3"
i2pOptions["inbound.quantity"] = "4"
i2pOptions["outbound.length"] = "3"
i2pOptions["outbound.quantity"] = "4"
i2pOptions["i2cp.tcp.host"] = "127.0.0.1"
i2pOptions["i2cp.tcp.port"] = "7654"
Random r = new Random()
int port = r.nextInt(60000) + 4000
i2pOptions["i2np.ntcp.port"] = String.valueOf(port)
i2pOptions["i2np.udp.port"] = String.valueOf(port)
i2pOptionsFile.withOutputStream { i2pOptions.store(it, "") }
}
if (!props.embeddedRouter) {
log.info "Initializing I2P context"
I2PAppContext.getGlobalContext().logManager()
I2PAppContext.getGlobalContext()._logManager = new MuWireLogManager()
router = null
} else {
log.info("launching embedded router")
Properties routerProps = new Properties()
routerProps.setProperty("i2p.dir.config", home.getAbsolutePath())
routerProps.setProperty("i2np.inboundKBytesPerSecond", String.valueOf(props.inBw))
routerProps.setProperty("i2np.outboundKBytesPerSecond", String.valueOf(props.outBw))
routerProps.setProperty("i2cp.disableInterface", "true")
routerProps.setProperty("i2np.ntcp.port", i2pOptions["i2np.ntcp.port"])
routerProps.setProperty("i2np.udp.port", i2pOptions["i2np.udp.port"])
routerProps.setProperty("i2np.udp.internalPort", i2pOptions["i2np.udp.port"])
router = new Router(routerProps)
I2PAppContext.getGlobalContext().metaClass = new RouterContextMetaClass()
router.runRouter()
while(!router.isRunning())
Thread.sleep(100)
}
log.info("initializing I2P socket manager")
def i2pClient = new I2PClientFactory().createClient()
@ -79,15 +152,16 @@ public class Core {
}
}
def sysProps = System.getProperties().clone()
sysProps["inbound.nickname"] = "MuWire"
// options like tunnel length and quantity
I2PSession i2pSession
I2PSocketManager socketManager
keyDat.withInputStream {
socketManager = new I2PSocketManagerFactory().createManager(it, sysProps)
socketManager = new I2PSocketManagerFactory().createManager(it, i2pOptions["i2cp.tcp.host"], i2pOptions["i2cp.tcp.port"].toInteger(), i2pOptions)
}
socketManager.getDefaultOptions().setReadTimeout(60000)
socketManager.getDefaultOptions().setConnectTimeout(30000)
socketManager.addDisconnectListener({eventBus.publish(new RouterDisconnectedEvent())} as DisconnectListener)
i2pSession = socketManager.getSession()
def destination = new Destination()
@ -118,22 +192,28 @@ public class Core {
eventBus = new EventBus()
log.info("initializing trust service")
File goodTrust = new File(home, "trust.good")
File badTrust = new File(home, "trust.bad")
File goodTrust = new File(home, "trusted")
File badTrust = new File(home, "distrusted")
trustService = new TrustService(goodTrust, badTrust, 5000)
eventBus.register(TrustEvent.class, trustService)
log.info "initializing file manager"
FileManager fileManager = new FileManager(eventBus)
fileManager = new FileManager(eventBus, props)
eventBus.register(FileHashedEvent.class, fileManager)
eventBus.register(FileLoadedEvent.class, fileManager)
eventBus.register(FileDownloadedEvent.class, fileManager)
eventBus.register(FileUnsharedEvent.class, fileManager)
eventBus.register(SearchEvent.class, fileManager)
eventBus.register(DirectoryUnsharedEvent.class, fileManager)
log.info("initializing mesh manager")
MeshManager meshManager = new MeshManager(fileManager, home, props)
eventBus.register(SourceDiscoveredEvent.class, meshManager)
log.info "initializing persistence service"
persisterService = new PersisterService(new File(home, "files.json"), eventBus, 5000, fileManager)
persisterService = new PersisterService(new File(home, "files.json"), eventBus, 15000, fileManager)
eventBus.register(UILoadedEvent.class, persisterService)
log.info("initializing host cache")
File hostStorage = new File(home, "hosts.json")
@ -143,7 +223,8 @@ public class Core {
log.info("initializing connection manager")
connectionManager = props.isLeaf() ?
new LeafConnectionManager(eventBus, me, 3, hostCache) : new UltrapeerConnectionManager(eventBus, me, 512, 512, hostCache, trustService)
new LeafConnectionManager(eventBus, me, 3, hostCache, props) :
new UltrapeerConnectionManager(eventBus, me, 512, 512, hostCache, trustService, props)
eventBus.register(TrustEvent.class, connectionManager)
eventBus.register(ConnectionEvent.class, connectionManager)
eventBus.register(DisconnectionEvent.class, connectionManager)
@ -152,6 +233,11 @@ public class Core {
log.info("initializing cache client")
cacheClient = new CacheClient(eventBus,hostCache, connectionManager, i2pSession, props, 10000)
log.info("initializing update client")
updateClient = new UpdateClient(eventBus, i2pSession, myVersion, props, fileManager, me)
eventBus.register(FileDownloadedEvent.class, updateClient)
eventBus.register(UIResultBatchEvent.class, updateClient)
log.info("initializing connector")
I2PConnector i2pConnector = new I2PConnector(socketManager)
@ -164,23 +250,34 @@ public class Core {
eventBus.register(ResultsEvent.class, searchManager)
log.info("initializing download manager")
DownloadManager downloadManager = new DownloadManager(eventBus, i2pConnector)
downloadManager = new DownloadManager(eventBus, trustService, meshManager, props, i2pConnector, home, me)
eventBus.register(UIDownloadEvent.class, downloadManager)
eventBus.register(UILoadedEvent.class, downloadManager)
eventBus.register(FileDownloadedEvent.class, downloadManager)
eventBus.register(UIDownloadCancelledEvent.class, downloadManager)
eventBus.register(SourceDiscoveredEvent.class, downloadManager)
eventBus.register(UIDownloadPausedEvent.class, downloadManager)
eventBus.register(UIDownloadResumedEvent.class, downloadManager)
log.info("initializing upload manager")
UploadManager uploadManager = new UploadManager(eventBus, fileManager)
uploadManager = new UploadManager(eventBus, fileManager, meshManager, downloadManager)
log.info("initializing connection establisher")
connectionEstablisher = new ConnectionEstablisher(eventBus, i2pConnector, props, connectionManager, hostCache)
log.info("initializing acceptor")
I2PAcceptor i2pAcceptor = new I2PAcceptor(socketManager)
connectionAcceptor = new ConnectionAcceptor(eventBus, connectionManager, props,
i2pAcceptor, hostCache, trustService, searchManager, uploadManager)
i2pAcceptor, hostCache, trustService, searchManager, uploadManager, connectionEstablisher)
connectionEstablisher = new ConnectionEstablisher(eventBus, i2pConnector, props, connectionManager, hostCache)
log.info("initializing directory watcher")
directoryWatcher = new DirectoryWatcher(eventBus, fileManager)
eventBus.register(FileSharedEvent.class, directoryWatcher)
eventBus.register(AllFilesLoadedEvent.class, directoryWatcher)
eventBus.register(DirectoryUnsharedEvent.class, directoryWatcher)
log.info("initializing hasher service")
hasherService = new HasherService(new FileHasher(), eventBus)
hasherService = new HasherService(new FileHasher(), eventBus, fileManager)
eventBus.register(FileSharedEvent.class, hasherService)
}
@ -188,15 +285,49 @@ public class Core {
hasherService.start()
trustService.start()
trustService.waitForLoad()
persisterService.start()
hostCache.start()
connectionManager.start()
cacheClient.start()
connectionAcceptor.start()
connectionEstablisher.start()
hostCache.waitForLoad()
updateClient.start()
}
public void shutdown() {
if (!shutdown.compareAndSet(false, true)) {
log.info("already shutting down")
return
}
log.info("shutting down download manageer")
downloadManager.shutdown()
log.info("shutting down connection acceeptor")
connectionAcceptor.stop()
log.info("shutting down connection establisher")
connectionEstablisher.stop()
log.info("shutting down directory watcher")
directoryWatcher.stop()
log.info("shutting down connection manager")
connectionManager.shutdown()
if (router != null) {
log.info("shutting down embedded router")
router.shutdown(0)
}
}
static class RouterContextMetaClass extends DelegatingMetaClass {
private final Object logManager = new MuWireLogManager()
RouterContextMetaClass() {
super(RouterContext.class)
}
Object invokeMethod(Object object, String name, Object[] args) {
if (name == "logManager")
return logManager
super.invokeMethod(object, name, args)
}
}
static main(args) {
def home = System.getProperty("user.home") + File.separator + ".MuWire"
home = new File(home)
@ -221,7 +352,7 @@ public class Core {
}
}
Core core = new Core(props, home)
Core core = new Core(props, home, "0.4.3")
core.startServices()
// ... at the end, sleep or execute script

View File

@ -3,6 +3,7 @@ package com.muwire.core
import java.util.concurrent.CopyOnWriteArrayList
import java.util.concurrent.Executor
import java.util.concurrent.Executors
import java.util.logging.Level
import com.muwire.core.files.FileSharedEvent
@ -23,14 +24,18 @@ class EventBus {
}
private void publishInternal(Event e) {
log.fine "publishing event $e of type ${e.getClass().getSimpleName()}"
log.fine "publishing event $e of type ${e.getClass().getSimpleName()} event $e"
def currentHandlers
final def clazz = e.getClass()
synchronized(this) {
currentHandlers = handlers.getOrDefault(clazz, [])
}
currentHandlers.each {
it."on${clazz.getSimpleName()}"(e)
try {
it."on${clazz.getSimpleName()}"(e)
} catch (Exception bad) {
log.log(Level.SEVERE, "exception dispatching event",bad)
}
}
}
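For context, event handlers are plain objects exposing an on<EventClassName> method, which publishInternal invokes reflectively; a minimal sketch of that convention, assuming the default EventBus constructor and using a hypothetical handler class:
import com.muwire.core.EventBus
import com.muwire.core.files.FileSharedEvent
// hypothetical handler for illustration; any object with a matching
// "on" + event-class-simple-name method can be registered
class LoggingHandler {
    void onFileSharedEvent(FileSharedEvent e) {
        println "shared: ${e.file}"
    }
}
def bus = new EventBus()
bus.register(FileSharedEvent.class, new LoggingHandler())
bus.publish(new FileSharedEvent(file : new File("/tmp/example.txt")))
The try/catch added above means a handler that throws no longer prevents later handlers from seeing the same event.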

View File

@ -1,15 +1,30 @@
package com.muwire.core
import java.util.stream.Collectors
import com.muwire.core.hostcache.CrawlerResponse
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
class MuWireSettings {
final boolean isLeaf
boolean allowUntrusted
int downloadRetryInterval
int updateCheckInterval
boolean autoDownloadUpdate
String updateType
String nickname
File downloadLocation
String sharedFiles
CrawlerResponse crawlerResponse
boolean shareDownloadedFiles
Set<String> watchedDirectories
float downloadSequentialRatio
int hostClearInterval
int meshExpiration
boolean embeddedRouter
int inBw, outBw
MuWireSettings() {
this(new Properties())
@ -22,7 +37,24 @@ class MuWireSettings {
nickname = props.getProperty("nickname","MuWireUser")
downloadLocation = new File((String)props.getProperty("downloadLocation",
System.getProperty("user.home")))
sharedFiles = props.getProperty("sharedFiles")
downloadRetryInterval = Integer.parseInt(props.getProperty("downloadRetryInterval","1"))
updateCheckInterval = Integer.parseInt(props.getProperty("updateCheckInterval","24"))
autoDownloadUpdate = Boolean.parseBoolean(props.getProperty("autoDownloadUpdate","true"))
updateType = props.getProperty("updateType","jar")
shareDownloadedFiles = Boolean.parseBoolean(props.getProperty("shareDownloadedFiles","true"))
downloadSequentialRatio = Float.valueOf(props.getProperty("downloadSequentialRatio","0.8"))
hostClearInterval = Integer.valueOf(props.getProperty("hostClearInterval","60"))
meshExpiration = Integer.valueOf(props.getProperty("meshExpiration","60"))
embeddedRouter = Boolean.valueOf(props.getProperty("embeddedRouter","false"))
inBw = Integer.valueOf(props.getProperty("inBw","256"))
outBw = Integer.valueOf(props.getProperty("outBw","128"))
watchedDirectories = new HashSet<>()
if (props.containsKey("watchedDirectories")) {
String[] encoded = props.getProperty("watchedDirectories").split(",")
encoded.each { watchedDirectories << DataUtil.readi18nString(Base64.decode(it)) }
}
}
void write(OutputStream out) throws IOException {
@ -32,8 +64,25 @@ class MuWireSettings {
props.setProperty("crawlerResponse", crawlerResponse.toString())
props.setProperty("nickname", nickname)
props.setProperty("downloadLocation", downloadLocation.getAbsolutePath())
if (sharedFiles != null)
props.setProperty("sharedFiles", sharedFiles)
props.setProperty("downloadRetryInterval", String.valueOf(downloadRetryInterval))
props.setProperty("updateCheckInterval", String.valueOf(updateCheckInterval))
props.setProperty("autoDownloadUpdate", String.valueOf(autoDownloadUpdate))
props.setProperty("updateType",updateType)
props.setProperty("shareDownloadedFiles", String.valueOf(shareDownloadedFiles))
props.setProperty("downloadSequentialRatio", String.valueOf(downloadSequentialRatio))
props.setProperty("hostClearInterval", String.valueOf(hostClearInterval))
props.setProperty("meshExpiration", String.valueOf(meshExpiration))
props.setProperty("embeddedRouter", String.valueOf(embeddedRouter))
props.setProperty("inBw", String.valueOf(inBw))
props.setProperty("outBw", String.valueOf(outBw))
if (!watchedDirectories.isEmpty()) {
String encoded = watchedDirectories.stream().
map({Base64.encode(DataUtil.encodei18nString(it))}).
collect(Collectors.joining(","))
props.setProperty("watchedDirectories", encoded)
}
props.store(out, "")
}
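For reference, a properties file produced by write() with the defaults read in the constructor would look roughly like this (downloadLocation and the watchedDirectories entries are placeholders; watched directories are stored as comma-separated Base64 of the i18n-encoded paths; crawlerResponse is omitted because its default is not shown in this hunk):
nickname=MuWireUser
downloadLocation=/home/user
downloadRetryInterval=1
updateCheckInterval=24
autoDownloadUpdate=true
updateType=jar
shareDownloadedFiles=true
downloadSequentialRatio=0.8
hostClearInterval=60
meshExpiration=60
embeddedRouter=false
inBw=256
outBw=128
watchedDirectories=<Base64 path>,<Base64 path>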

View File

@ -2,6 +2,7 @@ package com.muwire.core
import net.i2p.crypto.DSAEngine
import net.i2p.crypto.SigType
import net.i2p.data.Base64
import net.i2p.data.Destination
import net.i2p.data.Signature
import net.i2p.data.SigningPublicKey
@ -14,6 +15,7 @@ public class Persona {
private final Destination destination
private final byte[] sig
private volatile String humanReadableName
private volatile String base64
private volatile byte[] payload
public Persona(InputStream personaStream) throws IOException, InvalidSignatureException {
@ -59,6 +61,15 @@ public class Persona {
humanReadableName
}
public String toBase64() {
if (base64 == null) {
def baos = new ByteArrayOutputStream()
write(baos)
base64 = Base64.encode(baos.toByteArray())
}
base64
}
@Override
public int hashCode() {
name.hashCode() ^ destination.hashCode()

View File

@ -0,0 +1,4 @@
package com.muwire.core
class RouterDisconnectedEvent extends Event {
}

View File

@ -0,0 +1,4 @@
package com.muwire.core
class UILoadedEvent extends Event {
}

View File

@ -6,6 +6,8 @@ import java.util.concurrent.atomic.AtomicBoolean
import java.util.logging.Level
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.hostcache.HostCache
import com.muwire.core.hostcache.HostDiscoveredEvent
import com.muwire.core.search.QueryEvent
@ -14,6 +16,7 @@ import com.muwire.core.trust.TrustLevel
import com.muwire.core.trust.TrustService
import groovy.util.logging.Log
import net.i2p.data.Base64
import net.i2p.data.Destination
@Log
@ -24,7 +27,8 @@ abstract class Connection implements Closeable {
final boolean incoming
final HostCache hostCache
final TrustService trustService
final MuWireSettings settings
private final AtomicBoolean running = new AtomicBoolean()
private final BlockingQueue messages = new LinkedBlockingQueue()
private final Thread reader, writer
@ -33,12 +37,14 @@ abstract class Connection implements Closeable {
long lastPingSentTime, lastPongReceivedTime
Connection(EventBus eventBus, Endpoint endpoint, boolean incoming, HostCache hostCache, TrustService trustService) {
Connection(EventBus eventBus, Endpoint endpoint, boolean incoming,
HostCache hostCache, TrustService trustService, MuWireSettings settings) {
this.eventBus = eventBus
this.incoming = incoming
this.endpoint = endpoint
this.hostCache = hostCache
this.trustService = trustService
this.settings = settings
this.name = endpoint.destination.toBase32().substring(0,8)
@ -70,9 +76,9 @@ abstract class Connection implements Closeable {
return
}
log.info("closing $name")
endpoint.close()
reader.interrupt()
writer.interrupt()
endpoint.close()
eventBus.publish(new DisconnectionEvent(destination: endpoint.destination))
}
@ -82,7 +88,6 @@ abstract class Connection implements Closeable {
read()
}
} catch (SocketTimeoutException e) {
close()
} catch (Exception e) {
log.log(Level.WARNING,"unhandled exception in reader",e)
} finally {
@ -121,9 +126,13 @@ abstract class Connection implements Closeable {
query.version = 1
query.uuid = e.searchEvent.getUuid()
query.firstHop = e.firstHop
// TODO: figure out first hop
query.keywords = e.searchEvent.getSearchTerms()
query.replyTo = e.getReceivedOn().toBase64()
query.oobInfohash = e.searchEvent.oobInfohash
if (e.searchEvent.searchHash != null)
query.infohash = Base64.encode(e.searchEvent.searchHash)
query.replyTo = e.replyTo.toBase64()
if (e.originator != null)
query.originator = e.originator.toBase64()
messages.put(query)
}
@ -149,21 +158,43 @@ abstract class Connection implements Closeable {
protected void handleSearch(def search) {
UUID uuid = UUID.fromString(search.uuid)
if (search.infohash != null)
byte [] infohash = null
if (search.infohash != null) {
search.keywords = null
infohash = Base64.decode(search.infohash)
}
Destination replyTo = new Destination(search.replyTo)
if (trustService.getLevel(replyTo) == TrustLevel.DISTRUSTED) {
TrustLevel trustLevel = trustService.getLevel(replyTo)
if (trustLevel == TrustLevel.DISTRUSTED) {
log.info "dropping search from distrusted peer"
return
}
// TODO: add option to respond only to trusted peers
if (trustLevel == TrustLevel.NEUTRAL && !settings.allowUntrusted()) {
log.info("dropping search from neutral peer")
return
}
Persona originator = null
if (search.originator != null) {
originator = new Persona(new ByteArrayInputStream(Base64.decode(search.originator)))
if (originator.destination != replyTo) {
log.info("originator doesn't match destination")
return
}
}
boolean oob = false
if (search.oobInfohash != null)
oob = search.oobInfohash
SearchEvent searchEvent = new SearchEvent(searchTerms : search.keywords,
searchHash : search.infohash,
uuid : uuid)
searchHash : infohash,
uuid : uuid,
oobInfohash : oob)
QueryEvent event = new QueryEvent ( searchEvent : searchEvent,
replyTo : replyTo,
originator : originator,
receivedOn : endpoint.destination,
firstHop : search.firstHop )
eventBus.publish(event)
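Putting sendQuery and handleSearch together, the JSON query message exchanged between peers carries roughly these fields (names taken from the code above, values are placeholders; keywords are cleared when an infohash is present):
{
    "version"     : 1,
    "uuid"        : "<search UUID>",
    "firstHop"    : false,
    "keywords"    : ["example", "terms"],
    "infohash"    : "<Base64 of the 32-byte root, hash searches only>",
    "oobInfohash" : true,
    "replyTo"     : "<Base64 destination that results are sent to>",
    "originator"  : "<Base64 persona of the searcher, optional>"
}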

View File

@ -17,6 +17,8 @@ import com.muwire.core.upload.UploadManager
import com.muwire.core.search.InvalidSearchResultException
import com.muwire.core.search.ResultsParser
import com.muwire.core.search.SearchManager
import com.muwire.core.search.UIResultBatchEvent
import com.muwire.core.search.UIResultEvent
import com.muwire.core.search.UnexpectedResultsException
import groovy.json.JsonOutput
@ -34,13 +36,17 @@ class ConnectionAcceptor {
final TrustService trustService
final SearchManager searchManager
final UploadManager uploadManager
final ConnectionEstablisher establisher
final ExecutorService acceptorThread
final ExecutorService handshakerThreads
private volatile shutdown
ConnectionAcceptor(EventBus eventBus, UltrapeerConnectionManager manager,
MuWireSettings settings, I2PAcceptor acceptor, HostCache hostCache,
TrustService trustService, SearchManager searchManager, UploadManager uploadManager) {
TrustService trustService, SearchManager searchManager, UploadManager uploadManager,
ConnectionEstablisher establisher) {
this.eventBus = eventBus
this.manager = manager
this.settings = settings
@ -49,7 +55,8 @@ class ConnectionAcceptor {
this.trustService = trustService
this.searchManager = searchManager
this.uploadManager = uploadManager
this.establisher = establisher
acceptorThread = Executors.newSingleThreadExecutor { r ->
def rv = new Thread(r)
rv.setDaemon(true)
@ -70,11 +77,13 @@ class ConnectionAcceptor {
}
void stop() {
shutdown = true
acceptorThread.shutdownNow()
handshakerThreads.shutdownNow()
}
private void acceptLoop() {
try {
while(true) {
def incoming = acceptor.accept()
log.info("accepted connection from ${incoming.destination.toBase32()}")
@ -90,6 +99,11 @@ class ConnectionAcceptor {
}
handshakerThreads.execute({processIncoming(incoming)} as Runnable)
}
} catch (Exception e) {
log.log(Level.WARNING, "exception in accept loop",e)
if (!shutdown)
throw e
}
}
private void processIncoming(Endpoint e) {
@ -105,6 +119,9 @@ class ConnectionAcceptor {
case (byte)'G':
processGET(e)
break
case (byte)'H':
processHashList(e)
break
case (byte)'P':
processPOST(e)
break
@ -140,7 +157,9 @@ class ConnectionAcceptor {
}
private void handleIncoming(Endpoint e, boolean leaf) {
boolean accept = !manager.isConnected(e.destination) && (leaf ? manager.hasLeafSlots() : manager.hasPeerSlots())
boolean accept = !manager.isConnected(e.destination) &&
!establisher.isInProgress(e.destination) &&
(leaf ? manager.hasLeafSlots() : manager.hasPeerSlots())
if (accept) {
log.info("accepting connection, leaf:$leaf")
e.outputStream.write("OK".bytes)
@ -173,9 +192,18 @@ class ConnectionAcceptor {
dis.readFully(et)
if (et != "ET ".getBytes(StandardCharsets.US_ASCII))
throw new IOException("Invalid GET connection")
uploadManager.processEndpoint(e)
uploadManager.processGET(e)
}
private void processHashList(Endpoint e) {
byte[] ashList = new byte[8]
final DataInputStream dis = new DataInputStream(e.getInputStream())
dis.readFully(ashList)
if (ashList != "ASHLIST ".getBytes(StandardCharsets.US_ASCII))
throw new IOException("Invalid HASHLIST connection")
uploadManager.processHashList(e)
}
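For orientation, processIncoming() dispatches each accepted connection on its first byte, and the helpers above only verify the remainder of the verb before handing the endpoint off; a condensed summary, with a hypothetical client-side opening write (the "OST " remainder for POST is inferred from the method name, not shown in this hunk):
// 'G' + "ET "      -> processGET()      -> uploadManager.processGET(endpoint)
// 'H' + "ASHLIST " -> processHashList() -> uploadManager.processHashList(endpoint)
// 'P' + "OST "     -> processPOST()     -> out-of-band search results (below)
endpoint.getOutputStream().write("GET ".getBytes(StandardCharsets.US_ASCII))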
private void processPOST(final Endpoint e) throws IOException {
byte [] ost = new byte[4]
final DataInputStream dis = new DataInputStream(e.getInputStream())
@ -199,13 +227,15 @@ class ConnectionAcceptor {
if (sender.destination != e.getDestination())
throw new IOException("Sender destination mismatch expected $e.getDestination(), got $sender.destination")
int nResults = dis.readUnsignedShort()
UIResultEvent[] results = new UIResultEvent[nResults]
for (int i = 0; i < nResults; i++) {
int jsonSize = dis.readUnsignedShort()
byte [] payload = new byte[jsonSize]
dis.readFully(payload)
def json = slurper.parse(payload)
eventBus.publish(ResultsParser.parse(sender, resultsUUID, json))
results[i] = ResultsParser.parse(sender, resultsUUID, json)
}
eventBus.publish(new UIResultBatchEvent(uuid: resultsUUID, results: results))
} catch (IOException | UnexpectedResultsException | InvalidSearchResultException bad) {
log.log(Level.WARNING, "failed to process POST", bad)
} finally {

View File

@ -35,6 +35,8 @@ class ConnectionEstablisher {
final Set inProgress = new ConcurrentHashSet()
ConnectionEstablisher(){}
ConnectionEstablisher(EventBus eventBus, I2PConnector i2pConnector, MuWireSettings settings,
ConnectionManager connectionManager, HostCache hostCache) {
this.eventBus = eventBus
@ -176,4 +178,8 @@ class ConnectionEstablisher {
e.close()
}
}
public boolean isInProgress(Destination d) {
inProgress.contains(d)
}
}

View File

@ -1,6 +1,7 @@
package com.muwire.core.connection
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.hostcache.HostCache
import com.muwire.core.search.QueryEvent
@ -19,13 +20,15 @@ abstract class ConnectionManager {
protected final HostCache hostCache
protected final Persona me
protected final MuWireSettings settings
ConnectionManager() {}
ConnectionManager(EventBus eventBus, Persona me, HostCache hostCache) {
ConnectionManager(EventBus eventBus, Persona me, HostCache hostCache, MuWireSettings settings) {
this.eventBus = eventBus
this.me = me
this.hostCache = hostCache
this.settings = settings
this.timer = new Timer("connections-pinger",true)
}
@ -40,7 +43,7 @@ abstract class ConnectionManager {
void onTrustEvent(TrustEvent e) {
if (e.level == TrustLevel.DISTRUSTED)
drop(e.destination)
drop(e.persona.destination)
}
abstract void drop(Destination d)
@ -58,6 +61,8 @@ abstract class ConnectionManager {
abstract void onConnectionEvent(ConnectionEvent e)
abstract void onDisconnectionEvent(DisconnectionEvent e)
abstract void shutdown()
protected void sendPings() {
final long now = System.currentTimeMillis()

View File

@ -1,6 +1,7 @@
package com.muwire.core.connection
import java.util.concurrent.atomic.AtomicBoolean
import java.util.logging.Level
import groovy.util.logging.Log
import net.i2p.data.Destination
@ -24,7 +25,7 @@ class Endpoint implements Closeable {
@Override
public void close() {
if (!closed.compareAndSet(false, true)) {
log.warning("Close loop detected for ${destination.toBase32()}", new Exception())
log.log(Level.WARNING,"Close loop detected for ${destination.toBase32()}", new Exception())
return
}
if (inputStream != null) {
@ -34,7 +35,7 @@ class Endpoint implements Closeable {
try {outputStream.close()} catch (Exception ignore) {}
}
if (toClose != null) {
try {toClose.close()} catch (Exception ignore) {}
try {toClose.reset()} catch (Exception ignore) {}
}
}

View File

@ -4,6 +4,7 @@ import java.io.InputStream
import java.io.OutputStream
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.hostcache.HostCache
import com.muwire.core.trust.TrustService
@ -16,8 +17,9 @@ import net.i2p.data.Destination
*/
class LeafConnection extends Connection {
public LeafConnection(EventBus eventBus, Endpoint endpoint, HostCache hostCache, TrustService trustService) {
super(eventBus, endpoint, true, hostCache, trustService);
public LeafConnection(EventBus eventBus, Endpoint endpoint, HostCache hostCache,
TrustService trustService, MuWireSettings settings) {
super(eventBus, endpoint, true, hostCache, trustService, settings);
}
@Override

View File

@ -3,6 +3,7 @@ package com.muwire.core.connection
import java.util.concurrent.ConcurrentHashMap
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.hostcache.HostCache
import com.muwire.core.search.QueryEvent
@ -17,8 +18,9 @@ class LeafConnectionManager extends ConnectionManager {
final Map<Destination, UltrapeerConnection> connections = new ConcurrentHashMap()
public LeafConnectionManager(EventBus eventBus, Persona me, int maxConnections, HostCache hostCache) {
super(eventBus, me, hostCache)
public LeafConnectionManager(EventBus eventBus, Persona me, int maxConnections,
HostCache hostCache, MuWireSettings settings) {
super(eventBus, me, hostCache, settings)
this.maxConnections = maxConnections
}
@ -71,4 +73,8 @@ class LeafConnectionManager extends ConnectionManager {
log.severe("removed destination not present in connection manager ${e.destination.toBase32()}")
}
@Override
void shutdown() {
}
}

View File

@ -4,6 +4,7 @@ import java.io.InputStream
import java.io.OutputStream
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.hostcache.HostCache
import com.muwire.core.trust.TrustService
import com.muwire.core.util.DataUtil
@ -29,8 +30,9 @@ class PeerConnection extends Connection {
private final JsonSlurper slurper = new JsonSlurper()
public PeerConnection(EventBus eventBus, Endpoint endpoint,
boolean incoming, HostCache hostCache, TrustService trustService) {
super(eventBus, endpoint, incoming, hostCache, trustService)
boolean incoming, HostCache hostCache, TrustService trustService,
MuWireSettings settings) {
super(eventBus, endpoint, incoming, hostCache, trustService, settings)
this.dis = new DataInputStream(endpoint.inputStream)
this.dos = new DataOutputStream(endpoint.outputStream)
}

View File

@ -4,6 +4,7 @@ import java.util.Collection
import java.util.concurrent.ConcurrentHashMap
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.hostcache.HostCache
import com.muwire.core.search.QueryEvent
@ -17,15 +18,15 @@ class UltrapeerConnectionManager extends ConnectionManager {
final int maxPeers, maxLeafs
final TrustService trustService
final Map<Destination, PeerConnection> peerConnections = new ConcurrentHashMap()
final Map<Destination, LeafConnection> leafConnections = new ConcurrentHashMap()
UltrapeerConnectionManager() {}
public UltrapeerConnectionManager(EventBus eventBus, Persona me, int maxPeers, int maxLeafs,
HostCache hostCache, TrustService trustService) {
super(eventBus, me, hostCache)
HostCache hostCache, TrustService trustService, MuWireSettings settings) {
super(eventBus, me, hostCache, settings)
this.maxPeers = maxPeers
this.maxLeafs = maxLeafs
this.trustService = trustService
@ -85,8 +86,8 @@ class UltrapeerConnectionManager extends ConnectionManager {
return
Connection c = e.leaf ?
new LeafConnection(eventBus, e.endpoint, hostCache, trustService) :
new PeerConnection(eventBus, e.endpoint, e.incoming, hostCache, trustService)
new LeafConnection(eventBus, e.endpoint, hostCache, trustService, settings) :
new PeerConnection(eventBus, e.endpoint, e.incoming, hostCache, trustService, settings)
def map = e.leaf ? leafConnections : peerConnections
map.put(e.endpoint.destination, c)
c.start()
@ -100,6 +101,14 @@ class UltrapeerConnectionManager extends ConnectionManager {
if (removed == null)
log.severe("Removed connection not present in either leaf or peer map ${e.destination.toBase32()}")
}
@Override
void shutdown() {
peerConnections.values().stream().parallel().forEach({v -> v.close()})
leafConnections.values().stream().parallel().forEach({v -> v.close()})
peerConnections.clear()
leafConnections.clear()
}
void forwardQueryToLeafs(QueryEvent e) {

View File

@ -0,0 +1,25 @@
package com.muwire.core.download
class BadHashException extends Exception {
public BadHashException() {
super();
}
public BadHashException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
public BadHashException(String message, Throwable cause) {
super(message, cause);
}
public BadHashException(String message) {
super(message);
}
public BadHashException(Throwable cause) {
super(cause);
}
}

View File

@ -1,20 +1,57 @@
package com.muwire.core.download
import com.muwire.core.connection.I2PConnector
import com.muwire.core.EventBus
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.files.FileHasher
import com.muwire.core.mesh.Mesh
import com.muwire.core.mesh.MeshManager
import com.muwire.core.trust.TrustLevel
import com.muwire.core.trust.TrustService
import com.muwire.core.util.DataUtil
import groovy.json.JsonBuilder
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import net.i2p.data.Base64
import net.i2p.data.Destination
import net.i2p.util.ConcurrentHashSet
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.UILoadedEvent
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.Executor
import java.util.concurrent.Executors
public class DownloadManager {
private final EventBus eventBus
private final TrustService trustService
private final MeshManager meshManager
private final MuWireSettings muSettings
private final I2PConnector connector
private final Executor executor
private final File incompletes, home
private final Persona me
public DownloadManager(EventBus eventBus, I2PConnector connector) {
private final Map<InfoHash, Downloader> downloaders = new ConcurrentHashMap<>()
public DownloadManager(EventBus eventBus, TrustService trustService, MeshManager meshManager, MuWireSettings muSettings,
I2PConnector connector, File home, Persona me) {
this.eventBus = eventBus
this.trustService = trustService
this.meshManager = meshManager
this.muSettings = muSettings
this.connector = connector
this.incompletes = new File(home,"incompletes")
this.home = home
this.me = me
incompletes.mkdir()
this.executor = Executors.newCachedThreadPool({ r ->
Thread rv = new Thread(r)
rv.setName("download-worker")
@ -25,9 +62,140 @@ public class DownloadManager {
public void onUIDownloadEvent(UIDownloadEvent e) {
def downloader = new Downloader(e.target, e.result.size,
e.result.infohash, e.result.pieceSize, connector, e.result.sender.destination)
def size = e.result[0].size
def infohash = e.result[0].infohash
def pieceSize = e.result[0].pieceSize
Set<Destination> destinations = new HashSet<>()
e.result.each {
destinations.add(it.sender.destination)
}
destinations.addAll(e.sources)
destinations.remove(me.destination)
Pieces pieces = getPieces(infohash, size, pieceSize)
def downloader = new Downloader(eventBus, this, me, e.target, size,
infohash, pieceSize, connector, destinations,
incompletes, pieces)
downloaders.put(infohash, downloader)
persistDownloaders()
executor.execute({downloader.download()} as Runnable)
eventBus.publish(new DownloadStartedEvent(downloader : downloader))
}
public void onUIDownloadCancelledEvent(UIDownloadCancelledEvent e) {
downloaders.remove(e.downloader.infoHash)
persistDownloaders()
}
public void onUIDownloadPausedEvent(UIDownloadPausedEvent e) {
persistDownloaders()
}
public void onUIDownloadResumedEvent(UIDownloadResumedEvent e) {
persistDownloaders()
}
void resume(Downloader downloader) {
executor.execute({downloader.download()} as Runnable)
}
void onUILoadedEvent(UILoadedEvent e) {
File downloadsFile = new File(home, "downloads.json")
if (!downloadsFile.exists())
return
def slurper = new JsonSlurper()
downloadsFile.eachLine {
def json = slurper.parseText(it)
File file = new File(DataUtil.readi18nString(Base64.decode(json.file)))
def destinations = new HashSet<>()
json.destinations.each { destination ->
destinations.add new Destination(destination)
}
InfoHash infoHash
if (json.hashList != null) {
byte[] hashList = Base64.decode(json.hashList)
infoHash = InfoHash.fromHashList(hashList)
} else {
byte [] root = Base64.decode(json.hashRoot)
infoHash = new InfoHash(root)
}
Pieces pieces = getPieces(infoHash, (long)json.length, json.pieceSizePow2)
def downloader = new Downloader(eventBus, this, me, file, (long)json.length,
infoHash, json.pieceSizePow2, connector, destinations, incompletes, pieces)
if (json.paused != null)
downloader.paused = json.paused
downloaders.put(infoHash, downloader)
downloader.readPieces()
if (!downloader.paused)
downloader.download()
eventBus.publish(new DownloadStartedEvent(downloader : downloader))
}
}
private Pieces getPieces(InfoHash infoHash, long length, int pieceSizePow2) {
int pieceSize = 0x1 << pieceSizePow2
int nPieces = (int)(length / pieceSize)
if (length % pieceSize != 0)
nPieces++
Mesh mesh = meshManager.getOrCreate(infoHash, nPieces)
mesh.pieces
}
void onSourceDiscoveredEvent(SourceDiscoveredEvent e) {
Downloader downloader = downloaders.get(e.infoHash)
if (downloader == null)
return
boolean ok = false
switch(trustService.getLevel(e.source.destination)) {
case TrustLevel.TRUSTED: ok = true; break
case TrustLevel.NEUTRAL: ok = muSettings.allowUntrusted; break
case TrustLevel.DISTRUSTED: ok = false; break
}
if (ok)
downloader.addSource(e.source.destination)
}
void onFileDownloadedEvent(FileDownloadedEvent e) {
downloaders.remove(e.downloader.infoHash)
persistDownloaders()
}
private void persistDownloaders() {
File downloadsFile = new File(home,"downloads.json")
downloadsFile.withPrintWriter { writer ->
downloaders.values().each { downloader ->
if (!downloader.cancelled) {
def json = [:]
json.file = Base64.encode(DataUtil.encodei18nString(downloader.file.getAbsolutePath()))
json.length = downloader.length
json.pieceSizePow2 = downloader.pieceSizePow2
def destinations = []
downloader.destinations.each {
destinations << it.toBase64()
}
json.destinations = destinations
InfoHash infoHash = downloader.getInfoHash()
if (infoHash.hashList != null)
json.hashList = Base64.encode(infoHash.hashList)
else
json.hashRoot = Base64.encode(infoHash.getRoot())
json.paused = downloader.paused
writer.println(JsonOutput.toJson(json))
}
}
}
}
public void shutdown() {
downloaders.values().each { it.stop() }
Downloader.executorService.shutdownNow()
}
}
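For reference, each line persistDownloaders() writes to downloads.json is a standalone JSON object of roughly this shape (shown pretty-printed here, written as a single line by JsonOutput.toJson; values are placeholders; hashList is stored once the full hash list is known, otherwise only hashRoot is):
{
    "file"          : "<Base64 of the i18n-encoded absolute path>",
    "length"        : 123456789,
    "pieceSizePow2" : 18,
    "destinations"  : ["<Base64 destination>", "<Base64 destination>"],
    "hashList"      : "<Base64 of the concatenated 32-byte piece hashes>",
    "paused"        : false
}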

View File

@ -3,8 +3,12 @@ package com.muwire.core.download;
import net.i2p.data.Base64
import com.muwire.core.Constants
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
import com.muwire.core.util.DataUtil
import static com.muwire.core.util.DataUtil.readTillRN
import groovy.util.logging.Log
@ -16,28 +20,40 @@ import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.security.MessageDigest
import java.security.NoSuchAlgorithmException
import java.util.logging.Level
@Log
class DownloadSession {
private static int SAMPLES = 10
private final EventBus eventBus
private final String meB64
private final Pieces pieces
private final InfoHash infoHash
private final Endpoint endpoint
private final File file
private final int pieceSize
private final long fileLength
private final Set<Integer> available
private final MessageDigest digest
private final LinkedList<Long> timestamps = new LinkedList<>()
private final LinkedList<Integer> reads = new LinkedList<>()
private ByteBuffer mapped
DownloadSession(Pieces pieces, InfoHash infoHash, Endpoint endpoint, File file,
int pieceSize, long fileLength) {
DownloadSession(EventBus eventBus, String meB64, Pieces pieces, InfoHash infoHash, Endpoint endpoint, File file,
int pieceSize, long fileLength, Set<Integer> available) {
this.eventBus = eventBus
this.meB64 = meB64
this.pieces = pieces
this.endpoint = endpoint
this.infoHash = infoHash
this.file = file
this.pieceSize = pieceSize
this.fileLength = fileLength
this.available = available
try {
digest = MessageDigest.getInstance("SHA-256")
} catch (NoSuchAlgorithmException impossible) {
@ -46,97 +62,157 @@ class DownloadSession {
}
}
public void request() throws IOException {
/**
* @return whether the request will proceed. The only time it will not
* is when all the pieces have been claimed by other sessions.
* @throws IOException
*/
public boolean request() throws IOException {
OutputStream os = endpoint.getOutputStream()
InputStream is = endpoint.getInputStream()
int piece = pieces.getRandomPiece()
int piece
if (available.isEmpty())
piece = pieces.claim()
else
piece = pieces.claim(new HashSet<>(available))
if (piece == -1)
return false
boolean unclaim = true
log.info("will download piece $piece")
long start = piece * pieceSize
long end = Math.min(fileLength, start + pieceSize) - 1
long length = end - start + 1
String root = Base64.encode(infoHash.getRoot())
FileChannel channel
try {
os.write("GET $root\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Range: $start-$end\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Range: $start-$end\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("X-Persona: $meB64\r\n".getBytes(StandardCharsets.US_ASCII))
String xHave = DataUtil.encodeXHave(pieces.getDownloaded(), pieces.nPieces)
os.write("X-Have: $xHave\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
String code = readTillRN(is)
if (code.startsWith("404 ")) {
String codeString = readTillRN(is)
int space = codeString.indexOf(' ')
if (space > 0)
codeString = codeString.substring(0, space)
int code = Integer.parseInt(codeString.trim())
if (code == 404) {
log.warning("file not found")
endpoint.close()
return
return false
}
if (code.startsWith("416 ")) {
log.warning("range $start-$end cannot be satisfied")
return // leave endpoint open
}
if (!code.startsWith("200 ")) {
if (!(code == 200 || code == 416)) {
log.warning("unknown code $code")
endpoint.close()
return
return false
}
// parse all headers
Set<String> headers = new HashSet<>()
Map<String,String> headers = new HashMap<>()
String header
while((header = readTillRN(is)) != "" && headers.size() < Constants.MAX_HEADERS)
headers.add(header)
long receivedStart = -1
long receivedEnd = -1
for (String receivedHeader : headers) {
def group = (receivedHeader =~ /^Content-Range: (\d+)-(\d+)$/)
if (group.size() != 1) {
log.info("ignoring header $receivedHeader")
continue
}
receivedStart = Long.parseLong(group[0][1])
receivedEnd = Long.parseLong(group[0][2])
while((header = readTillRN(is)) != "" && headers.size() < Constants.MAX_HEADERS) {
int colon = header.indexOf(':')
if (colon == -1 || colon == header.length() - 1)
throw new IOException("invalid header $header")
String key = header.substring(0, colon)
String value = header.substring(colon + 1)
headers[key] = value.trim()
}
// parse X-Alt if present
if (headers.containsKey("X-Alt")) {
headers["X-Alt"].split(",").each {
if (it.length() > 0) {
byte [] raw = Base64.decode(it)
Persona source = new Persona(new ByteArrayInputStream(raw))
eventBus.publish(new SourceDiscoveredEvent(infoHash : infoHash, source : source))
}
}
}
// parse X-Have if present
if (headers.containsKey("X-Have")) {
DataUtil.decodeXHave(headers["X-Have"]).each {
available.add(it)
}
if (!available.contains(piece))
return true // try again next time
} else {
if (code != 200)
throw new IOException("Code $code but no X-Have")
available.clear()
}
if (code != 200)
return true
String range = headers["Content-Range"]
if (range == null)
throw new IOException("Code 200 but no Content-Range")
def group = (range =~ /^(\d+)-(\d+)$/)
if (group.size() != 1)
throw new IOException("invalid Content-Range header $range")
long receivedStart = Long.parseLong(group[0][1])
long receivedEnd = Long.parseLong(group[0][2])
if (receivedStart != start || receivedEnd != end) {
log.warning("We don't support mismatching ranges yet")
endpoint.close()
return
return false
}
// start the download
channel = Files.newByteChannel(file.toPath(), EnumSet.of(StandardOpenOption.READ, StandardOpenOption.WRITE,
StandardOpenOption.SPARSE, StandardOpenOption.CREATE)) // TODO: double-check, maybe CREATE_NEW
mapped = channel.map(FileChannel.MapMode.READ_WRITE, start, end - start + 1)
byte[] tmp = new byte[0x1 << 13]
while(mapped.hasRemaining()) {
if (mapped.remaining() < tmp.length)
tmp = new byte[mapped.remaining()]
int read = is.read(tmp)
if (read == -1)
throw new IOException()
synchronized(this) {
mapped.put(tmp, 0, read)
FileChannel channel
try {
channel = Files.newByteChannel(file.toPath(), EnumSet.of(StandardOpenOption.READ, StandardOpenOption.WRITE,
StandardOpenOption.SPARSE, StandardOpenOption.CREATE)) // TODO: double-check, maybe CREATE_NEW
mapped = channel.map(FileChannel.MapMode.READ_WRITE, start, end - start + 1)
byte[] tmp = new byte[0x1 << 13]
while(mapped.hasRemaining()) {
if (mapped.remaining() < tmp.length)
tmp = new byte[mapped.remaining()]
int read = is.read(tmp)
if (read == -1)
throw new IOException()
synchronized(this) {
mapped.put(tmp, 0, read)
if (timestamps.size() == SAMPLES) {
timestamps.removeFirst()
reads.removeFirst()
}
timestamps.addLast(System.currentTimeMillis())
reads.addLast(read)
}
}
mapped.clear()
digest.update(mapped)
byte [] hash = digest.digest()
byte [] expected = new byte[32]
System.arraycopy(infoHash.getHashList(), piece * 32, expected, 0, 32)
if (hash != expected)
throw new BadHashException()
} finally {
try { channel?.close() } catch (IOException ignore) {}
}
mapped.clear()
digest.update(mapped)
byte [] hash = digest.digest()
byte [] expected = new byte[32]
System.arraycopy(infoHash.getHashList(), piece * 32, expected, 0, 32)
if (hash != expected) {
log.warning("hash mismatch")
endpoint.close()
return
}
pieces.markDownloaded(piece)
pieces.markDownloaded(piece)
unclaim = false
} finally {
try { channel?.close() } catch (IOException ignore) {}
if (unclaim)
pieces.unclaim(piece)
}
return true
}
synchronized int positionInPiece() {
@ -144,4 +220,26 @@ class DownloadSession {
return 0
mapped.position()
}
synchronized int speed() {
if (timestamps.size() < SAMPLES)
return 0
int totalRead = 0
int idx = 0
final long now = System.currentTimeMillis()
while(idx < SAMPLES && timestamps.get(idx) < now - 1000)
idx++
if (idx == SAMPLES)
return 0
if (idx == SAMPLES - 1)
return reads[idx]
long interval = timestamps.last - timestamps[idx]
if (interval == 0)
interval = 1
for (int i = idx; i < SAMPLES; i++)
totalRead += reads[i]
(int)(totalRead * 1000.0 / interval)
}
}
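Taken together, one request() round trip looks roughly like this on the wire (each header line is terminated by \r\n and the header block by an empty line; placeholders in angle brackets; with a 2^17-byte piece size, piece 1 maps to byte range 131072-262143; a 416 reply still carries X-Have so the next attempt can claim a piece the uploader actually has):
GET <Base64 root>
Range: 131072-262143
X-Persona: <Base64 persona of the downloader>
X-Have: <encoded list of pieces the downloader already has>

200 <optional status text, ignored by the parser>
Content-Range: 131072-262143
X-Have: <encoded list of pieces the uploader has>
X-Alt: <comma-separated Base64 personas of other known sources>

<131072 bytes of piece data>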

View File

@ -1,69 +1,307 @@
package com.muwire.core.download
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
import java.nio.file.AtomicMoveNotSupportedException
import java.nio.file.Files
import java.nio.file.StandardCopyOption
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.atomic.AtomicBoolean
import java.util.logging.Level
import com.muwire.core.Constants
import com.muwire.core.DownloadedFile
import com.muwire.core.EventBus
import com.muwire.core.connection.I2PConnector
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.util.DataUtil
import groovy.util.logging.Log
import net.i2p.data.Destination
import net.i2p.util.ConcurrentHashSet
@Log
public class Downloader {
public enum DownloadState { CONNECTING, DOWNLOADING, FINISHED }
public enum DownloadState { CONNECTING, HASHLIST, DOWNLOADING, FAILED, CANCELLED, PAUSED, FINISHED }
private enum WorkerState { CONNECTING, HASHLIST, DOWNLOADING, FINISHED}
private static final ExecutorService executorService = Executors.newCachedThreadPool({r ->
Thread rv = new Thread(r)
rv.setName("download worker")
rv.setDaemon(true)
rv
})
private final EventBus eventBus
private final DownloadManager downloadManager
private final Persona me
private final File file
private final Pieces pieces
private final long length
private final InfoHash infoHash
private InfoHash infoHash
private final int pieceSize
private final I2PConnector connector
private final Destination destination
private final Set<Destination> destinations
private final int nPieces
private final File piecesFile
private final File incompleteFile
final int pieceSizePow2
private final Map<Destination, DownloadWorker> activeWorkers = new ConcurrentHashMap<>()
private final Set<Destination> successfulDestinations = new ConcurrentHashSet<>()
private Endpoint endpoint
private volatile DownloadSession currentSession
private volatile DownloadState currentState
public Downloader(File file, long length, InfoHash infoHash, int pieceSizePow2, I2PConnector connector, Destination destination) {
private volatile boolean cancelled, paused
private final AtomicBoolean eventFired = new AtomicBoolean()
private boolean piecesFileClosed
public Downloader(EventBus eventBus, DownloadManager downloadManager,
Persona me, File file, long length, InfoHash infoHash,
int pieceSizePow2, I2PConnector connector, Set<Destination> destinations,
File incompletes, Pieces pieces) {
this.eventBus = eventBus
this.me = me
this.downloadManager = downloadManager
this.file = file
this.infoHash = infoHash
this.length = length
this.connector = connector
this.destination = destination
this.destinations = destinations
this.piecesFile = new File(incompletes, file.getName()+".pieces")
this.incompleteFile = new File(incompletes, file.getName()+".part")
this.pieceSizePow2 = pieceSizePow2
this.pieceSize = 1 << pieceSizePow2
int nPieces
if (length % pieceSize == 0)
nPieces = length / pieceSize
else
nPieces = length / pieceSize + 1
this.nPieces = nPieces
pieces = new Pieces(nPieces, Constants.DOWNLOAD_SEQUENTIAL_RATIO)
currentState = DownloadState.CONNECTING
this.pieces = pieces
this.nPieces = pieces.nPieces
}
public synchronized InfoHash getInfoHash() {
infoHash
}
private synchronized void setInfoHash(InfoHash infoHash) {
this.infoHash = infoHash
}
void download() {
Endpoint endpoint = connector.connect(destination)
currentState = DownloadState.DOWNLOADING
while(!pieces.isComplete()) {
currentSession = new DownloadSession(pieces, infoHash, endpoint, file, pieceSize, length)
currentSession.request()
readPieces()
destinations.each {
if (it != me.destination) {
def worker = new DownloadWorker(it)
activeWorkers.put(it, worker)
executorService.submit(worker)
}
}
}
void readPieces() {
if (!piecesFile.exists())
return
piecesFile.eachLine {
int piece = Integer.parseInt(it)
pieces.markDownloaded(piece)
}
}
void writePieces() {
synchronized(piecesFile) {
if (piecesFileClosed)
return
piecesFile.withPrintWriter { writer ->
pieces.getDownloaded().each { piece ->
writer.println(piece)
}
}
}
currentState = DownloadState.FINISHED
endpoint.close()
}
public long donePieces() {
pieces.donePieces()
}
public int positionInPiece() {
if (currentSession == null)
return 0
currentSession.positionInPiece()
public int speed() {
int total = 0
if (getCurrentState() == DownloadState.DOWNLOADING) {
activeWorkers.values().each {
if (it.currentState == WorkerState.DOWNLOADING)
total += it.speed()
}
}
total
}
public DownloadState getCurrentState() {
currentState
if (cancelled)
return DownloadState.CANCELLED
if (paused)
return DownloadState.PAUSED
boolean allFinished = true
activeWorkers.values().each {
allFinished &= it.currentState == WorkerState.FINISHED
}
if (allFinished) {
if (pieces.isComplete())
return DownloadState.FINISHED
return DownloadState.FAILED
}
// if at least one is downloading...
boolean oneDownloading = false
activeWorkers.values().each {
if (it.currentState == WorkerState.DOWNLOADING) {
oneDownloading = true
return
}
}
if (oneDownloading)
return DownloadState.DOWNLOADING
// at least one is requesting hashlist
boolean oneHashlist = false
activeWorkers.values().each {
if (it.currentState == WorkerState.HASHLIST) {
oneHashlist = true
return
}
}
if (oneHashlist)
return DownloadState.HASHLIST
return DownloadState.CONNECTING
}
public void cancel() {
cancelled = true
stop()
synchronized(piecesFile) {
piecesFileClosed = true
piecesFile.delete()
}
incompleteFile.delete()
pieces.clearAll()
}
public void pause() {
paused = true
stop()
}
void stop() {
activeWorkers.values().each {
it.cancel()
}
}
public int activeWorkers() {
int active = 0
activeWorkers.values().each {
if (it.currentState != WorkerState.FINISHED)
active++
}
active
}
public void resume() {
paused = false
readPieces()
destinations.each { destination ->
def worker = activeWorkers.get(destination)
if (worker != null) {
if (worker.currentState == WorkerState.FINISHED) {
def newWorker = new DownloadWorker(destination)
activeWorkers.put(destination, newWorker)
executorService.submit(newWorker)
}
} else {
worker = new DownloadWorker(destination)
activeWorkers.put(destination, worker)
executorService.submit(worker)
}
}
}
void addSource(Destination d) {
if (activeWorkers.containsKey(d))
return
DownloadWorker newWorker = new DownloadWorker(d)
activeWorkers.put(d, newWorker)
executorService.submit(newWorker)
}
class DownloadWorker implements Runnable {
private final Destination destination
private volatile WorkerState currentState
private volatile Thread downloadThread
private Endpoint endpoint
private volatile DownloadSession currentSession
private final Set<Integer> available = new HashSet<>()
DownloadWorker(Destination destination) {
this.destination = destination
}
public void run() {
downloadThread = Thread.currentThread()
currentState = WorkerState.CONNECTING
Endpoint endpoint = null
try {
endpoint = connector.connect(destination)
while(getInfoHash().hashList == null) {
currentState = WorkerState.HASHLIST
HashListSession session = new HashListSession(me.toBase64(), infoHash, endpoint)
InfoHash received = session.request()
setInfoHash(received)
}
currentState = WorkerState.DOWNLOADING
boolean requestPerformed
while(!pieces.isComplete()) {
currentSession = new DownloadSession(eventBus, me.toBase64(), pieces, getInfoHash(),
endpoint, incompleteFile, pieceSize, length, available)
requestPerformed = currentSession.request()
if (!requestPerformed)
break
successfulDestinations.add(endpoint.destination)
writePieces()
}
} catch (Exception bad) {
log.log(Level.WARNING,"Exception while downloading",DataUtil.findRoot(bad))
} finally {
currentState = WorkerState.FINISHED
if (pieces.isComplete() && eventFired.compareAndSet(false, true)) {
synchronized(piecesFile) {
piecesFileClosed = true
piecesFile.delete()
}
try {
Files.move(incompleteFile.toPath(), file.toPath(), StandardCopyOption.ATOMIC_MOVE)
} catch (AtomicMoveNotSupportedException e) {
Files.copy(incompleteFile.toPath(), file.toPath(), StandardCopyOption.REPLACE_EXISTING)
incompleteFile.delete()
}
eventBus.publish(
new FileDownloadedEvent(
downloadedFile : new DownloadedFile(file, getInfoHash(), pieceSizePow2, successfulDestinations),
downloader : Downloader.this))
}
endpoint?.close()
}
}
int speed() {
if (currentSession == null)
return 0
currentSession.speed()
}
void cancel() {
downloadThread?.interrupt()
}
}
}
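A rough sketch of the lifecycle as the DownloadManager above drives it (method names are the ones defined in this class; error handling omitted):
downloader.download()        // spawns one DownloadWorker per known destination
downloader.addSource(dest)   // adds a worker when a SourceDiscoveredEvent arrives
downloader.pause()           // sets paused and interrupts the workers
downloader.resume()          // re-reads the .pieces file and restarts finished workers
downloader.cancel()          // stops workers, deletes the .pieces and .part files
// each worker connects, fetches the hash list if it is still unknown, then loops
// DownloadSession.request() until Pieces.isComplete(); the first worker to finish
// moves the .part file into place and publishes FileDownloadedEvent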

View File

@ -0,0 +1,82 @@
package com.muwire.core.download
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.security.NoSuchAlgorithmException
import com.muwire.core.Constants
import com.muwire.core.InfoHash
import com.muwire.core.connection.Endpoint
import groovy.util.logging.Log
import static com.muwire.core.util.DataUtil.readTillRN
import net.i2p.data.Base64
@Log
class HashListSession {
private final String meB64
private final InfoHash infoHash
private final Endpoint endpoint
HashListSession(String meB64, InfoHash infoHash, Endpoint endpoint) {
this.meB64 = meB64
this.infoHash = infoHash
this.endpoint = endpoint
}
InfoHash request() throws IOException {
InputStream is = endpoint.getInputStream()
OutputStream os = endpoint.getOutputStream()
String root = Base64.encode(infoHash.getRoot())
os.write("HASHLIST $root\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("X-Persona: $meB64\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
String code = readTillRN(is)
if (!code.startsWith("200"))
throw new IOException("unknown code $code")
// parse all headers
Set<String> headers = new HashSet<>()
String header
while((header = readTillRN(is)) != "" && headers.size() < Constants.MAX_HEADERS)
headers.add(header)
long receivedStart = -1
long receivedEnd = -1
for (String receivedHeader : headers) {
def group = (receivedHeader =~ /^Content-Range: (\d+)-(\d+)$/)
if (group.size() != 1) {
log.info("ignoring header $receivedHeader")
continue
}
receivedStart = Long.parseLong(group[0][1])
receivedEnd = Long.parseLong(group[0][2])
}
if (receivedStart != 0)
throw new IOException("hashlist started at $receivedStart")
byte[] hashList = new byte[receivedEnd]
ByteBuffer hashListBuf = ByteBuffer.wrap(hashList)
byte[] tmp = new byte[0x1 << 13]
while(hashListBuf.hasRemaining()) {
if (hashListBuf.remaining() > tmp.length)
tmp = new byte[hashListBuf.remaining()]
int read = is.read(tmp)
if (read == -1)
throw new IOException()
hashListBuf.put(tmp, 0, read)
}
InfoHash received = InfoHash.fromHashList(hashList)
if (received.getRoot() != infoHash.getRoot())
throw new IOException("fetched list doesn't match root")
received
}
}

View File

@ -1,7 +1,7 @@
package com.muwire.core.download
class Pieces {
private final BitSet bitSet
private final BitSet done, claimed
private final int nPieces
private final float ratio
private final Random random = new Random()
@ -13,35 +13,74 @@ class Pieces {
Pieces(int nPieces, float ratio) {
this.nPieces = nPieces
this.ratio = ratio
bitSet = new BitSet(nPieces)
done = new BitSet(nPieces)
claimed = new BitSet(nPieces)
}
synchronized int getRandomPiece() {
int cardinality = bitSet.cardinality()
if (cardinality == nPieces)
synchronized int claim() {
int claimedCardinality = claimed.cardinality()
if (claimedCardinality == nPieces)
return -1
// if fuller than ratio just do sequential
if ( (1.0f * cardinality) / nPieces > ratio) {
return bitSet.nextClearBit(0)
if ( (1.0f * claimedCardinality) / nPieces > ratio) {
int rv = claimed.nextClearBit(0)
claimed.set(rv)
return rv
}
while(true) {
int start = random.nextInt(nPieces)
while(bitSet.get(start) && ++start < nPieces);
if (claimed.get(start))
continue
claimed.set(start)
return start
}
}
synchronized int claim(Set<Integer> available) {
for (int i = claimed.nextSetBit(0); i >= 0; i = claimed.nextSetBit(i+1))
available.remove(i)
if (available.isEmpty())
return -1
List<Integer> toList = available.toList()
Collections.shuffle(toList)
int rv = toList[0]
claimed.set(rv)
rv
}
synchronized def getDownloaded() {
def rv = []
for (int i = done.nextSetBit(0); i >= 0; i = done.nextSetBit(i+1)) {
rv << i
}
rv
}
synchronized void markDownloaded(int piece) {
bitSet.set(piece)
done.set(piece)
claimed.set(piece)
}
synchronized void unclaim(int piece) {
claimed.clear(piece)
}
synchronized boolean isComplete() {
bitSet.cardinality() == nPieces
done.cardinality() == nPieces
}
synchronized int donePieces() {
bitSet.cardinality()
done.cardinality()
}
synchronized boolean isDownloaded(int piece) {
done.get(piece)
}
synchronized void clearAll() {
done.clear()
claimed.clear()
}
}
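A minimal sketch of how the claim/unclaim cycle above is meant to be used by a download session (numbers are illustrative):
def pieces = new Pieces(8, 0.8f)        // 8 pieces, go sequential once 80% are claimed
int piece = pieces.claim()              // random unclaimed piece, or next unclaimed past the ratio
if (piece != -1) {
    try {
        // ... download and verify the piece ...
        pieces.markDownloaded(piece)    // marks done and keeps it claimed
    } catch (Exception e) {
        pieces.unclaim(piece)           // let another session fetch it
    }
}
// restricting the claim to what the remote side advertised via X-Have:
int fromAvailable = pieces.claim(new HashSet<>([0, 3, 5]))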

View File

@ -0,0 +1,10 @@
package com.muwire.core.download
import com.muwire.core.Event
import com.muwire.core.InfoHash
import com.muwire.core.Persona
class SourceDiscoveredEvent extends Event {
InfoHash infoHash
Persona source
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.download
import com.muwire.core.Event
class UIDownloadCancelledEvent extends Event {
Downloader downloader
}

View File

@ -3,8 +3,11 @@ package com.muwire.core.download
import com.muwire.core.Event
import com.muwire.core.search.UIResultEvent
import net.i2p.data.Destination
class UIDownloadEvent extends Event {
UIResultEvent result
UIResultEvent[] result
Set<Destination> sources
File target
}

View File

@ -0,0 +1,6 @@
package com.muwire.core.download
import com.muwire.core.Event
class UIDownloadPausedEvent extends Event {
}

View File

@ -0,0 +1,6 @@
package com.muwire.core.download
import com.muwire.core.Event
class UIDownloadResumedEvent extends Event {
}

View File

@ -0,0 +1,6 @@
package com.muwire.core.files
import com.muwire.core.Event
class AllFilesLoadedEvent extends Event {
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.files
import com.muwire.core.Event
class DirectoryUnsharedEvent extends Event {
File directory
}

View File

@ -0,0 +1,148 @@
package com.muwire.core.files
import java.nio.file.FileSystem
import java.nio.file.FileSystems
import java.nio.file.Path
import java.nio.file.Paths
import static java.nio.file.StandardWatchEventKinds.*
import java.nio.file.ClosedWatchServiceException
import java.nio.file.WatchEvent
import java.nio.file.WatchKey
import java.nio.file.WatchService
import java.util.concurrent.ConcurrentHashMap
import com.muwire.core.EventBus
import com.muwire.core.SharedFile
import groovy.util.logging.Log
import net.i2p.util.SystemVersion
@Log
class DirectoryWatcher {
private static final long WAIT_TIME = 1000
private static final WatchEvent.Kind[] kinds
static {
if (SystemVersion.isMac())
kinds = [ENTRY_MODIFY, ENTRY_DELETE]
else
kinds = [ENTRY_CREATE, ENTRY_MODIFY, ENTRY_DELETE]
}
private final EventBus eventBus
private final FileManager fileManager
private final Thread watcherThread, publisherThread
private final Map<File, Long> waitingFiles = new ConcurrentHashMap<>()
private final Map<File, WatchKey> watchedDirectories = new ConcurrentHashMap<>()
private WatchService watchService
private volatile boolean shutdown
DirectoryWatcher(EventBus eventBus, FileManager fileManager) {
this.eventBus = eventBus
this.fileManager = fileManager
this.watcherThread = new Thread({watch() } as Runnable, "directory-watcher")
watcherThread.setDaemon(true)
this.publisherThread = new Thread({publish()} as Runnable, "watched-files-publisher")
publisherThread.setDaemon(true)
}
void onAllFilesLoadedEvent(AllFilesLoadedEvent e) {
watchService = FileSystems.getDefault().newWatchService()
watcherThread.start()
publisherThread.start()
}
void stop() {
shutdown = true
watcherThread?.interrupt()
publisherThread?.interrupt()
watchService?.close()
}
void onFileSharedEvent(FileSharedEvent e) {
if (!e.file.isDirectory())
return
Path path = e.file.getCanonicalFile().toPath()
WatchKey wk = path.register(watchService, kinds)
watchedDirectories.put(e.file, wk)
}
void onDirectoryUnsharedEvent(DirectoryUnsharedEvent e) {
WatchKey wk = watchedDirectories.remove(e.directory)
wk?.cancel()
}
private void watch() {
try {
while(!shutdown) {
WatchKey key = watchService.take()
key.pollEvents().each {
switch(it.kind()) {
case ENTRY_CREATE: processCreated(key.watchable(), it.context()); break
case ENTRY_MODIFY: processModified(key.watchable(), it.context()); break
case ENTRY_DELETE: processDeleted(key.watchable(), it.context()); break
}
}
key.reset()
}
} catch (InterruptedException|ClosedWatchServiceException e) {
if (!shutdown)
throw e
}
}
private void processCreated(Path parent, Path path) {
File f = join(parent, path)
log.fine("created entry $f")
if (f.isDirectory())
f.toPath().register(watchService, kinds)
else
waitingFiles.put(f, System.currentTimeMillis())
}
private void processModified(Path parent, Path path) {
File f = join(parent, path)
log.fine("modified entry $f")
waitingFiles.put(f, System.currentTimeMillis())
}
private void processDeleted(Path parent, Path path) {
File f = join(parent, path)
log.fine("deleted entry $f")
SharedFile sf = fileManager.fileToSharedFile.get(f)
if (sf != null)
eventBus.publish(new FileUnsharedEvent(unsharedFile : sf))
}
private static File join(Path parent, Path path) {
File parentFile = parent.toFile().getCanonicalFile()
new File(parentFile, path.toFile().getName())
}
private void publish() {
try {
while(!shutdown) {
Thread.sleep(WAIT_TIME)
long now = System.currentTimeMillis()
def published = []
waitingFiles.each { file, timestamp ->
if (now - timestamp > WAIT_TIME) {
log.fine("publishing file $file")
eventBus.publish new FileSharedEvent(file : file)
published << file
}
}
published.each {
waitingFiles.remove(it)
}
}
} catch (InterruptedException e) {
if (!shutdown)
throw e
}
}
}
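As wired up in Core earlier in this diff, the watcher only becomes active once all previously shared files have been loaded; a condensed sketch of that wiring (eventBus and fileManager are assumed to already exist):
def directoryWatcher = new DirectoryWatcher(eventBus, fileManager)
eventBus.register(FileSharedEvent.class, directoryWatcher)        // registers shared directories with the WatchService
eventBus.register(AllFilesLoadedEvent.class, directoryWatcher)    // starts the watcher and publisher threads
eventBus.register(DirectoryUnsharedEvent.class, directoryWatcher) // cancels the directory's WatchKey
// changes are debounced: a created or modified file is only re-published as a
// FileSharedEvent once it has been quiet for more than WAIT_TIME (1 second)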

View File

@ -2,10 +2,11 @@ package com.muwire.core.files
import com.muwire.core.DownloadedFile
import com.muwire.core.Event
import com.muwire.core.download.Downloader
import net.i2p.data.Destination
class FileDownloadedEvent extends Event {
Downloader downloader
DownloadedFile downloadedFile
}

View File

@ -7,4 +7,10 @@ class FileHashedEvent extends Event {
SharedFile sharedFile
String error
@Override
public String toString() {
super.toString() + " sharedFile " + sharedFile?.file.getAbsolutePath() + " error: $error"
}
}

View File

@ -1,6 +1,9 @@
package com.muwire.core.files
import com.muwire.core.InfoHash
import net.i2p.data.Base64
import java.nio.MappedByteBuffer
import java.nio.channels.FileChannel
import java.nio.channels.FileChannel.MapMode
@ -17,12 +20,12 @@ class FileHasher {
* @return the size of each piece in power of 2
*/
static int getPieceSize(long size) {
if (size <= 0x1 << 25)
return 18
if (size <= 0x1 << 30)
return 17
for (int i = 26; i <= 37; i++) {
for (int i = 31; i <= 37; i++) {
if (size <= 0x1L << i) {
return i-7
return i-13
}
}
@ -52,11 +55,11 @@ class FileHasher {
try {
MappedByteBuffer buf
for (int i = 0; i < numPieces - 1; i++) {
buf = raf.getChannel().map(MapMode.READ_ONLY, size * i, size)
buf = raf.getChannel().map(MapMode.READ_ONLY, ((long)size) * i, size)
digest.update buf
output.write(digest.digest(), 0, 32)
}
def lastPieceLength = length - (numPieces - 1) * size
def lastPieceLength = length - (numPieces - 1) * ((long)size)
buf = raf.getChannel().map(MapMode.READ_ONLY, length - lastPieceLength, lastPieceLength)
digest.update buf
output.write(digest.digest(), 0, 32)
@ -67,4 +70,18 @@ class FileHasher {
byte [] hashList = output.toByteArray()
InfoHash.fromHashList(hashList)
}
public static void main(String[] args) {
if (args.length != 1) {
println "This utility computes an infohash of a file"
println "Pass absolute path to a file as an argument"
System.exit(1)
}
def file = new File(args[0])
file = file.getAbsoluteFile()
def hasher = new FileHasher()
def infohash = hasher.hashFile(file)
println Base64.encode(infohash.getRoot())
}
}
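A worked example of the corrected piece-size calculation above (my arithmetic, based on the loop shown):
import com.muwire.core.files.FileHasher
// size <= 2^25 (32 MiB)        -> 18  (256 KiB pieces)
// 2^25 < size <= 2^31 (2 GiB)  -> 31 - 13 = 18 (256 KiB pieces)
// 2^31 < size <= 2^32 (4 GiB)  -> 32 - 13 = 19 (512 KiB pieces)
assert FileHasher.getPieceSize(1L << 32) == 19
// a 4 GiB file therefore has 2^32 / 2^19 = 8192 pieces,
// so its full hash list is 8192 * 32 bytes = 256 KiB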

View File

@ -2,7 +2,9 @@ package com.muwire.core.files
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.SharedFile
import com.muwire.core.UILoadedEvent
import com.muwire.core.search.ResultsEvent
import com.muwire.core.search.SearchEvent
import com.muwire.core.search.SearchIndex
@ -14,27 +16,31 @@ class FileManager {
final EventBus eventBus
final MuWireSettings settings
final Map<InfoHash, Set<SharedFile>> rootToFiles = Collections.synchronizedMap(new HashMap<>())
final Map<File, SharedFile> fileToSharedFile = Collections.synchronizedMap(new HashMap<>())
final Map<String, Set<File>> nameToFiles = new HashMap<>()
final SearchIndex index = new SearchIndex()
FileManager(EventBus eventBus) {
FileManager(EventBus eventBus, MuWireSettings settings) {
this.settings = settings
this.eventBus = eventBus
}
void onFileHashedEvent(FileHashedEvent e) {
if (e.sharedFile != null)
addToIndex(e.sharedFile)
}
void onFileHashedEvent(FileHashedEvent e) {
if (e.sharedFile != null)
addToIndex(e.sharedFile)
}
void onFileLoadedEvent(FileLoadedEvent e) {
addToIndex(e.loadedFile)
}
void onFileDownloadedEvent(FileDownloadedEvent e) {
addToIndex(e.downloadedFile)
}
void onFileDownloadedEvent(FileDownloadedEvent e) {
if (settings.shareDownloadedFiles) {
addToIndex(e.downloadedFile)
}
}
private void addToIndex(SharedFile sf) {
log.info("Adding shared file " + sf.getFile())
@ -100,20 +106,45 @@ class FileManager {
if (e.searchHash != null) {
Set<SharedFile> found
found = rootToFiles.get new InfoHash(e.searchHash)
found = filter(found, e.oobInfohash)
if (found != null && !found.isEmpty())
re = new ResultsEvent(results: found.asList(), uuid: e.uuid)
re = new ResultsEvent(results: found.asList(), uuid: e.uuid, searchEvent: e)
} else {
def names = index.search e.searchTerms
Set<File> files = new HashSet<>()
names.each { files.addAll nameToFiles.getOrDefault(it, []) }
Set<SharedFile> sharedFiles = new HashSet<>()
files.each { sharedFiles.add fileToSharedFile[it] }
files = filter(sharedFiles, e.oobInfohash)
if (!sharedFiles.isEmpty())
re = new ResultsEvent(results: sharedFiles.asList(), uuid: e.uuid)
re = new ResultsEvent(results: sharedFiles.asList(), uuid: e.uuid, searchEvent: e)
}
if (re != null)
eventBus.publish(re)
}
private static Set<SharedFile> filter(Set<SharedFile> files, boolean oob) {
if (!oob)
return files
Set<SharedFile> rv = new HashSet<>()
files.each {
if (it.getPieceSize() != 0)
rv.add(it)
}
rv
}
void onDirectoryUnsharedEvent(DirectoryUnsharedEvent e) {
e.directory.listFiles().each {
if (it.isDirectory())
eventBus.publish(new DirectoryUnsharedEvent(directory : it))
else {
SharedFile sf = fileToSharedFile.get(it)
if (sf != null)
eventBus.publish(new FileUnsharedEvent(unsharedFile : sf))
}
}
}
}

View File

@ -5,4 +5,9 @@ import com.muwire.core.Event
class FileSharedEvent extends Event {
File file
@Override
public String toString() {
return super.toString() + " file: "+file.getAbsolutePath()
}
}

View File

@ -10,11 +10,13 @@ class HasherService {
final FileHasher hasher
final EventBus eventBus
final FileManager fileManager
Executor executor
HasherService(FileHasher hasher, EventBus eventBus) {
HasherService(FileHasher hasher, EventBus eventBus, FileManager fileManager) {
this.hasher = hasher
this.eventBus = eventBus
this.fileManager = fileManager
}
void start() {
@ -22,13 +24,15 @@ class HasherService {
}
void onFileSharedEvent(FileSharedEvent evt) {
if (fileManager.fileToSharedFile.containsKey(evt.file))
return
executor.execute( { -> process(evt.file) } as Runnable)
}
private void process(File f) {
f = f.getCanonicalFile()
if (f.isDirectory()) {
f.listFiles().each {onFileSharedEvent new FileSharedEvent(file: it) }
f.listFiles().each {eventBus.publish new FileSharedEvent(file: it) }
} else {
if (f.length() == 0) {
eventBus.publish new FileHashedEvent(error: "Not sharing empty file $f")
@ -36,7 +40,7 @@ class HasherService {
eventBus.publish new FileHashedEvent(error: "$f is too large to be shared ${f.length()}")
} else {
def hash = hasher.hashFile f
eventBus.publish new FileHashedEvent(sharedFile: new SharedFile(f, hash))
eventBus.publish new FileHashedEvent(sharedFile: new SharedFile(f, hash, FileHasher.getPieceSize(f.length())))
}
}
}

View File

@ -1,5 +1,8 @@
package com.muwire.core.files
import java.nio.file.CopyOption
import java.nio.file.Files
import java.nio.file.StandardCopyOption
import java.util.logging.Level
import java.util.stream.Collectors
@ -8,6 +11,7 @@ import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.Service
import com.muwire.core.SharedFile
import com.muwire.core.UILoadedEvent
import com.muwire.core.util.DataUtil
import groovy.json.JsonOutput
@ -33,14 +37,14 @@ class PersisterService extends Service {
timer = new Timer("file persister", true)
}
void start() {
timer.schedule({load()} as TimerTask, 1000)
}
void stop() {
timer.cancel()
}
void onUILoadedEvent(UILoadedEvent e) {
timer.schedule({load()} as TimerTask, 1)
}
void load() {
if (location.exists() && location.isFile()) {
def slurper = new JsonSlurper()
@ -55,10 +59,13 @@ class PersisterService extends Service {
}
}
}
listener.publish(new AllFilesLoadedEvent())
} catch (IllegalArgumentException|NumberFormatException e) {
log.log(Level.WARNING, "couldn't load files",e)
}
}
} else {
listener.publish(new AllFilesLoadedEvent())
}
timer.schedule({persistFiles()} as TimerTask, 0, interval)
loaded = true
}
@ -94,28 +101,37 @@ class PersisterService extends Service {
if (!Arrays.equals(root, ih.getRoot()))
return null
int pieceSize = 0
if (json.pieceSize != null)
pieceSize = json.pieceSize
if (json.sources != null) {
List sources = (List)json.sources
Set<Destination> sourceSet = sources.stream().map({d -> new Destination(d.toString())}).collect Collectors.toSet()
DownloadedFile df = new DownloadedFile(file, ih, sourceSet)
DownloadedFile df = new DownloadedFile(file, ih, pieceSize, sourceSet)
return new FileLoadedEvent(loadedFile : df)
}
SharedFile sf = new SharedFile(file, ih)
SharedFile sf = new SharedFile(file, ih, pieceSize)
return new FileLoadedEvent(loadedFile: sf)
}
private void persistFiles() {
location.delete()
def sharedFiles = fileManager.getSharedFiles()
location.withPrintWriter { writer ->
File tmp = File.createTempFile("muwire-files", "tmp")
tmp.deleteOnExit()
tmp.withPrintWriter { writer ->
sharedFiles.each { k, v ->
def json = toJson(k,v)
json = JsonOutput.toJson(json)
writer.println json
}
}
Files.copy(tmp.toPath(), location.toPath(), StandardCopyOption.REPLACE_EXISTING)
tmp.delete()
}
private def toJson(File f, SharedFile sf) {
@ -124,6 +140,7 @@ class PersisterService extends Service {
json.length = f.length()
InfoHash ih = sf.getInfoHash()
json.infoHash = Base64.encode ih.getRoot()
json.pieceSize = sf.getPieceSize()
byte [] tmp = new byte [32]
json.hashList = []
for (int i = 0;i < ih.getHashList().length / 32; i++) {

View File

@ -65,7 +65,7 @@ class CacheClient {
options.setSendLeaseSet(true)
CacheServers.getCacheServers().each {
log.info "Querying hostcache ${it.toBase32()}"
session.sendMessage(it, ping, 0, ping.length, I2PSession.PROTO_DATAGRAM, 0, 0, options)
session.sendMessage(it, ping, 0, ping.length, I2PSession.PROTO_DATAGRAM, 1, 0, options)
}
}
@ -140,7 +140,7 @@ class CacheClient {
pong.pongs.asList().each {
Destination dest = new Destination(it)
if (!session.getMyDestination().equals(dest))
eventBus.publish(new HostDiscoveredEvent(destination: dest))
eventBus.publish(new HostDiscoveredEvent(destination: dest, fromHostcache : true))
}
}

View File

@ -7,20 +7,25 @@ class Host {
private static final int MAX_FAILURES = 3
final Destination destination
private final int clearInterval
int failures,successes
long lastAttempt
public Host(Destination destination) {
public Host(Destination destination, int clearInterval) {
this.destination = destination
this.clearInterval = clearInterval
}
synchronized void onConnect() {
failures = 0
successes++
lastAttempt = System.currentTimeMillis()
}
synchronized void onFailure() {
failures++
successes = 0
lastAttempt = System.currentTimeMillis()
}
synchronized boolean isFailed() {
@ -30,4 +35,12 @@ class Host {
synchronized boolean hasSucceeded() {
successes > 0
}
synchronized void clearFailures() {
failures = 0
}
synchronized boolean canTryAgain() {
System.currentTimeMillis() - lastAttempt > (clearInterval * 60 * 1000)
}
}
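A minimal sketch of the retry window introduced above (clearInterval is in minutes; the null destination and the interval are placeholders):
def host = new Host(null, 60)
host.onFailure()
// pretend the last attempt happened 61 minutes ago
host.lastAttempt = System.currentTimeMillis() - 61 * 60 * 1000
assert host.canTryAgain()   // 61 min > clearInterval of 60 min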

View File

@ -46,21 +46,25 @@ class HostCache extends Service {
void onHostDiscoveredEvent(HostDiscoveredEvent e) {
if (myself == e.destination)
return
if (hosts.containsKey(e.destination))
return
Host host = new Host(e.destination)
if (hosts.containsKey(e.destination)) {
if (!e.fromHostcache)
return
hosts.get(e.destination).clearFailures()
return
}
Host host = new Host(e.destination, settings.hostClearInterval)
if (allowHost(host)) {
hosts.put(e.destination, host)
}
}
void onConnectionEvent(ConnectionEvent e) {
if (e.incoming || e.leaf)
if (e.leaf)
return
Destination dest = e.endpoint.destination
Host host = hosts.get(dest)
if (host == null) {
host = new Host(dest)
host = new Host(dest, settings.hostClearInterval)
hosts.put(dest, host)
}
@ -102,9 +106,11 @@ class HostCache extends Service {
storage.eachLine {
def entry = slurper.parseText(it)
Destination dest = new Destination(entry.destination)
Host host = new Host(dest)
Host host = new Host(dest, settings.hostClearInterval)
host.failures = Integer.valueOf(String.valueOf(entry.failures))
host.successes = Integer.valueOf(String.valueOf(entry.successes))
if (entry.lastAttempt != null)
host.lastAttempt = entry.lastAttempt
if (allowHost(host))
hosts.put(dest, host)
}
@ -114,7 +120,7 @@ class HostCache extends Service {
}
private boolean allowHost(Host host) {
if (host.isFailed())
if (host.isFailed() && !host.canTryAgain())
return false
if (host.destination == myself)
return false
@ -139,6 +145,7 @@ class HostCache extends Service {
map.destination = dest.toBase64()
map.failures = host.failures
map.successes = host.successes
map.lastAttempt = host.lastAttempt
def json = JsonOutput.toJson(map)
writer.println json
}

View File

@ -7,9 +7,10 @@ import net.i2p.data.Destination
class HostDiscoveredEvent extends Event {
Destination destination
boolean fromHostcache
@Override
public String toString() {
"HostDiscoveredEvent ${super.toString()} destination:${destination.toBase32()}"
"HostDiscoveredEvent ${super.toString()} destination:${destination.toBase32()} from hostcache $fromHostcache"
}
}

View File

@ -0,0 +1,28 @@
package com.muwire.core.mesh
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.download.Pieces
import net.i2p.data.Destination
import net.i2p.util.ConcurrentHashSet
class Mesh {
private final InfoHash infoHash
private final Set<Persona> sources = new ConcurrentHashSet<>()
private final Pieces pieces
Mesh(InfoHash infoHash, Pieces pieces) {
this.infoHash = infoHash
this.pieces = pieces
}
Set<Persona> getRandom(int n, Persona exclude) {
List<Persona> tmp = new ArrayList<>(sources)
tmp.remove(exclude)
Collections.shuffle(tmp)
if (tmp.size() < n)
return tmp
tmp[0..n-1]
}
}

View File

@ -0,0 +1,102 @@
package com.muwire.core.mesh
import java.util.stream.Collectors
import com.muwire.core.Constants
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.download.Pieces
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.files.FileManager
import com.muwire.core.util.DataUtil
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import net.i2p.data.Base64
class MeshManager {
private final Map<InfoHash, Mesh> meshes = Collections.synchronizedMap(new HashMap<>())
private final FileManager fileManager
private final File home
private final MuWireSettings settings
MeshManager(FileManager fileManager, File home, MuWireSettings settings) {
this.fileManager = fileManager
this.home = home
this.settings = settings
load()
}
Mesh get(InfoHash infoHash) {
meshes.get(infoHash)
}
Mesh getOrCreate(InfoHash infoHash, int nPieces) {
synchronized(meshes) {
if (meshes.containsKey(infoHash))
return meshes.get(infoHash)
Pieces pieces = new Pieces(nPieces, settings.downloadSequentialRatio)
if (fileManager.rootToFiles.containsKey(infoHash)) {
for (int i = 0; i < nPieces; i++)
pieces.markDownloaded(i)
}
Mesh rv = new Mesh(infoHash, pieces)
meshes.put(infoHash, rv)
return rv
}
}
void onSourceDiscoveredEvent(SourceDiscoveredEvent e) {
Mesh mesh = meshes.get(e.infoHash)
if (mesh == null)
return
mesh.sources.add(e.source)
save()
}
private void save() {
File meshFile = new File(home, "mesh.json")
synchronized(meshes) {
meshFile.withPrintWriter { writer ->
meshes.values().each { mesh ->
def json = [:]
json.timestamp = System.currentTimeMillis()
json.infoHash = Base64.encode(mesh.infoHash.getRoot())
json.sources = mesh.sources.stream().map({it.toBase64()}).collect(Collectors.toList())
json.nPieces = mesh.pieces.nPieces
json.xHave = DataUtil.encodeXHave(mesh.pieces.downloaded, mesh.pieces.nPieces)
writer.println(JsonOutput.toJson(json))
}
}
}
}
private void load() {
File meshFile = new File(home, "mesh.json")
if (!meshFile.exists())
return
long now = System.currentTimeMillis()
JsonSlurper slurper = new JsonSlurper()
meshFile.eachLine {
def json = slurper.parseText(it)
if (now - json.timestamp > settings.meshExpiration * 60 * 1000)
return
InfoHash infoHash = new InfoHash(Base64.decode(json.infoHash))
Pieces pieces = new Pieces(json.nPieces, settings.downloadSequentialRatio)
Mesh mesh = new Mesh(infoHash, pieces)
json.sources.each { source ->
Persona persona = new Persona(new ByteArrayInputStream(Base64.decode(source)))
mesh.sources.add(persona)
}
if (json.xHave != null)
DataUtil.decodeXHave(json.xHave).each { pieces.markDownloaded(it) }
if (!mesh.sources.isEmpty())
meshes.put(infoHash, mesh)
}
}
}
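Each line of mesh.json therefore describes one mesh; an illustrative entry (all values made up, placeholders in angle brackets):
{"timestamp":1561800000000,"infoHash":"<base64 root>","sources":["<base64 persona>"],"nPieces":40,"xHave":"<base64 piece bitfield>"}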

View File

@ -1,6 +1,7 @@
package com.muwire.core.search
import com.muwire.core.Event
import com.muwire.core.Persona
import net.i2p.data.Destination
@ -9,6 +10,11 @@ class QueryEvent extends Event {
SearchEvent searchEvent
boolean firstHop
Destination replyTo
Persona originator
Destination receivedOn
String toString() {
"searchEvent: $searchEvent firstHop:$firstHop, replyTo:${replyTo.toBase32()}" +
"originator: ${originator.getHumanReadableName()} receivedOn: ${receivedOn.toBase32()}"
}
}

View File

@ -5,6 +5,7 @@ import com.muwire.core.SharedFile
class ResultsEvent extends Event {
SearchEvent searchEvent
SharedFile[] results
UUID uuid
}

View File

@ -1,5 +1,7 @@
package com.muwire.core.search
import java.util.stream.Collectors
import javax.naming.directory.InvalidSearchControlsException
import com.muwire.core.InfoHash
@ -7,13 +9,25 @@ import com.muwire.core.Persona
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
import net.i2p.data.Destination
class ResultsParser {
public static UIResultEvent parse(Persona p, UUID uuid, def json) throws InvalidSearchResultException {
if (json.type != "Result")
throw new InvalidSearchResultException("not a result json")
if (json.version != 1)
throw new InvalidSearchResultException("unknown version $json.version")
switch(json.version) {
case 1:
return parseV1(p, uuid, json)
case 2:
return parseV2(p, uuid, json)
default:
throw new InvalidSearchResultException("unknown version $json.version")
}
}
private static parseV1(Persona p, UUID uuid, def json) {
if (json.name == null)
throw new InvalidSearchResultException("name missing")
if (json.size == null)
@ -47,9 +61,45 @@ class ResultsParser {
size : size,
infohash : parsedIH,
pieceSize : pieceSize,
sources : Collections.emptySet(),
uuid : uuid)
} catch (Exception e) {
throw new InvalidSearchResultException("parsing search result failed",e)
}
}
private static UIResultEvent parseV2(Persona p, UUID uuid, def json) {
if (json.name == null)
throw new InvalidSearchResultException("name missing")
if (json.size == null)
throw new InvalidSearchResultException("length missing")
if (json.infohash == null)
throw new InvalidSearchResultException("infohash missing")
if (json.pieceSize == null)
throw new InvalidSearchResultException("pieceSize missing")
if (json.hashList != null)
throw new InvalidSearchResultException("V2 result with hashlist")
try {
String name = DataUtil.readi18nString(Base64.decode(json.name))
long size = json.size
byte [] infoHash = Base64.decode(json.infohash)
if (infoHash.length != InfoHash.SIZE)
throw new InvalidSearchResultException("invalid infohash size $infoHash.length")
int pieceSize = json.pieceSize
Set<Destination> sources = Collections.emptySet()
if (json.sources != null)
sources = json.sources.stream().map({new Destination(it)}).collect(Collectors.toSet())
return new UIResultEvent( sender : p,
name : name,
size : size,
infohash : new InfoHash(infoHash),
pieceSize : pieceSize,
sources : sources,
uuid: uuid)
} catch (Exception e) {
throw new InvalidSearchResultException("parsing search result failed",e)
}
}
}
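For comparison, a version 2 result omits the hash list and may carry suggested sources; an illustrative payload (name, infohash and sources are placeholders, sizes made up):
{"type":"Result","version":2,"name":"<base64 name>","size":1048576,"infohash":"<base64 root>","pieceSize":17,"sources":["<base64 destination>"]}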

View File

@ -11,7 +11,10 @@ import java.util.concurrent.Executor
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory
import java.util.concurrent.atomic.AtomicInteger
import java.util.logging.Level
import java.util.stream.Collectors
import com.muwire.core.DownloadedFile
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
@ -46,22 +49,30 @@ class ResultsSender {
this.me = me
}
void sendResults(UUID uuid, SharedFile[] results, Destination target) {
log.info("Sending $results.length results for uuid $uuid to ${target.toBase32()}")
void sendResults(UUID uuid, SharedFile[] results, Destination target, boolean oobInfohash) {
log.info("Sending $results.length results for uuid $uuid to ${target.toBase32()} oobInfohash : $oobInfohash")
if (target.equals(me.destination)) {
results.each {
long length = it.getFile().length()
int pieceSize = it.getPieceSize()
if (pieceSize == 0)
pieceSize = FileHasher.getPieceSize(length)
Set<Destination> suggested = Collections.emptySet()
if (it instanceof DownloadedFile)
suggested = it.sources
def uiResultEvent = new UIResultEvent( sender : me,
name : it.getFile().getName(),
size : length,
infohash : it.getInfoHash(),
pieceSize : FileHasher.getPieceSize(length),
uuid : uuid
pieceSize : pieceSize,
uuid : uuid,
sources : suggested
)
eventBus.publish(uiResultEvent)
}
} else {
executor.execute(new ResultSendJob(uuid : uuid, results : results, target: target))
executor.execute(new ResultSendJob(uuid : uuid, results : results,
target: target, oobInfohash : oobInfohash))
}
}
@ -69,48 +80,58 @@ class ResultsSender {
UUID uuid
SharedFile [] results
Destination target
boolean oobInfohash
@Override
public void run() {
byte [] tmp = new byte[InfoHash.SIZE]
JsonOutput jsonOutput = new JsonOutput()
Endpoint endpoint = null;
try {
endpoint = connector.connect(target)
DataOutputStream os = new DataOutputStream(endpoint.getOutputStream())
os.write("POST $uuid\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
me.write(os)
os.writeShort((short)results.length)
results.each {
byte [] name = it.getFile().getName().getBytes(StandardCharsets.UTF_8)
def baos = new ByteArrayOutputStream()
def daos = new DataOutputStream(baos)
daos.writeShort((short) name.length)
daos.write(name)
daos.flush()
String encodedName = Base64.encode(baos.toByteArray())
def obj = [:]
obj.type = "Result"
obj.version = 1
obj.name = encodedName
obj.infohash = Base64.encode(it.getInfoHash().getRoot())
obj.size = it.getFile().length()
obj.pieceSize = FileHasher.getPieceSize(it.getFile().length())
byte [] hashList = it.getInfoHash().getHashList()
def hashListB64 = []
for (int i = 0; i < hashList.length / InfoHash.SIZE; i++) {
System.arraycopy(hashList, InfoHash.SIZE * i, tmp, 0, InfoHash.SIZE)
hashListB64 << Base64.encode(tmp)
}
obj.hashList = hashListB64
byte [] tmp = new byte[InfoHash.SIZE]
JsonOutput jsonOutput = new JsonOutput()
Endpoint endpoint = null;
try {
endpoint = connector.connect(target)
DataOutputStream os = new DataOutputStream(endpoint.getOutputStream())
os.write("POST $uuid\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
me.write(os)
os.writeShort((short)results.length)
results.each {
byte [] name = it.getFile().getName().getBytes(StandardCharsets.UTF_8)
def baos = new ByteArrayOutputStream()
def daos = new DataOutputStream(baos)
daos.writeShort((short) name.length)
daos.write(name)
daos.flush()
String encodedName = Base64.encode(baos.toByteArray())
def obj = [:]
obj.type = "Result"
obj.version = oobInfohash ? 2 : 1
obj.name = encodedName
obj.infohash = Base64.encode(it.getInfoHash().getRoot())
obj.size = it.getFile().length()
obj.pieceSize = it.getPieceSize()
if (!oobInfohash) {
byte [] hashList = it.getInfoHash().getHashList()
def hashListB64 = []
for (int i = 0; i < hashList.length / InfoHash.SIZE; i++) {
System.arraycopy(hashList, InfoHash.SIZE * i, tmp, 0, InfoHash.SIZE)
hashListB64 << Base64.encode(tmp)
}
obj.hashList = hashListB64
}
def json = jsonOutput.toJson(obj)
os.writeShort((short)json.length())
os.write(json.getBytes(StandardCharsets.US_ASCII))
if (it instanceof DownloadedFile)
obj.sources = it.sources.stream().map({dest -> dest.toBase64()}).collect(Collectors.toSet())
def json = jsonOutput.toJson(obj)
os.writeShort((short)json.length())
os.write(json.getBytes(StandardCharsets.US_ASCII))
}
os.flush()
} finally {
endpoint?.close()
}
os.flush()
} finally {
endpoint?.close()
} catch (Exception e) {
log.log(Level.WARNING, "problem sending results",e)
}
}
}

View File

@ -1,10 +1,19 @@
package com.muwire.core.search
import com.muwire.core.Event
import com.muwire.core.InfoHash
class SearchEvent extends Event {
List<String> searchTerms
byte [] searchHash
UUID uuid
boolean oobInfohash
String toString() {
def infoHash = null
if (searchHash != null)
infoHash = new InfoHash(searchHash)
"searchTerms: $searchTerms searchHash:$infoHash, uuid:$uuid oobInfohash:$oobInfohash"
}
}

View File

@ -1,5 +1,6 @@
package com.muwire.core.search
import com.muwire.core.Constants
class SearchIndex {
@ -31,8 +32,11 @@ class SearchIndex {
}
private static String[] split(String source) {
source = source.replaceAll("[\\.,_-]", " ")
source.split(" ")
source = source.replaceAll(Constants.SPLIT_PATTERN, " ").toLowerCase()
String [] split = source.split(" ")
def rv = []
split.each { if (it.length() > 0) rv << it }
rv.toArray(new String[0])
}
String[] search(List<String> terms) {
@ -41,7 +45,7 @@ class SearchIndex {
terms.each {
Set<String> forWord = keywords.getOrDefault(it,[])
if (rv == null) {
rv = forWord
rv = new HashSet<>(forWord)
} else {
rv.retainAll(forWord)
}
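Roughly, the new tokenizer lowercases, strips separators and drops empty tokens; a sketch of the expected behaviour, assuming Constants.SPLIT_PATTERN only adds extra punctuation such as brackets and asterisks:
String[] tokens = SearchIndex.split("My_Book-Draft.2.PDF")
assert tokens.toList() == ["my", "book", "draft", "2", "pdf"]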

View File

@ -44,7 +44,7 @@ public class SearchManager {
log.info("No results for search uuid $event.uuid")
return
}
resultsSender.sendResults(event.uuid, event.results, target)
resultsSender.sendResults(event.uuid, event.results, target, event.searchEvent.oobInfohash)
}
boolean hasLocalSearch(UUID uuid) {

View File

@ -0,0 +1,8 @@
package com.muwire.core.search
import com.muwire.core.Event
class UIResultBatchEvent extends Event {
UUID uuid
UIResultEvent[] results
}

View File

@ -4,11 +4,19 @@ import com.muwire.core.Event
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import net.i2p.data.Destination
class UIResultEvent extends Event {
Persona sender
Set<Destination> sources
UUID uuid
String name
long size
InfoHash infohash
int pieceSize
@Override
public String toString() {
super.toString() + "name:$name size:$size sender:${sender.getHumanReadableName()} pieceSize $pieceSize"
}
}

View File

@ -1,11 +1,10 @@
package com.muwire.core.trust
import com.muwire.core.Event
import net.i2p.data.Destination
import com.muwire.core.Persona
class TrustEvent extends Event {
Destination destination
Persona persona
TrustLevel level
}

View File

@ -1,7 +1,11 @@
package com.muwire.core.trust
import java.util.concurrent.ConcurrentHashMap
import com.muwire.core.Persona
import com.muwire.core.Service
import net.i2p.data.Base64
import net.i2p.data.Destination
import net.i2p.util.ConcurrentHashSet
@ -10,8 +14,8 @@ class TrustService extends Service {
final File persistGood, persistBad
final long persistInterval
final Set<Destination> good = new ConcurrentHashSet<>()
final Set<Destination> bad = new ConcurrentHashSet<>()
final Map<Destination, Persona> good = new ConcurrentHashMap<>()
final Map<Destination, Persona> bad = new ConcurrentHashMap<>()
final Timer timer
@ -35,12 +39,16 @@ class TrustService extends Service {
void load() {
if (persistGood.exists()) {
persistGood.eachLine {
good.add(new Destination(it))
byte [] decoded = Base64.decode(it)
Persona persona = new Persona(new ByteArrayInputStream(decoded))
good.put(persona.destination, persona)
}
}
if (persistBad.exists()) {
persistBad.eachLine {
bad.add(new Destination(it))
byte [] decoded = Base64.decode(it)
Persona persona = new Persona(new ByteArrayInputStream(decoded))
bad.put(persona.destination, persona)
}
}
timer.schedule({persist()} as TimerTask, persistInterval, persistInterval)
@ -50,22 +58,22 @@ class TrustService extends Service {
private void persist() {
persistGood.delete()
persistGood.withPrintWriter { writer ->
good.each {
writer.println it.toBase64()
good.each {k,v ->
writer.println v.toBase64()
}
}
persistBad.delete()
persistBad.withPrintWriter { writer ->
bad.each {
writer.println it.toBase64()
bad.each { k,v ->
writer.println v.toBase64()
}
}
}
TrustLevel getLevel(Destination dest) {
if (good.contains(dest))
if (good.containsKey(dest))
return TrustLevel.TRUSTED
else if (bad.contains(dest))
else if (bad.containsKey(dest))
return TrustLevel.DISTRUSTED
TrustLevel.NEUTRAL
}
@ -73,16 +81,16 @@ class TrustService extends Service {
void onTrustEvent(TrustEvent e) {
switch(e.level) {
case TrustLevel.TRUSTED:
bad.remove(e.destination)
good.add(e.destination)
bad.remove(e.persona.destination)
good.put(e.persona.destination, e.persona)
break
case TrustLevel.DISTRUSTED:
good.remove(e.destination)
bad.add(e.destination)
good.remove(e.persona.destination)
bad.put(e.persona.destination, e.persona)
break
case TrustLevel.NEUTRAL:
good.remove(e.destination)
bad.remove(e.destination)
good.remove(e.persona.destination)
bad.remove(e.persona.destination)
break
}
}

View File

@ -0,0 +1,10 @@
package com.muwire.core.update
import com.muwire.core.Event
import com.muwire.core.InfoHash
class UpdateAvailableEvent extends Event {
String version
String signer
String infoHash
}

View File

@ -0,0 +1,191 @@
package com.muwire.core.update
import java.util.logging.Level
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.download.UIDownloadEvent
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.files.FileManager
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.SearchEvent
import com.muwire.core.search.UIResultBatchEvent
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
import net.i2p.client.I2PSession
import net.i2p.client.I2PSessionMuxedListener
import net.i2p.client.SendMessageOptions
import net.i2p.client.datagram.I2PDatagramDissector
import net.i2p.client.datagram.I2PDatagramMaker
import net.i2p.data.Base64
import net.i2p.util.VersionComparator
@Log
class UpdateClient {
final EventBus eventBus
final I2PSession session
final String myVersion
final MuWireSettings settings
final FileManager fileManager
final Persona me
private final Timer timer
private long lastUpdateCheckTime
private volatile InfoHash updateInfoHash
private volatile String version, signer
private volatile boolean updateDownloading
UpdateClient(EventBus eventBus, I2PSession session, String myVersion, MuWireSettings settings, FileManager fileManager, Persona me) {
this.eventBus = eventBus
this.session = session
this.myVersion = myVersion
this.settings = settings
this.fileManager = fileManager
this.me = me
timer = new Timer("update-client",true)
}
void start() {
session.addMuxedSessionListener(new Listener(), I2PSession.PROTO_DATAGRAM, 2)
timer.schedule({checkUpdate()} as TimerTask, 60000, 60 * 60 * 1000)
}
void stop() {
timer.cancel()
}
void onUIResultBatchEvent(UIResultBatchEvent results) {
if (results.results[0].infohash != updateInfoHash)
return
if (updateDownloading)
return
updateDownloading = true
def file = new File(settings.downloadLocation, results.results[0].name)
def downloadEvent = new UIDownloadEvent(result: results.results[0], sources : results.results[0].sources, target : file)
eventBus.publish(downloadEvent)
}
void onFileDownloadedEvent(FileDownloadedEvent e) {
if (e.downloadedFile.infoHash != updateInfoHash)
return
updateDownloading = false
eventBus.publish(new UpdateDownloadedEvent(version : version, signer : signer))
}
private void checkUpdate() {
final long now = System.currentTimeMillis()
if (lastUpdateCheckTime > 0) {
if (now - lastUpdateCheckTime < settings.updateCheckInterval * 60 * 60 * 1000)
return
}
lastUpdateCheckTime = now
log.info("checking for update")
def ping = [version : 1, myVersion : myVersion]
ping = JsonOutput.toJson(ping)
def maker = new I2PDatagramMaker(session)
ping = maker.makeI2PDatagram(ping.bytes)
def options = new SendMessageOptions()
options.setSendLeaseSet(true)
session.sendMessage(UpdateServers.UPDATE_SERVER, ping, 0, ping.length, I2PSession.PROTO_DATAGRAM, 2, 0, options)
}
class Listener implements I2PSessionMuxedListener {
final JsonSlurper slurper = new JsonSlurper()
@Override
public void messageAvailable(I2PSession session, int msgId, long size) {
}
@Override
public void messageAvailable(I2PSession session, int msgId, long size, int proto, int fromport, int toport) {
if (proto != I2PSession.PROTO_DATAGRAM) {
log.warning "Received unexpected protocol $proto"
return
}
def payload = session.receiveMessage(msgId)
def dissector = new I2PDatagramDissector()
try {
dissector.loadI2PDatagram(payload)
def sender = dissector.getSender()
if (sender != UpdateServers.UPDATE_SERVER) {
log.warning("received something not from update server " + sender.toBase32())
return
}
log.info("Received something from update server")
payload = dissector.getPayload()
payload = slurper.parse(payload)
if (payload.version == null) {
log.warning("version missing")
return
}
if (payload.signer == null) {
log.warning("signer missing")
}
if (VersionComparator.comp(myVersion, payload.version) >= 0) {
log.info("no new version available")
return
}
String infoHash
if (settings.updateType == "jar") {
infoHash = payload.infoHash
} else
infoHash = payload[settings.updateType]
if (!settings.autoDownloadUpdate) {
log.info("new version $payload.version available, publishing event")
eventBus.publish(new UpdateAvailableEvent(version : payload.version, signer : payload.signer, infoHash : infoHash))
} else {
log.info("new version $payload.version available")
updateInfoHash = new InfoHash(Base64.decode(infoHash))
if (fileManager.rootToFiles.containsKey(updateInfoHash))
eventBus.publish(new UpdateDownloadedEvent(version : payload.version, signer : payload.signer))
else {
updateDownloading = false
version = payload.version
signer = payload.signer
log.info("starting search for new version hash $payload.infoHash")
def searchEvent = new SearchEvent(searchHash : updateInfoHash.getRoot(), uuid : UUID.randomUUID(), oobInfohash : true)
def queryEvent = new QueryEvent(searchEvent : searchEvent, firstHop : true, replyTo : me.destination,
receivedOn : me.destination, originator : me)
eventBus.publish(queryEvent)
}
}
} catch (Exception e) {
log.log(Level.WARNING,"Invalid datagram",e)
}
}
@Override
public void reportAbuse(I2PSession session, int severity) {
}
@Override
public void disconnected(I2PSession session) {
log.severe("I2P session disconnected")
}
@Override
public void errorOccurred(I2PSession session, String message, Throwable error) {
log.log(Level.SEVERE, message, error)
}
}
}
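The update check is a small JSON exchange over I2P datagrams on port 2. Illustrative payloads based on the fields used above (version numbers and hashes are made up; non-jar update types are looked up under their own key in the reply):
ping:  {"version":1,"myVersion":"0.4.2"}
reply: {"version":"0.4.3","signer":"<signer id>","infoHash":"<base64 root of the update file>"}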

View File

@ -0,0 +1,8 @@
package com.muwire.core.update
import com.muwire.core.Event
class UpdateDownloadedEvent extends Event {
String version
String signer
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.update
import net.i2p.data.Destination
class UpdateServers {
static final Destination UPDATE_SERVER = new Destination("pSWieSRB3czCl3Zz4WpKp4Z8tjv-05zbogRDS7SEnKcSdWOupVwjzQ92GsgQh1VqgoSRk1F8dpZOnHxxz5HFy9D7ri0uFdkMyXdSKoB7IgkkvCfTAyEmeaPwSYnurF3Zk7u286E7YG2rZkQZgJ77tow7ZS0mxFB7Z0Ti-VkZ9~GeGePW~howwNm4iSQACZA0DyTpI8iv5j4I0itPCQRgaGziob~Vfvjk49nd8N4jtaDGo9cEcafikVzQ2OgBgYWL6LRbrrItwuGqsDvITUHWaElUYIDhRQYUq8gYiUA6rwAJputfhFU0J7lIxFR9vVY7YzRvcFckfr0DNI4VQVVlPnRPkUxQa--BlldMaCIppWugjgKLwqiSiHywKpSMlBWgY2z1ry4ueEBo1WEP-mEf88wRk4cFQBCKtctCQnIG2GsnATqTl-VGUAsuzeNWZiFSwXiTy~gQ094yWx-K06fFZUDt4CMiLZVhGlixiInD~34FCRC9LVMtFcqiFB2M-Ql2AAAA")
}

View File

@ -0,0 +1,6 @@
package com.muwire.core.upload
class ContentRequest extends Request {
Range range
int have
}

View File

@ -0,0 +1,118 @@
package com.muwire.core.upload
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.util.stream.Collectors
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
import com.muwire.core.mesh.Mesh
import com.muwire.core.util.DataUtil
import net.i2p.data.Destination
class ContentUploader extends Uploader {
private final File file
private final ContentRequest request
private final Mesh mesh
private final int pieceSize
ContentUploader(File file, ContentRequest request, Endpoint endpoint, Mesh mesh, int pieceSize) {
super(endpoint)
this.file = file
this.request = request
this.mesh = mesh
this.pieceSize = pieceSize
}
@Override
void respond() {
OutputStream os = endpoint.getOutputStream()
Range range = request.getRange()
boolean satisfiable = true
final long length = file.length()
if (range.start >= length || range.end >= length)
satisfiable = false
if (satisfiable) {
int startPiece = range.start / (0x1 << pieceSize)
int endPiece = range.end / (0x1 << pieceSize)
for (int i = startPiece; i <= endPiece; i++)
satisfiable &= mesh.pieces.isDownloaded(i)
}
if (!satisfiable) {
os.write("416 Range Not Satisfiable\r\n".getBytes(StandardCharsets.US_ASCII))
writeMesh(request.downloader)
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
return
}
os.write("200 OK\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Content-Range: $range.start-$range.end\r\n".getBytes(StandardCharsets.US_ASCII))
writeMesh(request.downloader)
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
FileChannel channel
try {
channel = Files.newByteChannel(file.toPath(), EnumSet.of(StandardOpenOption.READ))
mapped = channel.map(FileChannel.MapMode.READ_ONLY, range.start, range.end - range.start + 1)
byte [] tmp = new byte[0x1 << 13]
while(mapped.hasRemaining()) {
int start = mapped.position()
synchronized(this) {
mapped.get(tmp, 0, Math.min(tmp.length, mapped.remaining()))
}
int read = mapped.position() - start
endpoint.getOutputStream().write(tmp, 0, read)
}
} finally {
try {channel?.close() } catch (IOException ignored) {}
endpoint.getOutputStream().flush()
}
}
private void writeMesh(Persona toExclude) {
String xHave = DataUtil.encodeXHave(mesh.pieces.getDownloaded(), mesh.pieces.nPieces)
endpoint.getOutputStream().write("X-Have: $xHave\r\n".getBytes(StandardCharsets.US_ASCII))
Set<Persona> sources = mesh.getRandom(3, toExclude)
if (!sources.isEmpty()) {
String xAlts = sources.stream().map({ it.toBase64() }).collect(Collectors.joining(","))
endpoint.getOutputStream().write("X-Alt: $xAlts\r\n".getBytes(StandardCharsets.US_ASCII))
}
}
@Override
public String getName() {
return file.getName();
}
@Override
public synchronized int getProgress() {
if (mapped == null)
return 0
int position = mapped.position()
int total = request.getRange().end - request.getRange().start
(int)(position * 100.0 / total)
}
@Override
public String getDownloader() {
request.downloader.getHumanReadableName()
}
@Override
public int getDonePieces() {
return request.have;
}
@Override
public int getTotalPieces() {
return mesh.pieces.nPieces;
}
}
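On the wire, a satisfiable request is now answered with the mesh headers before the raw bytes; an illustrative response (values made up):
200 OK
Content-Range: 0-131071
X-Have: <base64 piece bitfield>
X-Alt: <base64 persona>,<base64 persona>
(empty line, then the requested bytes)
An unsatisfiable range receives the same X-Have/X-Alt headers after a 416 Range Not Satisfiable status line.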

View File

@ -0,0 +1,4 @@
package com.muwire.core.upload
class HashListRequest extends Request {
}

View File

@ -0,0 +1,65 @@
package com.muwire.core.upload
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import com.muwire.core.InfoHash
import com.muwire.core.connection.Endpoint
import net.i2p.data.Base64
class HashListUploader extends Uploader {
private final InfoHash infoHash
private final HashListRequest request
HashListUploader(Endpoint endpoint, InfoHash infoHash, HashListRequest request) {
super(endpoint)
this.infoHash = infoHash
mapped = ByteBuffer.wrap(infoHash.getHashList())
this.request = request
}
void respond() {
OutputStream os = endpoint.getOutputStream()
os.write("200 OK\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Content-Range: 0-${mapped.remaining()}\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
byte[]tmp = new byte[0x1 << 13]
while(mapped.hasRemaining()) {
int start = mapped.position()
synchronized(this) {
mapped.get(tmp, 0, Math.min(tmp.length, mapped.remaining()))
}
int read = mapped.position() - start
endpoint.getOutputStream().write(tmp, 0, read)
}
endpoint.getOutputStream().flush()
}
@Override
public String getName() {
return "Hash list for " + Base64.encode(infoHash.getRoot());
}
@Override
public synchronized int getProgress() {
(int)(mapped.position() * 100.0 / mapped.capacity())
}
@Override
public String getDownloader() {
request.downloader.getHumanReadableName()
}
@Override
public int getDonePieces() {
return 0;
}
@Override
public int getTotalPieces() {
return 1;
}
}

View File

@ -4,6 +4,8 @@ import java.nio.charset.StandardCharsets
import com.muwire.core.Constants
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.util.DataUtil
import groovy.util.logging.Log
import net.i2p.data.Base64
@ -15,56 +17,12 @@ class Request {
private static final byte N = "\n".getBytes(StandardCharsets.US_ASCII)[0]
InfoHash infoHash
Range range
Persona downloader
Map<String, String> headers
static Request parse(InfoHash infoHash, InputStream is) throws IOException {
Map<String,String> headers = new HashMap<>()
byte [] tmp = new byte[Constants.MAX_HEADER_SIZE]
while(headers.size() < Constants.MAX_HEADERS) {
boolean r = false
boolean n = false
int idx = 0
while (true) {
byte read = is.read()
if (read == -1)
throw new IOException("Stream closed")
if (!r && read == N)
throw new IOException("Received N before R")
if (read == R) {
if (r)
throw new IOException("double R")
r = true
continue
}
if (r && !n) {
if (read != N)
throw new IOException("R not followed by N")
n = true
break
}
if (idx == 0x1 << 14)
throw new IOException("Header too long")
tmp[idx++] = read
}
if (idx == 0)
break
String header = new String(tmp, 0, idx, StandardCharsets.US_ASCII)
log.fine("Read header $header")
int keyIdx = header.indexOf(":")
if (keyIdx < 1)
throw new IOException("Header key not found")
if (keyIdx == header.length())
throw new IOException("Header value not found")
String key = header.substring(0, keyIdx)
String value = header.substring(keyIdx + 1)
headers.put(key, value)
}
static Request parseContentRequest(InfoHash infoHash, InputStream is) throws IOException {
Map<String, String> headers = parseHeaders(is)
if (!headers.containsKey("Range"))
throw new IOException("Range header not found")
@ -85,7 +43,81 @@ class Request {
if (start < 0 || end < start)
throw new IOException("Invalid range $start - $end")
new Request( infoHash : infoHash, range : new Range(start, end), headers : headers)
Persona downloader = null
if (headers.containsKey("X-Persona")) {
def encoded = headers["X-Persona"].trim()
def decoded = Base64.decode(encoded)
downloader = new Persona(new ByteArrayInputStream(decoded))
}
int have = 0
if (headers.containsKey("X-Have")) {
def encoded = headers["X-Have"].trim()
have = DataUtil.decodeXHave(encoded).size()
}
new ContentRequest( infoHash : infoHash, range : new Range(start, end),
headers : headers, downloader : downloader, have : have)
}
static Request parseHashListRequest(InfoHash infoHash, InputStream is) throws IOException {
Map<String,String> headers = parseHeaders(is)
Persona downloader = null
if (headers.containsKey("X-Persona")) {
def encoded = headers["X-Persona"].trim()
def decoded = Base64.decode(encoded)
downloader = new Persona(new ByteArrayInputStream(decoded))
}
new HashListRequest(infoHash : infoHash, headers : headers, downloader : downloader)
}
private static Map<String, String> parseHeaders(InputStream is) {
Map<String,String> headers = new HashMap<>()
byte [] tmp = new byte[Constants.MAX_HEADER_SIZE]
while(headers.size() < Constants.MAX_HEADERS) {
boolean r = false
boolean n = false
int idx = 0
while (true) {
byte read = is.read()
if (read == -1)
throw new IOException("Stream closed")
if (!r && read == N)
throw new IOException("Received N before R")
if (read == R) {
if (r)
throw new IOException("double R")
r = true
continue
}
if (r && !n) {
if (read != N)
throw new IOException("R not followed by N")
n = true
break
}
if (idx == 0x1 << 14)
throw new IOException("Header too long")
tmp[idx++] = read
}
if (idx == 0)
break
String header = new String(tmp, 0, idx, StandardCharsets.US_ASCII)
log.fine("Read header $header")
int keyIdx = header.indexOf(":")
if (keyIdx < 1)
throw new IOException("Header key not found")
if (keyIdx == header.length())
throw new IOException("Header value not found")
String key = header.substring(0, keyIdx)
String value = header.substring(keyIdx + 1)
headers.put(key, value)
}
headers
}
}
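Taken together with the UploadManager changes below, an incoming content request now looks roughly like this (the root is 44 base64 characters; header values are placeholders):
GET <base64 infohash root>
Range: 0-131071
X-Persona: <base64 persona of the downloader>
X-Have: <base64 bitfield of pieces the downloader already has>
(empty line)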

View File

@ -6,7 +6,12 @@ import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.SharedFile
import com.muwire.core.connection.Endpoint
import com.muwire.core.download.DownloadManager
import com.muwire.core.download.Downloader
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.files.FileManager
import com.muwire.core.mesh.Mesh
import com.muwire.core.mesh.MeshManager
import groovy.util.logging.Log
import net.i2p.data.Base64
@ -15,17 +20,22 @@ import net.i2p.data.Base64
public class UploadManager {
private final EventBus eventBus
private final FileManager fileManager
private final MeshManager meshManager
private final DownloadManager downloadManager
public UploadManager() {}
public UploadManager(EventBus eventBus, FileManager fileManager) {
public UploadManager(EventBus eventBus, FileManager fileManager,
MeshManager meshManager, DownloadManager downloadManager) {
this.eventBus = eventBus
this.fileManager = fileManager
this.meshManager = meshManager
this.downloadManager = downloadManager
}
public void processEndpoint(Endpoint e) throws IOException {
public void processGET(Endpoint e) throws IOException {
byte [] infoHashStringBytes = new byte[44]
DataInputStream dis = new DataInputStream(e.getInputStream())
boolean first = true
while(true) {
if (first)
@ -44,8 +54,10 @@ public class UploadManager {
log.info("Responding to upload request for root $infoHashString")
byte [] infoHashRoot = Base64.decode(infoHashString)
InfoHash infoHash = new InfoHash(infoHashRoot)
Set<SharedFile> sharedFiles = fileManager.getSharedFiles(infoHashRoot)
if (sharedFiles == null || sharedFiles.isEmpty()) {
Downloader downloader = downloadManager.downloaders.get(infoHash)
if (downloader == null && (sharedFiles == null || sharedFiles.isEmpty())) {
log.info "file not found"
e.getOutputStream().write("404 File Not Found\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
e.getOutputStream().flush()
@ -61,8 +73,31 @@ public class UploadManager {
return
}
Request request = Request.parse(new InfoHash(infoHashRoot), e.getInputStream())
Uploader uploader = new Uploader(sharedFiles.iterator().next().file, request, e)
ContentRequest request = Request.parseContentRequest(infoHash, e.getInputStream())
if (request.downloader != null && request.downloader.destination != e.destination) {
log.info("Downloader persona doesn't match their destination")
e.close()
return
}
if (request.have > 0)
eventBus.publish(new SourceDiscoveredEvent(infoHash : request.infoHash, source : request.downloader))
Mesh mesh
File file
int pieceSize
if (downloader != null) {
mesh = meshManager.get(infoHash)
file = downloader.incompleteFile
pieceSize = downloader.pieceSizePow2
} else {
SharedFile sharedFile = sharedFiles.iterator().next();
mesh = meshManager.getOrCreate(request.infoHash, sharedFile.NPieces)
file = sharedFile.file
pieceSize = sharedFile.pieceSize
}
Uploader uploader = new ContentUploader(file, request, e, mesh, pieceSize)
eventBus.publish(new UploadEvent(uploader : uploader))
try {
uploader.respond()
@ -70,7 +105,131 @@ public class UploadManager {
eventBus.publish(new UploadFinishedEvent(uploader : uploader))
}
}
}
public void processHashList(Endpoint e) {
byte [] infoHashStringBytes = new byte[44]
DataInputStream dis = new DataInputStream(e.getInputStream())
dis.readFully(infoHashStringBytes)
String infoHashString = new String(infoHashStringBytes, StandardCharsets.US_ASCII)
log.info("Responding to hashlist request for root $infoHashString")
byte [] infoHashRoot = Base64.decode(infoHashString)
InfoHash infoHash = new InfoHash(infoHashRoot)
Downloader downloader = downloadManager.downloaders.get(infoHash)
Set<SharedFile> sharedFiles = fileManager.getSharedFiles(infoHashRoot)
if (downloader == null && (sharedFiles == null || sharedFiles.isEmpty())) {
log.info "file not found"
e.getOutputStream().write("404 File Not Found\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
e.getOutputStream().flush()
e.close()
return
}
byte [] rn = new byte[2]
dis.readFully(rn)
if (rn != "\r\n".getBytes(StandardCharsets.US_ASCII)) {
log.warning("Malformed HASHLIST header")
e.close()
return
}
Request request = Request.parseHashListRequest(infoHash, e.getInputStream())
if (request.downloader != null && request.downloader.destination != e.destination) {
log.info("Downloader persona doesn't match their destination")
e.close()
return
}
InfoHash fullInfoHash
if (downloader == null) {
fullInfoHash = sharedFiles.iterator().next().infoHash
} else {
byte [] hashList = downloader.getInfoHash().getHashList()
if (hashList != null && hashList.length > 0)
fullInfoHash = downloader.getInfoHash()
else {
log.info("infohash not found in downloader")
e.getOutputStream().write("404 File Not Found\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
e.getOutputStream().flush()
e.close()
return
}
}
Uploader uploader = new HashListUploader(e, fullInfoHash, request)
eventBus.publish(new UploadEvent(uploader : uploader))
try {
uploader.respond()
} finally {
eventBus.publish(new UploadFinishedEvent(uploader : uploader))
}
// proceed with content
while(true) {
byte[] get = new byte[4]
dis.readFully(get)
if (get != "GET ".getBytes(StandardCharsets.US_ASCII)) {
log.warning("received a method other than GET on subsequent call")
e.close()
return
}
dis.readFully(infoHashStringBytes)
infoHashString = new String(infoHashStringBytes, StandardCharsets.US_ASCII)
log.info("Responding to upload request for root $infoHashString")
infoHashRoot = Base64.decode(infoHashString)
infoHash = new InfoHash(infoHashRoot)
sharedFiles = fileManager.getSharedFiles(infoHashRoot)
downloader = downloadManager.downloaders.get(infoHash)
if (downloader == null && (sharedFiles == null || sharedFiles.isEmpty())) {
log.info "file not found"
e.getOutputStream().write("404 File Not Found\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
e.getOutputStream().flush()
e.close()
return
}
rn = new byte[2]
dis.readFully(rn)
if (rn != "\r\n".getBytes(StandardCharsets.US_ASCII)) {
log.warning("Malformed GET header")
e.close()
return
}
request = Request.parseContentRequest(new InfoHash(infoHashRoot), e.getInputStream())
if (request.downloader != null && request.downloader.destination != e.destination) {
log.info("Downloader persona doesn't match their destination")
e.close()
return
}
if (request.have > 0)
eventBus.publish(new SourceDiscoveredEvent(infoHash : request.infoHash, source : request.downloader))
Mesh mesh
File file
int pieceSize
if (downloader != null) {
mesh = meshManager.get(infoHash)
file = downloader.incompleteFile
pieceSize = downloader.pieceSizePow2
} else {
SharedFile sharedFile = sharedFiles.iterator().next();
mesh = meshManager.getOrCreate(request.infoHash, sharedFile.NPieces)
file = sharedFile.file
pieceSize = sharedFile.pieceSize
}
uploader = new ContentUploader(file, request, e, mesh, pieceSize)
eventBus.publish(new UploadEvent(uploader : uploader))
try {
uploader.respond()
} finally {
eventBus.publish(new UploadFinishedEvent(uploader : uploader))
}
}
}
}

View File

@ -8,52 +8,32 @@ import java.nio.file.StandardOpenOption
import com.muwire.core.connection.Endpoint
class Uploader {
private final File file
private final Request request
private final Endpoint endpoint
private ByteBuffer mapped
abstract class Uploader {
protected final Endpoint endpoint
protected ByteBuffer mapped
Uploader(File file, Request request, Endpoint endpoint) {
this.file = file
this.request = request
Uploader(Endpoint endpoint) {
this.endpoint = endpoint
}
void respond() {
OutputStream os = endpoint.getOutputStream()
Range range = request.getRange()
if (range.start >= file.length() || range.end >= file.length()) {
os.write("416 Range Not Satisfiable\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
return
}
os.write("200 OK\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Content-Range: $range.start-$range.end\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
FileChannel channel
try {
channel = Files.newByteChannel(file.toPath(), EnumSet.of(StandardOpenOption.READ))
mapped = channel.map(FileChannel.MapMode.READ_ONLY, range.start, range.end - range.start + 1)
byte [] tmp = new byte[0x1 << 13]
while(mapped.hasRemaining()) {
int start = mapped.position()
synchronized(this) {
mapped.get(tmp, 0, Math.min(tmp.length, mapped.remaining()))
}
int read = mapped.position() - start
endpoint.getOutputStream().write(tmp, 0, read)
}
} finally {
try {channel?.close() } catch (IOException ignored) {}
endpoint.getOutputStream().flush()
}
}
abstract void respond()
public synchronized int getPosition() {
if (mapped == null)
return -1
mapped.position()
}
abstract String getName();
/**
* @return an integer between 0 and 100
*/
abstract int getProgress();
abstract String getDownloader();
abstract int getDonePieces();
abstract int getTotalPieces()
}

View File

@ -4,6 +4,8 @@ import java.nio.charset.StandardCharsets
import com.muwire.core.Constants
import net.i2p.data.Base64
class DataUtil {
private final static int MAX_SHORT = (0x1 << 16) - 1
@ -79,4 +81,38 @@ class DataUtil {
}
new String(baos.toByteArray(), StandardCharsets.US_ASCII)
}
public static String encodeXHave(List<Integer> pieces, int totalPieces) {
int bytes = totalPieces / 8
if (totalPieces % 8 != 0)
bytes++
byte[] raw = new byte[bytes]
pieces.each {
int byteIdx = it / 8
int offset = it % 8
int mask = 0x80 >>> offset
raw[byteIdx] |= mask
}
Base64.encode(raw)
}
public static List<Integer> decodeXHave(String xHave) {
byte [] availablePieces = Base64.decode(xHave)
List<Integer> available = new ArrayList<>()
availablePieces.eachWithIndex {b, i ->
for (int j = 0; j < 8 ; j++) {
byte mask = 0x80 >>> j
if ((b & mask) == mask) {
available.add(i * 8 + j)
}
}
}
available
}
public static Exception findRoot(Exception e) {
while(e.getCause() != null)
e = e.getCause()
e
}
}
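A quick round trip of the X-Have encoding above, e.g. for a 10-piece file of which pieces 0, 1 and 9 are downloaded:
def encoded = DataUtil.encodeXHave([0, 1, 9], 10)   // 2-byte bitfield, base64-encoded
assert DataUtil.decodeXHave(encoded) == [0, 1, 9]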

View File

@ -55,21 +55,21 @@ class JULLog extends Log {
@Override
public boolean shouldDebug() {
level.intValue().intValue() >= Level.FINE.intValue()
level.intValue().intValue() <= Level.FINE.intValue()
}
@Override
public boolean shouldInfo() {
level.intValue().intValue() >= Level.INFO.intValue()
level.intValue().intValue() <= Level.INFO.intValue()
}
@Override
public boolean shouldWarn() {
level.intValue().intValue() >= Level.WARNING.intValue()
level.intValue().intValue() <= Level.WARNING.intValue()
}
@Override
public boolean shouldError() {
level.intValue().intValue() >= Level.SEVERE.intValue()
level.intValue().intValue() <= Level.SEVERE.intValue()
}
}

View File

@ -1,6 +1,7 @@
package com.muwire.core;
import java.io.File;
import java.io.IOException;
import java.util.Set;
import net.i2p.data.Destination;
@ -9,8 +10,9 @@ public class DownloadedFile extends SharedFile {
private final Set<Destination> sources;
public DownloadedFile(File file, InfoHash infoHash, Set<Destination> sources) {
super(file, infoHash);
public DownloadedFile(File file, InfoHash infoHash, int pieceSize, Set<Destination> sources)
throws IOException {
super(file, infoHash, pieceSize);
this.sources = sources;
}

View File

@ -7,6 +7,7 @@ import java.util.Arrays;
import java.util.List;
import net.i2p.data.Base32;
import net.i2p.data.Base64;
public class InfoHash {
@ -76,14 +77,16 @@ public class InfoHash {
}
public String toString() {
String rv = "InfoHash[root:"+Base32.encode(root) + " hashList:";
List<String> b32HashList = new ArrayList<>(hashList.length / SIZE);
byte [] tmp = new byte[SIZE];
for (int i = 0; i < hashList.length / SIZE; i++) {
System.arraycopy(hashList, SIZE * i, tmp, 0, SIZE);
b32HashList.add(Base32.encode(tmp));
String rv = "InfoHash[root:"+Base64.encode(root) + " hashList:";
List<String> b64HashList = new ArrayList<>();
if (hashList != null) {
byte [] tmp = new byte[SIZE];
for (int i = 0; i < hashList.length / SIZE; i++) {
System.arraycopy(hashList, SIZE * i, tmp, 0, SIZE);
b64HashList.add(Base64.encode(tmp));
}
}
rv += b32HashList.toString();
rv += b64HashList.toString();
rv += "]";
return rv;
}

View File

@ -1,15 +1,23 @@
package com.muwire.core;
import java.io.File;
import java.io.IOException;
public class SharedFile {
private final File file;
private final InfoHash infoHash;
private final int pieceSize;
public SharedFile(File file, InfoHash infoHash) {
private final String cachedPath;
private final long cachedLength;
public SharedFile(File file, InfoHash infoHash, int pieceSize) throws IOException {
this.file = file;
this.infoHash = infoHash;
this.pieceSize = pieceSize;
this.cachedPath = file.getAbsolutePath();
this.cachedLength = file.length();
}
public File getFile() {
@ -20,4 +28,37 @@ public class SharedFile {
return infoHash;
}
public int getPieceSize() {
return pieceSize;
}
public int getNPieces() {
long length = file.length();
int rawPieceSize = 0x1 << pieceSize;
int rv = (int) (length / rawPieceSize);
if (length % rawPieceSize != 0)
rv++;
return rv;
}
public String getCachedPath() {
return cachedPath;
}
public long getCachedLength() {
return cachedLength;
}
@Override
public int hashCode() {
return file.hashCode() ^ infoHash.hashCode();
}
@Override
public boolean equals(Object o) {
if (!(o instanceof SharedFile))
return false;
SharedFile other = (SharedFile)o;
return file.equals(other.file) && infoHash.equals(other.infoHash);
}
}
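Since pieceSize is stored as a power-of-two exponent, getNPieces() is effectively ceil(length / 2^pieceSize); for example (numbers illustrative), with pieceSize 17 the raw piece size is 131072 bytes, so a 300000-byte file has 2 full pieces plus a remainder and getNPieces() returns 3.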

View File

@ -0,0 +1,11 @@
package com.muwire.core
import net.i2p.data.Base64
class Personas {
private final String encoded1 = "AQADemFiO~pgSoEo8wQfwncYMvBQWkvPY9I7DYUllHp289UE~zBaLdbl~wbliktAUsW-S70f3UeYgHq34~c7zVuUQjgHZ506iG9hX8B9S3a9gQ3CSG0GuDpeNyiXmZkpHp5m8vT9PZ1zMWzxvzZY~fP9yKFKgO4yrso5I9~DGOPeyJZJ4BFsTJDERv41aZqjFLYUBDmeHGgg9RjYy~93h-nQMVYj9JSO3AgowW-ix49rtiKYIXHMa2PxWHUXkUHWJZtIZntNIDEFeMnPdzLxjAl8so2G6pDcTMZPLLwyb73Ee5ZVfxUynPqyp~fIGVP8Rl4rlaGFli2~ATGBz3XY54aObC~0p7us2JnWaTC~oQT5DVDM7gaOO885o-m8BB8b0duzMBelbdnMZFQJ5jIHVKxkC6Niw4fxTOoXTyOqQmVhtK-9xcwxMuN5DF9IewkR5bhpq5rgnfBP5zvyBaAHMq-d3TCOjTsZ-d3liB98xX5p8G5zmS7gfKArQtM5~CcK~AlX-lGLBQAEAAcAAN5MW1Tq983szfZgY1l8tQFqy8I9tdMf7vc1Ktj~TCIvXYw6AYMbMGy3S67FSPLZVmfHEMQKj2KLAdaRKQkHPAY"
private final String encoded2 = "AQAHemxhdGluYiN~3G-hPoBfJ04mhcC52lC6TYSwWxH-WNWno9Y35JS-WrXlnPsodZtwy96ttEaiKTg-hkRqMsaYKpWar1FwayR6qlo0pZCo5pQOLfR7GIM3~wde0JIBEp8BUpgzF1-QXLhuRG1t7tBbenW2tSgp5jQH61RI-c9flyUlOvf6nrhQMZ3aoviZ4aZW23Fx-ajYQBDk7PIxuyn8qYNwWy3kWOhGan05c54NnumS3XCzQWFDDPlADmco1WROeY9qrwwtmLM8lzDCEtJQXJlk~K5yLbyB63hmAeTK7J4iS6f9nnWv7TbB5r-Z3kC6D9TLYrQbu3h4AAxrqso45P8yHQtKUA4QJicS-6NJoBOnlCCU887wx2k9YSxxwNydlIxb1mZsX65Ke4uY0HDFokZHTzUcxvfLB6G~5JkSPDCyZz~2fREgW2-VXu7gokEdEugkuZRrsiQzyfAOOkv53ti5MzTbMOXinBskSb1vZyN2-XcZNaDJvEqUNj~qpfhe-ov2F7FuwQUABAAHAAAfqq-MneIqWBQY92-sy9Z0s~iQsq6lUFa~sYMdY-5o-94fF8a140dm-emF3rO8vuidUIPNaS-37Rl05mAKUCcB"
Persona persona1 = new Persona(new ByteArrayInputStream(Base64.decode(encoded1)))
Persona persona2 = new Persona(new ByteArrayInputStream(Base64.decode(encoded2)))
}

View File

@ -43,6 +43,9 @@ class ConnectionAcceptorTest {
def uploadManagerMock
UploadManager uploadManager
def connectionEstablisherMock
ConnectionEstablisher connectionEstablisher
ConnectionAcceptor acceptor
List<ConnectionEvent> connectionEvents
@ -57,6 +60,7 @@ class ConnectionAcceptorTest {
trustServiceMock = new MockFor(TrustService.class)
searchManagerMock = new MockFor(SearchManager.class)
uploadManagerMock = new MockFor(UploadManager.class)
connectionEstablisherMock = new MockFor(ConnectionEstablisher.class)
}
@After
@ -68,6 +72,7 @@ class ConnectionAcceptorTest {
trustServiceMock.verify trustService
searchManagerMock.verify searchManager
uploadManagerMock.verify uploadManager
connectionEstablisherMock.verify connectionEstablisher
Thread.sleep(100)
}
@ -87,8 +92,10 @@ class ConnectionAcceptorTest {
trustService = trustServiceMock.proxyInstance()
searchManager = searchManagerMock.proxyInstance()
uploadManager = uploadManagerMock.proxyInstance()
connectionEstablisher = connectionEstablisherMock.proxyInstance()
acceptor = new ConnectionAcceptor(eventBus, connectionManager, settings, i2pAcceptor, hostCache, trustService, searchManager, uploadManager)
acceptor = new ConnectionAcceptor(eventBus, connectionManager, settings, i2pAcceptor,
hostCache, trustService, searchManager, uploadManager, connectionEstablisher)
acceptor.start()
Thread.sleep(100)
}
@ -108,6 +115,7 @@ class ConnectionAcceptorTest {
new Endpoint(destinations.dest1, is, os, null)
}
i2pAcceptorMock.demand.accept { Thread.sleep(Integer.MAX_VALUE) }
connectionEstablisherMock.demand.isInProgress(destinations.dest1) { false }
connectionManagerMock.demand.isConnected { dest ->
assert dest == destinations.dest1
false
@ -150,6 +158,7 @@ class ConnectionAcceptorTest {
new Endpoint(destinations.dest1, is, os, null)
}
i2pAcceptorMock.demand.accept { Thread.sleep(Integer.MAX_VALUE) }
connectionEstablisherMock.demand.isInProgress(destinations.dest1) { false }
connectionManagerMock.demand.isConnected { dest ->
assert dest == destinations.dest1
false
@ -264,6 +273,7 @@ class ConnectionAcceptorTest {
new Endpoint(destinations.dest1, is, os, null)
}
i2pAcceptorMock.demand.accept { Thread.sleep(Integer.MAX_VALUE) }
connectionEstablisherMock.demand.isInProgress(destinations.dest1) { false }
connectionManagerMock.demand.isConnected { dest ->
assert dest == destinations.dest1
false
@ -310,6 +320,7 @@ class ConnectionAcceptorTest {
new Endpoint(destinations.dest1, is, os, null)
}
i2pAcceptorMock.demand.accept { Thread.sleep(Integer.MAX_VALUE) }
connectionEstablisherMock.demand.isInProgress(destinations.dest1) { false }
connectionManagerMock.demand.isConnected { dest ->
assert dest == destinations.dest1
false
@ -356,6 +367,7 @@ class ConnectionAcceptorTest {
new Endpoint(destinations.dest1, is, os, null)
}
i2pAcceptorMock.demand.accept { Thread.sleep(Integer.MAX_VALUE) }
connectionEstablisherMock.demand.isInProgress(destinations.dest1) { false }
connectionManagerMock.demand.isConnected { dest ->
assert dest == destinations.dest1
false

View File

@ -1,17 +1,26 @@
package com.muwire.core.download
import static org.junit.Assert.fail
import org.junit.After
import org.junit.Before
import org.junit.Test
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.Personas
import com.muwire.core.connection.Endpoint
import com.muwire.core.files.FileHasher
import static com.muwire.core.util.DataUtil.readTillRN
import static com.muwire.core.util.DataUtil.encodeXHave
import net.i2p.data.Base64
import net.i2p.util.ConcurrentHashSet
class DownloadSessionTest {
private EventBus eventBus
private File source, target
private InfoHash infoHash
private Endpoint endpoint
@ -24,7 +33,17 @@ class DownloadSessionTest {
private InputStream fromDownloader, fromUploader
private OutputStream toDownloader, toUploader
private void initSession(int size) {
private volatile boolean performed
private Set<Integer> available = new ConcurrentHashSet<>()
private volatile IOException thrown
@Before
public void setUp() {
eventBus = new EventBus()
}
private void initSession(int size, def claimedPieces = []) {
Random r = new Random()
byte [] content = new byte[size]
r.nextBytes(content)
@ -48,6 +67,7 @@ class DownloadSessionTest {
else
nPieces = size / pieceSize + 1
pieces = new Pieces(nPieces)
claimedPieces.each {pieces.claimed.set(it)}
fromDownloader = new PipedInputStream()
fromUploader = new PipedInputStream()
@ -55,12 +75,20 @@ class DownloadSessionTest {
toUploader = new PipedOutputStream(fromDownloader)
endpoint = new Endpoint(null, fromUploader, toUploader, null)
session = new DownloadSession(pieces, infoHash, endpoint, target, pieceSize, size)
downloadThread = new Thread( { session.request() } as Runnable)
session = new DownloadSession(eventBus, "",pieces, infoHash, endpoint, target, pieceSize, size, available)
downloadThread = new Thread( { perform() } as Runnable)
downloadThread.setDaemon(true)
downloadThread.start()
}
private void perform() {
try {
performed = session.request()
} catch (IOException e) {
thrown = e
}
}
@After
public void teardown() {
source?.delete()
@ -74,6 +102,8 @@ class DownloadSessionTest {
initSession(20)
assert "GET $rootBase64" == readTillRN(fromDownloader)
assert "Range: 0-19" == readTillRN(fromDownloader)
readTillRN(fromDownloader)
readTillRN(fromDownloader)
assert "" == readTillRN(fromDownloader)
toDownloader.write("200 OK\r\n".bytes)
@ -85,6 +115,9 @@ class DownloadSessionTest {
assert pieces.isComplete()
assert target.bytes == source.bytes
assert performed
assert available.isEmpty()
assert thrown == null
}
@Test
@ -95,6 +128,8 @@ class DownloadSessionTest {
assert "GET $rootBase64" == readTillRN(fromDownloader)
readTillRN(fromDownloader)
readTillRN(fromDownloader)
readTillRN(fromDownloader)
assert "" == readTillRN(fromDownloader)
toDownloader.write("200 OK\r\n".bytes)
@ -105,6 +140,9 @@ class DownloadSessionTest {
Thread.sleep(150)
assert pieces.isComplete()
assert target.bytes == source.bytes
assert performed
assert available.isEmpty()
assert thrown == null
}
@Test
@ -122,6 +160,8 @@ class DownloadSessionTest {
assert (start == 0 && end == ((1 << pieceSize) - 1)) ||
(start == (1 << pieceSize) && end == (1 << pieceSize))
readTillRN(fromDownloader)
readTillRN(fromDownloader)
assert "" == readTillRN(fromDownloader)
toDownloader.write("200 OK\r\n".bytes)
@ -134,5 +174,160 @@ class DownloadSessionTest {
Thread.sleep(150)
assert !pieces.isComplete()
assert 1 == pieces.donePieces()
assert performed
assert available.isEmpty()
assert thrown == null
}
@Test
public void testSmallFileClaimed() {
initSession(20, [0])
long now = System.currentTimeMillis()
downloadThread.join(100)
assert 100 >= (System.currentTimeMillis() - now)
assert !performed
assert available.isEmpty()
assert thrown == null
}
@Test
public void testClaimedPiecesAvoided() {
int pieceSize = FileHasher.getPieceSize(1)
int size = (1 << pieceSize) * 10
initSession(size, [1,2,3,4,5,6,7,8,9])
assert !pieces.claimed.get(0)
assert "GET $rootBase64" == readTillRN(fromDownloader)
String range = readTillRN(fromDownloader)
def matcher = (range =~ /^Range: (\d+)-(\d+)$/)
int start = Integer.parseInt(matcher[0][1])
int end = Integer.parseInt(matcher[0][2])
assert pieces.claimed.get(0)
assert start == 0 && end == (1 << pieceSize) - 1
}
@Test
public void test416NoHave() {
initSession(20)
readAllHeaders(fromDownloader)
toDownloader.write("416 don't have it\r\n\r\n".bytes)
toDownloader.flush()
Thread.sleep(150)
assert !performed
assert available.isEmpty()
assert thrown != null
}
@Test
public void test416Have() {
initSession(20)
readAllHeaders(fromDownloader)
toDownloader.write("416 don't have it\r\n".bytes)
toDownloader.write("X-Have: ${encodeXHave([0], 1)}\r\n\r\n".bytes)
toDownloader.flush()
Thread.sleep(150)
assert performed
assert available.contains(0)
assert thrown == null
}
@Test
public void test416Have2Pieces() {
int pieceSize = FileHasher.getPieceSize(1)
int size = (1 << pieceSize) + 1
initSession(size)
readAllHeaders(fromDownloader)
toDownloader.write("416 don't have it\r\n".bytes)
toDownloader.write("X-Have: ${encodeXHave([1], 2)}\r\n\r\n".bytes)
toDownloader.flush()
Thread.sleep(150)
assert performed
assert available.contains(1)
assert thrown == null
}
@Test
public void test200TwoPieces1Available() {
int pieceSize = FileHasher.getPieceSize(1)
int size = (1 << pieceSize) * 9 + 1
initSession(size)
Set<String> headers = readAllHeaders(fromDownloader)
def matcher = null
headers.each {
if (it.startsWith("Range"))
matcher = (it =~ /^Range: (\d+)-(\d+)$/)
}
assert matcher.groupCount() > 0
int start = Integer.parseInt(matcher[0][1])
int end = Integer.parseInt(matcher[0][2])
if (start == 0)
fail("inconlcusive")
toDownloader.write("416 don't have it \r\n".bytes)
toDownloader.write("X-Have: ${encodeXHave([0],2)}\r\n\r\n".bytes)
toDownloader.flush()
downloadThread.join()
assert performed
performed = false
assert available.contains(0)
assert thrown == null
// request same session
downloadThread = new Thread( { perform() } as Runnable)
downloadThread.setDaemon(true)
downloadThread.start()
Thread.sleep(150)
headers = readAllHeaders(fromDownloader)
matcher = null
headers.each {
if (it.startsWith("Range"))
matcher = (it =~ /^Range: (\d+)-(\d+)$/)
}
assert matcher.groupCount() > 0
start = Integer.parseInt(matcher[0][1])
end = Integer.parseInt(matcher[0][2])
assert start == 0
}
@Test
public void testXAlt() throws Exception {
Personas personas = new Personas()
def sources = []
def listener = new Object() {
public void onSourceDiscoveredEvent(SourceDiscoveredEvent e) {
sources << e.source
}
}
eventBus.register(SourceDiscoveredEvent.class, listener)
initSession(20)
readAllHeaders(fromDownloader)
toDownloader.write("416 don't have it\r\n".bytes)
toDownloader.write("X-Alt: ${personas.persona1.toBase64()},${personas.persona2.toBase64()}\r\n\r\n".bytes)
toDownloader.flush()
Thread.sleep(150)
assert sources.contains(personas.persona1)
assert sources.contains(personas.persona2)
assert 2 == sources.size()
}
private static Set<String> readAllHeaders(InputStream is) {
Set<String> rv = new HashSet<>()
String header
while((header = readTillRN(is)) != "")
rv.add(header)
rv
}
}

View File

@ -16,7 +16,7 @@ class PiecesTest {
public void testSinglePiece() {
pieces = new Pieces(1)
assert !pieces.isComplete()
assert pieces.getRandomPiece() == 0
assert pieces.claim() == 0
pieces.markDownloaded(0)
assert pieces.isComplete()
}
@ -25,13 +25,28 @@ class PiecesTest {
public void testTwoPieces() {
pieces = new Pieces(2)
assert !pieces.isComplete()
int piece = pieces.getRandomPiece()
int piece = pieces.claim()
assert piece == 0 || piece == 1
pieces.markDownloaded(piece)
assert !pieces.isComplete()
int piece2 = pieces.getRandomPiece()
int piece2 = pieces.claim()
assert piece != piece2
pieces.markDownloaded(piece2)
assert pieces.isComplete()
}
@Test
public void testClaimAvailable() {
pieces = new Pieces(2)
int claimed = pieces.claim([0].toSet())
assert claimed == 0
assert -1 == pieces.claim([0].toSet())
}
@Test
public void testClaimNoneAvailable() {
pieces = new Pieces(20)
int claimed = pieces.claim()
assert -1 == pieces.claim([claimed].toSet())
}
}

View File

@ -24,9 +24,9 @@ class FileHasherTest extends GroovyTestCase {
@Test
void testPieceSize() {
assert 18 == FileHasher.getPieceSize(1000000)
assert 20 == FileHasher.getPieceSize(100000000)
assert 30 == FileHasher.getPieceSize(FileHasher.MAX_SIZE)
assert 17 == FileHasher.getPieceSize(1000000)
assert 17 == FileHasher.getPieceSize(100000000)
assert 24 == FileHasher.getPieceSize(FileHasher.MAX_SIZE)
shouldFail IllegalArgumentException, {
FileHasher.getPieceSize(Long.MAX_VALUE)
}
@ -48,7 +48,7 @@ class FileHasherTest extends GroovyTestCase {
fos.write b
fos.close()
def ih = hasher.hashFile tmp
assert ih.getHashList().length == 32
assert ih.getHashList().length == 64
}
@Test
@ -58,7 +58,7 @@ class FileHasherTest extends GroovyTestCase {
fos.write b
fos.close()
def ih = hasher.hashFile tmp
assert ih.getHashList().length == 64
assert ih.getHashList().length == 96
}
@Test
@ -68,7 +68,7 @@ class FileHasherTest extends GroovyTestCase {
fos.write b
fos.close()
def ih = hasher.hashFile tmp
assert ih.getHashList().length == 64
assert ih.getHashList().length == 128
}
@Test
@ -78,6 +78,6 @@ class FileHasherTest extends GroovyTestCase {
fos.write b
fos.close()
def ih = hasher.hashFile tmp
assert ih.getHashList().length == 32 * 3
assert ih.getHashList().length == 160
}
}

View File

@ -5,6 +5,7 @@ import org.junit.Test
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.SharedFile
import com.muwire.core.search.ResultsEvent
import com.muwire.core.search.SearchEvent
@ -26,7 +27,7 @@ class FileManagerTest {
void before() {
eventBus = new EventBus()
eventBus.register(ResultsEvent.class, listener)
manager = new FileManager(eventBus)
manager = new FileManager(eventBus, new MuWireSettings())
results = null
}
@ -34,7 +35,7 @@ class FileManagerTest {
void testHash1Result() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih)
SharedFile sf = new SharedFile(f,ih, 0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -53,8 +54,8 @@ class FileManagerTest {
@Test
void testHash2Results() {
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(new File("a b.c"), ih)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih)
SharedFile sf1 = new SharedFile(new File("a b.c"), ih, 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf1)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf2)
@ -75,7 +76,7 @@ class FileManagerTest {
void testHash0Results() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih)
SharedFile sf = new SharedFile(f,ih, 0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -89,7 +90,7 @@ class FileManagerTest {
void testKeyword1Result() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih)
SharedFile sf = new SharedFile(f,ih,0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -107,12 +108,12 @@ class FileManagerTest {
void testKeyword2Results() {
File f1 = new File("a b.c")
InfoHash ih1 = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(f1, ih1)
SharedFile sf1 = new SharedFile(f1, ih1, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf1)
File f2 = new File("c d.e")
InfoHash ih2 = InfoHash.fromHashList(new byte[64])
SharedFile sf2 = new SharedFile(f2, ih2)
SharedFile sf2 = new SharedFile(f2, ih2, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf2)
UUID uuid = UUID.randomUUID()
@ -130,7 +131,7 @@ class FileManagerTest {
void testKeyword0Results() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih)
SharedFile sf = new SharedFile(f,ih,0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -143,8 +144,8 @@ class FileManagerTest {
@Test
void testRemoveFileExistingHash() {
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(new File("a b.c"), ih)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih)
SharedFile sf1 = new SharedFile(new File("a b.c"), ih, 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf1)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf2)
@ -161,12 +162,12 @@ class FileManagerTest {
void testRemoveFile() {
File f1 = new File("a b.c")
InfoHash ih1 = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(f1, ih1)
SharedFile sf1 = new SharedFile(f1, ih1, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf1)
File f2 = new File("c d.e")
InfoHash ih2 = InfoHash.fromHashList(new byte[64])
SharedFile sf2 = new SharedFile(f2, ih2)
SharedFile sf2 = new SharedFile(f2, ih2, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf2)
manager.onFileUnsharedEvent new FileUnsharedEvent(unsharedFile: sf2)

View File

@ -8,6 +8,7 @@ import org.junit.Before
import org.junit.Test
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
class HasherServiceTest {
@ -24,8 +25,9 @@ class HasherServiceTest {
void before() {
eventBus = new EventBus()
hasher = new FileHasher()
service = new HasherService(hasher, eventBus)
service = new HasherService(hasher, eventBus, new FileManager(eventBus, new MuWireSettings()))
eventBus.register(FileHashedEvent.class, listener)
eventBus.register(FileSharedEvent.class, service)
service.start()
}

View File

@ -78,7 +78,7 @@ class PersisterServiceLoadingTest {
persisted.write json
PersisterService ps = new PersisterService(persisted, eventBus, 100, null)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(2000)
assert listener.publishedFiles.size() == 1
@ -99,7 +99,7 @@ class PersisterServiceLoadingTest {
FileHasher fh = new FileHasher()
InfoHash ih1 = fh.hashFile(sharedFile1)
assert ih1.getHashList().length == 2 * 32
assert ih1.getHashList().length == 96
def json = [:]
json.file = getSharedFileJsonName(sharedFile1)
@ -111,7 +111,9 @@ class PersisterServiceLoadingTest {
String hash1 = Base64.encode(tmp)
System.arraycopy(ih1.getHashList(), 32, tmp, 0, 32)
String hash2 = Base64.encode(tmp)
json.hashList = [hash1, hash2]
System.arraycopy(ih1.getHashList(), 64, tmp, 0, 32)
String hash3 = Base64.encode(tmp)
json.hashList = [hash1, hash2, hash3]
json = JsonOutput.toJson(json)
@ -119,7 +121,7 @@ class PersisterServiceLoadingTest {
persisted.write json
PersisterService ps = new PersisterService(persisted, eventBus, 100, null)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(2000)
assert listener.publishedFiles.size() == 1
@ -161,7 +163,7 @@ class PersisterServiceLoadingTest {
persisted.append "$json2\n"
PersisterService ps = new PersisterService(persisted, eventBus, 100, null)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(2000)
assert listener.publishedFiles.size() == 2
@ -193,7 +195,7 @@ class PersisterServiceLoadingTest {
persisted.write json1
PersisterService ps = new PersisterService(persisted, eventBus, 100, null)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(2000)
assert listener.publishedFiles.size() == 1

View File

@ -8,6 +8,7 @@ import com.muwire.core.Destinations
import com.muwire.core.DownloadedFile
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.SharedFile
import com.muwire.core.util.DataUtil
@ -31,7 +32,7 @@ class PersisterServiceSavingTest {
f = new File("build.gradle")
f = f.getCanonicalFile()
ih = fh.hashFile(f)
fileSource = new FileManager(eventBus) {
fileSource = new FileManager(eventBus, new MuWireSettings()) {
Map<File, SharedFile> getSharedFiles() {
Map<File, SharedFile> rv = new HashMap<>()
rv.put(f, sf)
@ -54,10 +55,10 @@ class PersisterServiceSavingTest {
@Test
void testSavingSharedFile() {
sf = new SharedFile(f, ih)
sf = new SharedFile(f, ih, 0)
ps = new PersisterService(persisted, eventBus, 100, fileSource)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(1500)
JsonSlurper jsonSlurper = new JsonSlurper()
@ -73,10 +74,10 @@ class PersisterServiceSavingTest {
@Test
void testSavingDownloadedFile() {
Destinations dests = new Destinations()
sf = new DownloadedFile(f, ih, new HashSet([dests.dest1, dests.dest2]))
sf = new DownloadedFile(f, ih, 0, new HashSet([dests.dest1, dests.dest2]))
ps = new PersisterService(persisted, eventBus, 100, fileSource)
ps.start()
ps.onUILoadedEvent(null)
Thread.sleep(1500)
JsonSlurper jsonSlurper = new JsonSlurper()

View File

@ -30,7 +30,18 @@ class SearchIndexTest {
assert found.size() == 2
assert found.contains("a b.c")
assert found.contains("c d.e")
}
@Test
public void testDrillDownDoesNotModifyIndex() {
initIndex(["a b.c", "c d.e"])
index.search(["c","e"])
def found = index.search(["c"])
assert found.size() == 2
assert found.contains("a b.c")
assert found.contains("c d.e")
}
@Test
void testDrillDown() {
@ -72,4 +83,11 @@ class SearchIndexTest {
assert found.size() == 1
assert found.contains("b c.d")
}
@Test
void testDuplicateTerm() {
initIndex(["MuWire-0.3.3.jar"])
def found = index.search(["muwire", "0", "3", "jar"])
assert found.size() == 1
}
}

View File

@ -5,14 +5,17 @@ import org.junit.Before
import org.junit.Test
import com.muwire.core.Destinations
import com.muwire.core.Persona
import com.muwire.core.Personas
import net.i2p.data.Base64
import net.i2p.data.Destination
class TrustServiceTest {
TrustService service
File persistGood, persistBad
Destinations dests = new Destinations()
Personas personas = new Personas()
@Before
void before() {
@ -33,51 +36,50 @@ class TrustServiceTest {
@Test
void testEmpty() {
assert TrustLevel.NEUTRAL == service.getLevel(dests.dest1)
assert TrustLevel.NEUTRAL == service.getLevel(dests.dest2)
assert TrustLevel.NEUTRAL == service.getLevel(personas.persona1.destination)
assert TrustLevel.NEUTRAL == service.getLevel(personas.persona2.destination)
}
@Test
void testOnEvent() {
service.onTrustEvent new TrustEvent(level: TrustLevel.TRUSTED, destination: dests.dest1)
service.onTrustEvent new TrustEvent(level: TrustLevel.DISTRUSTED, destination: dests.dest2)
service.onTrustEvent new TrustEvent(level: TrustLevel.TRUSTED, persona: personas.persona1)
service.onTrustEvent new TrustEvent(level: TrustLevel.DISTRUSTED, persona: personas.persona2)
assert TrustLevel.TRUSTED == service.getLevel(dests.dest1)
assert TrustLevel.DISTRUSTED == service.getLevel(dests.dest2)
assert TrustLevel.TRUSTED == service.getLevel(personas.persona1.destination)
assert TrustLevel.DISTRUSTED == service.getLevel(personas.persona2.destination)
}
@Test
void testPersist() {
service.onTrustEvent new TrustEvent(level: TrustLevel.TRUSTED, destination: dests.dest1)
service.onTrustEvent new TrustEvent(level: TrustLevel.DISTRUSTED, destination: dests.dest2)
service.onTrustEvent new TrustEvent(level: TrustLevel.TRUSTED, persona: personas.persona1)
service.onTrustEvent new TrustEvent(level: TrustLevel.DISTRUSTED, persona: personas.persona2)
Thread.sleep(250)
def trusted = new HashSet<>()
persistGood.eachLine {
trusted.add(new Destination(it))
trusted.add(new Persona(new ByteArrayInputStream(Base64.decode(it))))
}
def distrusted = new HashSet<>()
persistBad.eachLine {
distrusted.add(new Destination(it))
distrusted.add(new Persona(new ByteArrayInputStream(Base64.decode(it))))
}
assert trusted.size() == 1
assert trusted.contains(dests.dest1)
assert trusted.contains(personas.persona1)
assert distrusted.size() == 1
assert distrusted.contains(dests.dest2)
assert distrusted.contains(personas.persona2)
}
@Test
void testLoad() {
service.stop()
persistGood.append("${dests.dest1.toBase64()}\n")
persistBad.append("${dests.dest2.toBase64()}\n")
persistGood.append("${personas.persona1.toBase64()}\n")
persistBad.append("${personas.persona2.toBase64()}\n")
service = new TrustService(persistGood, persistBad, 100)
service.start()
Thread.sleep(10)
Thread.sleep(50)
assert TrustLevel.TRUSTED == service.getLevel(dests.dest1)
assert TrustLevel.DISTRUSTED == service.getLevel(dests.dest2)
assert TrustLevel.TRUSTED == service.getLevel(personas.persona1.destination)
assert TrustLevel.DISTRUSTED == service.getLevel(personas.persona2.destination)
}
}

View File

@ -9,18 +9,18 @@ import com.muwire.core.InfoHash
class RequestParsingTest {
Request request
ContentRequest request
private void fromString(String requestString) {
def is = new ByteArrayInputStream(requestString.getBytes(StandardCharsets.US_ASCII))
request = Request.parse(new InfoHash(new byte[InfoHash.SIZE]), is)
request = Request.parseContentRequest(new InfoHash(new byte[InfoHash.SIZE]), is)
}
private static void failed(String requestString) {
try {
def is = new ByteArrayInputStream(requestString.getBytes(StandardCharsets.US_ASCII))
Request.parse(new InfoHash(new byte[InfoHash.SIZE]), is)
Request.parseContentRequest(new InfoHash(new byte[InfoHash.SIZE]), is)
assert false
} catch (IOException expected) {}
}

View File

@ -19,7 +19,7 @@ class UploaderTest {
InputStream is
OutputStream os
Request request
ContentRequest request
Uploader uploader
byte[] inFile
@ -52,7 +52,7 @@ class UploaderTest {
}
private void startUpload() {
uploader = new Uploader(file, request, endpoint)
uploader = new ContentUploader(file, request, endpoint)
uploadThread = new Thread(uploader.respond() as Runnable)
uploadThread.setDaemon(true)
uploadThread.start()
@ -77,7 +77,7 @@ class UploaderTest {
@Test
public void testSmallFile() {
fillFile(20)
request = new Request(range : new Range(0,19))
request = new ContentRequest(range : new Range(0,19))
startUpload()
assert "200 OK" == readUntilRN()
assert "Content-Range: 0-19" == readUntilRN()
@ -92,7 +92,7 @@ class UploaderTest {
@Test
public void testRequestMiddle() {
fillFile(20)
request = new Request(range : new Range(5,15))
request = new ContentRequest(range : new Range(5,15))
startUpload()
assert "200 OK" == readUntilRN()
assert "Content-Range: 5-15" == readUntilRN()
@ -108,7 +108,7 @@ class UploaderTest {
@Test
public void testOutOfRange() {
fillFile(20)
request = new Request(range : new Range(0,20))
request = new ContentRequest(range : new Range(0,20))
startUpload()
assert "416 Range Not Satisfiable" == readUntilRN()
assert "" == readUntilRN()
@ -118,7 +118,7 @@ class UploaderTest {
public void testLargeFile() {
final int length = 0x1 << 14
fillFile(length)
request = new Request(range : new Range(0, length - 1))
request = new ContentRequest(range : new Range(0, length - 1))
startUpload()
readUntilRN()
readUntilRN()

View File

@ -49,7 +49,7 @@ Files are transferred over HTTP1.1 protocol with some custom headers added for d
### Mesh management
Download mesh management is identical to Gnutella, except instead of ip addresses MuWire personas are used. [More information](http://rfc-gnutella.sourceforge.net/developer/tmp/download-mesh.html)
Download mesh management is a simplified version of Gnutella's "Alternate Location" system. For more information see the "download-mesh" document.
### In-Network updates

15
doc/download-mesh.md Normal file
View File

@ -0,0 +1,15 @@
# Download Mesh / Partial Sharing
MuWire uses a system similar to Gnutella's "Alternate Location" download mesh management system; however, it is simplified to account for I2P's strengths and borrows a bit from BitTorrent's "Have" message.
### "X-Have" header
With every request, the downloader sends an "X-Have" header containing the Base64-encoded representation of a bitfield in which bits set to 1 mark the pieces of the file the downloader already has. To make partial file sharing possible, an uploader that does not have the complete file also sends this header in every response. If the header is missing, the uploader is assumed to have the complete file.
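As an illustration only, the Groovy sketch below shows one way such a header value could be built from the piece indices a downloader holds; the helper name and the bit order within each byte are assumptions, and this is not the actual DataUtil.encodeXHave implementation.

```groovy
import net.i2p.data.Base64

// Minimal sketch (not the actual DataUtil.encodeXHave) of building an X-Have
// value from the piece indices a downloader already has. The bit order within
// each byte (most-significant bit = lowest piece index) is an assumption.
String xHave(Collection<Integer> pieces, int nPieces) {
    byte[] bitfield = new byte[(nPieces + 7).intdiv(8)]  // one bit per piece, rounded up
    pieces.each { int i ->
        int idx = i.intdiv(8)
        int updated = (bitfield[idx] as int) | (0x80 >> (i % 8))
        bitfield[idx] = (byte) updated
    }
    Base64.encode(bitfield)
}

// e.g. a downloader holding piece 0 of a 2-piece file sends "X-Have: ${xHave([0], 2)}"
```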
### "X-Alt" header
The uploader can recommend other uploaders to the downloader via the "X-Alt" header. The value of this header is a comma-separated list of Base64-encoded Personas that have previously reported to the uploader, via the "X-Have" header, that they have at least one piece of the file.
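A rough sketch of the downloader side, reusing the Persona deserialization seen in the tests in this diff; the helper name and everything beyond the comma split are assumptions.

```groovy
import com.muwire.core.Persona
import net.i2p.data.Base64

// Sketch of parsing an X-Alt value into Personas; header handling beyond the
// comma split is assumed, not taken from the MuWire sources.
List<Persona> parseXAlt(String value) {
    value.split(",").findAll { it.trim().length() > 0 }.collect {
        new Persona(new ByteArrayInputStream(Base64.decode(it.trim())))
    }
}
```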
### Differences from Gnutella
Unlike Gnutella, the uploader is the sole repository where possible sources of the file are tracked. There is no negative "X-Nalt" header, which prevents attacks on the download mesh through mass downvoting of sources.

37
doc/infohash-upgrade.md Normal file
View File

@ -0,0 +1,37 @@
# InfoHash Upgrade
An infohash is a list of hashes of the pieces of a file. In MuWire 0.1.0 the piece size is determined by a policy based on the file size, with the intention of keeping the list of hashes to a maximum of 128 entries. The reason for this is that infohashes get returned with search results, and a smaller piece size results in a larger infohash, which slows down the transmission of search results.
### The problem
This presents the following problem: larger files have larger piece sizes. A 2GB file will have a 16MB piece size, a 4GB file 32MB, and so on. Pieces are atomic, i.e. if a download fails halfway through a piece it resumes from the beginning of that piece. Unfortunately, in the current state of I2P the failure rate of streaming connections is too high, and transmitting an entire piece over a single connection becomes less likely to succeed as the piece size grows. This makes downloading multi-gigabyte files nearly impossible.
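A quick check of the arithmetic above, treating 2GB as 2^31 bytes and assuming the 128-piece cap described in the previous section:

```groovy
// Worked check of the numbers above: with at most 128 (2^7) pieces,
// a 2GB (2^31 byte) file needs 2^24-byte (16MB) pieces, a 4GB file 32MB pieces.
long twoGB = 1L << 31
assert twoGB.intdiv(128) == (1L << 24)        // 16MB pieces for a 2GB file
assert (twoGB * 2).intdiv(128) == (1L << 25)  // 32MB pieces for a 4GB file
```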
### Out-of-band request proposal
Barring any improvement to the reliability of I2P connections, the following approach can be used to enable smaller piece sizes and the corresponding increase in the download success rate of large files:
* Search results do not carry the full infohash of the file; instead, they carry just the root and the number of 32-byte hashes in the infohash
* When a downloader begins a download, it issues a request for the full infohash first. Only after that is fetched and verified does the download proceed as usual.
Such an approach is more complicated than the current simplistic one, but it has the additional benefit of reducing the size of search results.
### Wire protocol changes
A new request method, "HASHLIST", is introduced. It is immediately followed by the Base64-encoded root of the infohash and '\r\n'. The request may contain HTTP headers in the same format as HTTP 1.1; one such header may be the optional X-Persona header. After all the headers, a blank line with just '\r\n' is sent.
The response is identical to that of a regular GET request, with the same response codes. The response may also carry headers and is likewise followed by a blank line with '\r\n'. Immediately after that comes the binary representation of the hashlist. After sending the full hashlist, the uploader keeps the connection open in anticipation of the first content GET request.
The downloader verifies the hashlist by hashing it with SHA-256 and comparing the result to the advertised infohash root. If there is a match, it proceeds with the rest of the download as in MuWire 0.1.0.
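A minimal sketch of the downloader side of this exchange, with the stream plumbing assumed and the helper names hypothetical; only the request framing and the SHA-256 check come from the description above.

```groovy
import java.security.MessageDigest
import net.i2p.data.Base64

// Send a HASHLIST request: method + Base64 root, optional headers omitted,
// then the blank line that ends the request.
void requestHashList(OutputStream os, byte[] root) {
    os.write("HASHLIST ${Base64.encode(root)}\r\n".bytes)
    os.write("\r\n".bytes)
    os.flush()
}

// Accept the fetched hashlist only if its SHA-256 digest matches the
// advertised infohash root.
boolean verifyHashList(byte[] hashList, byte[] advertisedRoot) {
    byte[] digest = MessageDigest.getInstance("SHA-256").digest(hashList)
    MessageDigest.isEqual(digest, advertisedRoot)
}
```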
### Necessary changes to MuWire 0.1.0
To accommodate this proposal in a backwards-compatible manner, it is first necessary to de-hardcode the piece-count computation logic, which is currently hardcoded in a few places. Then it is necessary to:
* persist the piece size to disk when a file is being shared so that it can be returned in search results
* have search queries carry a flag of some kind that indicates support for out-of-band infohash delivery
* that in turn requires nodes to pass that flag along as queries are routed through the network
* have the returned results indicate whether they carry a full infohash or just a root; the "version" field in the JSON can be used for that
### Roadmap
Support for this proposal is currently targeted at MuWire 0.2.0. However, to make the rollout smooth, support for the first two items will be introduced in MuWire 0.1.1. Since there are already users on the network who have shared files without persisting the piece size to disk, those files will not be eligible to participate in this scheme unless they are re-shared (which implies re-hashing).

View File

@ -131,12 +131,18 @@ Sent by a leaf or ultrapeer when performing a search. Contains the reply-to per
firstHop: false,
keywords : ["keyword1","keyword2"...]
infohash: "asdfasdf...",
replyTo : "asdfasf...b64"
replyTo : "asdfasf...b64",
originator : "asfasdf...",
"oobHashlist" : true
}
```
A search can contain either the query entered by the user in the UI or the infohash if the user is looking for a specific file. If both are present, the infohash takes precedence and the keyword query is ignored.
The "originator" field contains the Base64-encoded persona of the originator of the query. It is used for display purposes only. The I2P destination in that persona must match the one in the "replyTo" field.
The oobHashlist flag indicates support for out-of-band hashlist delivery, which is not yet implemented. Nevertheless, this flag gets propagated through the network for future-proofing.
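For illustration, a query like the one above could be assembled as follows; the values are the placeholders from the example, and this is a sketch rather than the code MuWire actually uses.

```groovy
import groovy.json.JsonOutput

// Sketch of assembling the query JSON shown above; the values are the
// placeholders from the example, not real Base64 payloads.
def query = [
    firstHop    : false,
    keywords    : ["keyword1", "keyword2"], // ignored by responders when an infohash is present
    infohash    : "asdfasdf...",            // Base64 root, only for hash searches
    replyTo     : "asdfasf...b64",
    originator  : "asfasdf...",             // display only; its destination must match replyTo
    oobHashlist : true
]
println JsonOutput.toJson(query)
```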
### Ultrapeer to leaf
The "Search" message is also sent from an ultrapeer to a leaf.
@ -175,6 +181,8 @@ Search results are sent through an HTTP POST method from the responder to the o
* The "altlocs" list contains list of alternate personas that the responder thinks may also have the file.
* The "pieceSize" field is the size of the each individual file piece (except possibly the last) in powers of 2
Results version 1 contain the full hashlist, version 2 does not contain that list. See the "infohash-upgrade" document for more information.
### "Who do you trust" query - any node to any node
(See the "web-of-trust" document for more info on this query)

View File

@ -1,5 +1,8 @@
group = com.muwire
version = 0.0.3
version = 0.4.3
groovyVersion = 2.4.15
slf4jVersion = 1.7.25
spockVersion = 1.1-groovy-2.4
sourceCompatibility=1.8
targetCompatibility=1.8

View File

@ -41,6 +41,7 @@ griffon {
}
mainClassName = 'com.muwire.gui.Launcher'
applicationDefaultJvmArgs = ['-Djava.util.logging.config.file=logging.properties']
apply from: 'gradle/publishing.gradle'
apply from: 'gradle/code-coverage.gradle'
@ -58,6 +59,7 @@ dependencies {
compile "org.codehaus.griffon:griffon-guice:${griffon.version}"
runtime "org.slf4j:slf4j-simple:${slf4jVersion}"
runtime "javax.annotation:javax.annotation-api:1.3.2"
testCompile "org.codehaus.griffon:griffon-fest-test:${griffon.version}"
testCompile "org.spockframework:spock-core:${spockVersion}"

View File

@ -21,4 +21,19 @@ mvcGroups {
view = 'com.muwire.gui.SearchTabView'
controller = 'com.muwire.gui.SearchTabController'
}
}
'Options' {
model = 'com.muwire.gui.OptionsModel'
view = 'com.muwire.gui.OptionsView'
controller = 'com.muwire.gui.OptionsController'
}
"mu-wire-status" {
model = 'com.muwire.gui.MuWireStatusModel'
view = 'com.muwire.gui.MuWireStatusView'
controller = 'com.muwire.gui.MuWireStatusController'
}
'i-2-p-status' {
model = 'com.muwire.gui.I2PStatusModel'
view = 'com.muwire.gui.I2PStatusView'
controller = 'com.muwire.gui.I2PStatusController'
}
}

View File

@ -0,0 +1,40 @@
package com.muwire.gui
import griffon.core.artifact.GriffonController
import griffon.core.controller.ControllerAction
import griffon.inject.MVCMember
import griffon.metadata.ArtifactProviderFor
import net.i2p.router.Router
import javax.annotation.Nonnull
import com.muwire.core.Core
@ArtifactProviderFor(GriffonController)
class I2PStatusController {
@MVCMember @Nonnull
I2PStatusModel model
@MVCMember @Nonnull
I2PStatusView view
@ControllerAction
void refresh() {
Core core = application.context.get("core")
Router router = core.router
model.networkStatus = router._context.commSystem().status.toStatusString()
model.ntcpConnections = router._context.commSystem().getTransports()["NTCP"].countPeers()
model.ssuConnections = router._context.commSystem().getTransports()["SSU"].countPeers()
model.participatingTunnels = router._context.tunnelManager().getParticipatingCount()
model.activePeers = router._context.profileOrganizer().countActivePeers()
model.receiveBps = router._context.bandwidthLimiter().getReceiveBps15s()
model.sendBps = router._context.bandwidthLimiter().getSendBps15s()
model.participatingBW = router._context.bandwidthLimiter().getCurrentParticipatingBandwidth()
}
@ControllerAction
void close() {
view.dialog.setVisible(false)
mvcGroup.destroy()
}
}

View File

@ -7,12 +7,19 @@ import griffon.core.mvc.MVCGroup
import griffon.core.mvc.MVCGroupConfiguration
import griffon.inject.MVCMember
import griffon.metadata.ArtifactProviderFor
import net.i2p.data.Base64
import javax.annotation.Nonnull
import javax.inject.Inject
import com.muwire.core.Constants
import com.muwire.core.Core
import com.muwire.core.download.DownloadStartedEvent
import com.muwire.core.download.UIDownloadCancelledEvent
import com.muwire.core.download.UIDownloadEvent
import com.muwire.core.download.UIDownloadPausedEvent
import com.muwire.core.download.UIDownloadResumedEvent
import com.muwire.core.files.DirectoryUnsharedEvent
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.SearchEvent
import com.muwire.core.trust.TrustEvent
@ -31,17 +38,64 @@ class MainFrameController {
@ControllerAction
void search() {
def cardsPanel = builder.getVariable("cards-panel")
cardsPanel.getLayout().show(cardsPanel, "search window")
def search = builder.getVariable("search-field").text
search = search.trim()
if (search.length() == 0)
return
if (search.length() > 128)
search = search.substring(0,128)
def uuid = UUID.randomUUID()
Map<String, Object> params = new HashMap<>()
params["search-terms"] = search
params["uuid"] = uuid.toString()
def group = mvcGroup.createMVCGroup("SearchTab", uuid.toString(), params)
model.results[uuid.toString()] = group
boolean hashSearch = false
byte [] root = null
if (search.length() == 44 && search.indexOf(" ") < 0) {
try {
root = Base64.decode(search)
hashSearch = true
} catch (Exception e) {
// not a hash search
}
}
def searchEvent = new SearchEvent(searchTerms : [search], uuid : uuid)
def searchEvent
if (hashSearch) {
searchEvent = new SearchEvent(searchHash : root, uuid : uuid, oobInfohash: true)
} else {
// this can be improved a lot
def replaced = search.toLowerCase().trim().replaceAll(Constants.SPLIT_PATTERN, " ")
def terms = replaced.split(" ")
def nonEmpty = []
terms.each { if (it.length() > 0) nonEmpty << it }
searchEvent = new SearchEvent(searchTerms : nonEmpty, uuid : uuid, oobInfohash: true)
}
core.eventBus.publish(new QueryEvent(searchEvent : searchEvent, firstHop : true,
replyTo: core.me.destination, receivedOn: core.me.destination))
replyTo: core.me.destination, receivedOn: core.me.destination,
originator : core.me))
}
void search(String infoHash, String tabTitle) {
def cardsPanel = builder.getVariable("cards-panel")
cardsPanel.getLayout().show(cardsPanel, "search window")
def uuid = UUID.randomUUID()
Map<String, Object> params = new HashMap<>()
params["search-terms"] = tabTitle
params["uuid"] = uuid.toString()
def group = mvcGroup.createMVCGroup("SearchTab", uuid.toString(), params)
model.results[uuid.toString()] = group
def searchEvent = new SearchEvent(searchHash : Base64.decode(infoHash), uuid:uuid,
oobInfohash: true)
core.eventBus.publish(new QueryEvent(searchEvent : searchEvent, firstHop : true,
replyTo: core.me.destination, receivedOn: core.me.destination,
originator : core.me))
}
private def selectedResult() {
@ -49,17 +103,42 @@ class MainFrameController {
def group = selected.getClientProperty("mvc-group")
def table = selected.getClientProperty("results-table")
int row = table.getSelectedRow()
if (row == -1)
return
def sortEvt = group.view.lastSortEvent
if (sortEvt != null) {
row = group.view.resultsTable.rowSorter.convertRowIndexToModel(row)
}
group.model.results[row]
}
private int selectedDownload() {
def downloadsTable = builder.getVariable("downloads-table")
def selected = downloadsTable.getSelectedRow()
def sortEvt = mvcGroup.view.lastDownloadSortEvent
if (sortEvt != null)
selected = downloadsTable.rowSorter.convertRowIndexToModel(selected)
selected
}
@ControllerAction
void download() {
def result = selectedResult()
if (result == null)
return // TODO disable button
def file = new File(application.context.get("muwire-settings").downloadLocation, result.name)
core.eventBus.publish(new UIDownloadEvent(result : result, target : file))
return
if (!model.canDownload(result.infohash))
return
def file = new File(application.context.get("muwire-settings").downloadLocation, result.name)
def selected = builder.getVariable("result-tabs").getSelectedComponent()
def group = selected.getClientProperty("mvc-group")
def resultsBucket = group.model.hashBucket[result.infohash]
def sources = group.model.sourcesBucket[result.infohash]
core.eventBus.publish(new UIDownloadEvent(result : resultsBucket, sources: sources, target : file))
}
@ControllerAction
@ -67,7 +146,7 @@ class MainFrameController {
def result = selectedResult()
if (result == null)
return // TODO disable button
core.eventBus.publish( new TrustEvent(destination : result.sender.destination, level : TrustLevel.TRUSTED))
core.eventBus.publish( new TrustEvent(persona : result.sender, level : TrustLevel.TRUSTED))
}
@ControllerAction
@ -75,7 +154,79 @@ class MainFrameController {
def result = selectedResult()
if (result == null)
return // TODO disable button
core.eventBus.publish( new TrustEvent(destination : result.sender.destination, level : TrustLevel.DISTRUSTED))
core.eventBus.publish( new TrustEvent(persona : result.sender, level : TrustLevel.DISTRUSTED))
}
@ControllerAction
void cancel() {
def downloader = model.downloads[selectedDownload()].downloader
downloader.cancel()
model.downloadInfoHashes.remove(downloader.getInfoHash())
core.eventBus.publish(new UIDownloadCancelledEvent(downloader : downloader))
}
@ControllerAction
void resume() {
def downloader = model.downloads[selectedDownload()].downloader
downloader.resume()
core.eventBus.publish(new UIDownloadResumedEvent())
}
@ControllerAction
void pause() {
def downloader = model.downloads[selectedDownload()].downloader
downloader.pause()
core.eventBus.publish(new UIDownloadPausedEvent())
}
private void markTrust(String tableName, TrustLevel level, def list) {
int row = builder.getVariable(tableName).getSelectedRow()
if (row < 0)
return
core.eventBus.publish(new TrustEvent(persona : list[row], level : level))
}
@ControllerAction
void markTrusted() {
markTrust("distrusted-table", TrustLevel.TRUSTED, model.distrusted)
}
@ControllerAction
void markNeutralFromDistrusted() {
markTrust("distrusted-table", TrustLevel.NEUTRAL, model.distrusted)
}
@ControllerAction
void markDistrusted() {
markTrust("trusted-table", TrustLevel.DISTRUSTED, model.trusted)
}
@ControllerAction
void markNeutralFromTrusted() {
markTrust("trusted-table", TrustLevel.NEUTRAL, model.trusted)
}
void unshareSelectedFiles() {
println "unsharing selected files"
}
void stopWatchingDirectory() {
String directory = mvcGroup.view.getSelectedWatchedDirectory()
if (directory == null)
return
core.muOptions.watchedDirectories.remove(directory)
saveMuWireSettings()
core.eventBus.publish(new DirectoryUnsharedEvent(directory : new File(directory)))
model.watched.remove(directory)
builder.getVariable("watched-directories-table").model.fireTableDataChanged()
}
void saveMuWireSettings() {
File f = new File(core.home, "MuWire.properties")
f.withOutputStream {
core.muOptions.write(it)
}
}
void mvcGroupInit(Map<String, String> args) {

Some files were not shown because too many files have changed in this diff