Compare commits

...

373 Commits

Author SHA1 Message Date
bcb41baca2 update version, add link to Packaging page 2020-09-27 07:26:06 +01:00
72985bacb6 add ability to disable updates completely, intended for 3rd party packaging 2020-09-26 17:53:29 +01:00
3387d22a6c remove HOPELESS downloads from the download list via an event 2020-09-26 17:10:24 +01:00
10bd566d58 Release 0.7.4 2020-09-25 16:51:40 +01:00
f4e0c707df fix cleaning up of hopeless downloads in plugin 2020-09-23 15:23:51 +01:00
c11a427483 fix cleaning up of hopeless downloads in gui 2020-09-23 15:21:47 +01:00
e9db22c562 option for download attempts before giving up in desktop gui 2020-09-23 14:54:19 +01:00
fa53a35023 option for download attempts before giving up in plugin 2020-09-23 14:44:51 +01:00
94dd6101aa show sequential status and hopeless host count in plugin 2020-09-23 14:33:18 +01:00
e65fbe1bd1 show sequential status and hopeless host count in download details panel 2020-09-23 14:26:02 +01:00
964e315367 add a hopeless state for a download where all sources are hopeless 2020-09-23 14:17:40 +01:00
140231e362 Give up on download sources after a number of attempts 2020-09-23 14:00:52 +01:00
c73a821c67 put verified sources in the responder cache as well 2020-09-23 11:30:20 +01:00
0ebe00b526 reduce hopeless interval to 1hr and purge interval to 24hrs 2020-09-22 16:08:17 +01:00
b2a3bfce54 make sure we have the Persona of the altloc 2020-09-22 16:07:18 +01:00
c490a511bd distinguish between discovered sources and verified sources. Only propagate and persist verified sources 2020-09-22 13:34:49 +01:00
cbaa3470d2 make responder cache size configurable 2020-09-22 12:17:27 +01:00
84d61fccd5 cache recent responders and always forward queries to them. Thanks to qtm for the idea 2020-09-21 15:28:15 +01:00
a88db8f50f FixedSizeFIFOSet 2020-09-21 15:13:44 +01:00
5a38154e15 make sure the pongs uuid matches the last sent ping uuid 2020-09-20 18:42:26 +01:00
e5891de136 send and read up to 2 hosts per pong 2020-09-20 18:26:29 +01:00
1e729bae1c implement forgetting of hopeless hosts after some time 2020-09-20 17:49:07 +01:00
3e6e0c7e9f cache calls to System.currenTimeMillis() 2020-09-20 17:34:28 +01:00
44af23c162 restrict forwarding of queries to sqrt of neighboring connections. Thanks to 'qtm' for the idea 2020-09-19 17:28:19 +01:00
a262c99efe reduce limit on peer connections 2020-09-19 17:16:36 +01:00
9eff723dd3 Release 0.7.3 2020-09-18 18:44:15 +01:00
f2531c80d5 disable Configure button if no directory is selected 2020-09-18 18:35:10 +01:00
944cb29901 show error messages if the command is not in the appropriate room 2020-09-18 18:15:15 +01:00
2c7cf24942 browse and subscribe actions from upload table for plugin 2020-09-18 17:30:47 +01:00
5a94d14b8e pass event correctly 2020-09-18 16:13:48 +01:00
f20b23434f one more 2020-09-18 16:13:23 +01:00
8d523a6265 update to api 2020-09-18 15:56:39 +01:00
38b9ab5200 gui browse/feed/chat actions from upload table 2020-09-18 15:42:35 +01:00
f6fdf9e33f parse browse/feed/chat headers on the uploader side 2020-09-18 14:59:58 +01:00
b729a89672 Advertise browse/feed/chat abilities in download headers 2020-09-18 14:50:49 +01:00
e531093b28 do not try to connect to hopeless hosts 2020-09-17 19:58:36 +01:00
b18772465c limit the number of hosts read from each pong 2020-09-17 16:19:48 +01:00
20aac03789 send an uuid with pings and echo it in pongs 2020-09-17 16:11:32 +01:00
ac8d9c1281 update room scrollback limit to new api, update TODO 2020-09-16 17:41:01 +01:00
ad8693d512 some styling of chat lines 2020-09-16 17:15:23 +01:00
144ad634c8 fix ambiguous Math.max 2020-09-16 14:46:34 +01:00
4cdb383b9f fix minutes-to-milliseconds conversion of feed update interval in desktop gui. Make default feed update interval one hour 2020-09-16 14:37:08 +01:00
9c6f6bf266 Make number of connections for ultrapeer a property 2020-09-16 12:30:41 +01:00
9a4e6b868b get rid of clear search button to make more space for search field, as it is not very big on Mac LnF 2020-09-14 15:35:49 +01:00
31e0962b73 fix test target by excluding testng dependency 2020-09-14 14:38:12 +01:00
2acac4b1ea fix java 14 on mac 2020-09-14 13:33:01 +01:00
03d00a22d7 try native lnf on all platforms with metal last 2020-09-14 13:32:25 +01:00
0e54fb1ed1 update TODO 2020-09-14 12:45:42 +01:00
2dee5e2a8a short-circuit logic if monitor is not visible 2020-09-14 12:44:52 +01:00
c7406a4838 preserve selection when updating shared files table 2020-09-14 12:39:18 +01:00
c9eb702d7c add a section to the TODO for mwtrackerd with some items 2020-09-12 18:16:27 +01:00
253603cac7 add a log statement for infohash parsing 2020-09-12 14:37:54 +01:00
3af6ee3bce update TODO 2020-09-12 14:26:48 +01:00
bfa88b0b7a Release 0.7.2 2020-09-10 16:43:21 +01:00
1400967b22 update TODO 2020-09-04 14:47:13 +01:00
5739760075 up router version to 0.9.47 2020-09-04 14:46:37 +01:00
fec042ec36 check if watched directories disappeared while MW was down 2020-09-04 14:44:04 +01:00
d3477b91fc Merge branch 'iconfix' into 'master'
update the muwire icon which appears on the router console itself

See merge request zlatinb/muwire!48
2020-08-14 14:49:33 +00:00
idk 1f973cf076 Merge branch 'iconfix-redux' of i2pgit.org:idk/muwire into iconfix 2020-08-12 21:30:55 -04:00
idk fdb64f5539 re-try muwire icon fix with update codebase 2020-08-12 21:28:20 -04:00
idk 5b4f3202d6 update the image and create a smaller, icon-ized version 2020-08-12 21:23:42 -04:00
idk 64eb2dad80 add larger muwire logo, use it for the home page 2020-08-11 22:12:52 -04:00
idk ffe328eee6 update the muwire icon which appears on the router console itself 2020-08-11 19:26:53 -04:00
eb1f2fe19d escape download file name 2020-06-18 21:23:51 +01:00
17c59102ad one more --add-opens 2020-06-16 14:05:41 +01:00
26e8300d18 first pass at a collections proposal 2020-06-12 19:02:05 +01:00
47ac0fd9ac fix gui:run target 2020-06-11 19:16:48 +01:00
0b8b489169 read i2p.properties from file 2020-06-11 17:38:17 +01:00
fb32690c7c de-hardcode groovy versions 2020-06-06 14:01:32 +01:00
a11c504271 update all of the gui to groovy 3.0.4 2020-06-06 04:23:14 +01:00
76e726b520 switch to the Groovy json library 2020-06-05 20:29:39 +01:00
4f626615d8 disable translation instead of throwing an Error if the user is on development i2p router build 2020-06-05 20:05:03 +01:00
061a1a88dd disable indy because on groovy 3 it uses too much memory 2020-06-05 14:32:54 +01:00
ad20d7cf9a get rid of illegal reflective access warnings 2020-06-05 13:23:54 +01:00
895df6cf94 disable pack200 2020-06-05 02:35:04 +01:00
59b5d88829 shorter connect message 2020-06-03 12:58:16 +01:00
f382d2ecbf move the share button to the left next to the shared files count 2020-06-03 12:55:20 +01:00
6740d09479 checkboxes for font style 2020-06-03 12:44:56 +01:00
8cbada110e switch to gridBagLayout to hopefully avoid hiding of short name 2020-06-03 02:58:33 +01:00
33982dd24b preserve main frame dimensions across restarts 2020-06-03 02:22:59 +01:00
274edcc599 preserve initial font+size+style across restarts 2020-06-03 02:03:44 +01:00
af218a369c migrate to groovy 3.0.4 2020-06-01 13:40:28 +01:00
f0aaa83b7f clean up most of noise on console when running without a log file 2020-06-01 13:14:33 +01:00
b9c34cb944 Add ability to specify default values for the wizard from system property. GitHub issue #32 2020-06-01 12:09:58 +01:00
59353a6718 update readme 2020-05-29 12:35:51 +01:00
c25546e1e1 Release 0.7.1 2020-05-29 12:06:53 +01:00
f9fb9e4f07 get rid of dead code 2020-05-29 12:04:55 +01:00
72f2b2bd37 fix manual searching for updates 2020-05-29 12:03:09 +01:00
eb242b0889 reduce default speed smoothing interval 2020-05-29 11:14:56 +01:00
6508522c9c tunnel quantity and length sliders step 2020-05-29 11:05:58 +01:00
f38b8217c2 steps for embedded or external router, formatting 2020-05-29 10:52:58 +01:00
c9c5e8617a Directory validation and creation 2020-05-29 01:51:13 +01:00
8c4bafda82 move the button enabling logic in view 2020-05-29 01:50:53 +01:00
c2044044c0 add a final step 2020-05-29 01:27:36 +01:00
cb54b30967 apply steps at the end, add ability to cancel wizard 2020-05-29 01:15:56 +01:00
c041f6baaa skeleton of setup wizard 2020-05-28 20:08:57 +01:00
bf28278f72 Release 0.7.0 2020-05-26 19:03:05 +01:00
6462675091 enable dual keys 2020-05-26 19:01:31 +01:00
5adf8d8276 up to router 0.9.46 2020-05-26 18:58:40 +01:00
2fbab55f68 remove references to tunnelName variable 2020-05-25 19:44:50 +01:00
0d783a6bcd harmonize some strings 2020-05-21 11:31:27 +01:00
017454c4b3 use sliders instead of fields in I2P settings 2020-05-18 14:39:16 +01:00
ec41985d31 Release 0.6.15 2020-05-17 22:36:08 +01:00
5daad35ee2 new icon 2020-05-17 14:30:23 +01:00
8df9f63bc7 new icons 2020-05-17 13:11:59 +01:00
367a43825f trim whitespaces before signing 2020-05-15 13:21:51 +01:00
7b34b0cffc link to mucats 2020-05-14 20:15:54 +01:00
bb6692c38e Merge branch 'strings' into 'master'
Prep for push to transifex

See merge request zlatinb/muwire!47
2020-05-13 15:31:37 +00:00
zzz f1a2b103a8 Prep for push to transifex
Additional string cleanup, regenerate English po file
2020-05-13 10:29:18 -04:00
c1324c92ba Merge branch 'csp3' into 'master'
Fix script line changes from previous MR

See merge request zlatinb/muwire!46
2020-05-11 17:02:49 +00:00
zzz 179c3438cd Fix script line changes from previous MR
Fix some missing quotes
Move util.js and tables.js to css.jsi
script files don't require nonce; inline scripts do.
Nonce doesn't matter until we turn on the CSP.
2020-05-11 10:59:28 -04:00
7fa6812ee9 merge from zzz/csp2 2020-05-11 13:38:20 +01:00
zzz a1c714b46e Replace innerHTML part 1 (Gitlab issue #45)
Change all plain text and empty content from innerHTML to textContent
2020-05-11 08:20:03 -04:00
4f7cf4fbfc Merge branch 'csp1' into 'master'
Plugin headers and CSP (Gitlab issue #44)

See merge request zlatinb/muwire!44
2020-05-11 12:04:25 +00:00
zzz 2d3e843d64 Plugin headers and CSP (Gitlab issue #44)
Prep for stricter script-src:
Add headers, remove js onload, move init call to the js
Add nonces to all scripts, can't use yet due to innerHTML (see Gitlab issue #45)
2020-05-11 07:50:36 -04:00
2e36812740 Merge branch 'sigtype' into 'master'
Signature must be constructed with the sigtype of the signing key

See merge request zlatinb/muwire!43
2020-05-10 11:15:12 +00:00
zzz 61340f346a Signature must be constructed with the sigtype of the signing key 2020-05-10 06:26:35 -04:00
992daa1e45 size limit on nicknames 2020-05-10 09:51:56 +01:00
3b825263a7 Make the random port selection range match that of the I2P router 2020-05-08 17:37:06 +01:00
e1bf6c0821 prevent invalid characters in searchers of persisted files from breaking the loading process. Related to GitHub issue #45 2020-05-08 17:33:57 +01:00
a6eca11479 Release 0.6.14 2020-05-07 13:39:17 +01:00
11aa6dda70 sign tool in web ui 2020-05-07 13:29:21 +01:00
3116e20c7c Fix i2np port change on every restart, github issue #45 2020-05-07 03:12:09 +01:00
58a92e7442 disallow certain characters in nicknames 2020-05-06 11:49:52 +01:00
d18cdb15cd disallow certain characters in nicknames 2020-05-06 11:39:08 +01:00
ed02b718d9 sign raw UTF-8 representation, removing size limit 2020-05-06 05:36:59 +01:00
564db3473c publish core to local maven repo 2020-05-05 16:01:23 +01:00
6d6063829a convert the core project into a library 2020-05-05 15:39:54 +01:00
ecaec1df3b sign tool 2020-05-04 23:16:32 +01:00
8b99f83db8 Release 0.6.13 2020-05-04 14:18:08 +01:00
33b159477a get test targets to pass, ignoring some tests which are not relevant anymore 2020-05-04 13:08:18 +01:00
91d8175cc5 fix method name 2020-05-04 13:07:32 +01:00
b4c6c77167 Ability to configure watched directories from swing gui 2020-05-04 12:15:27 +01:00
fb59d1ca0c fix the wait window while core is loading 2020-05-04 08:15:25 +01:00
3de4c65d2f Merge branch 'accordion' into 'master'
Accordion

See merge request zlatinb/muwire!41
2020-05-03 16:35:13 +00:00
zzz 91ea2c0184 Move accordion javascript to its own file
Open the accordion section for the page you are on
2020-05-03 12:16:05 -04:00
4a81a3539e Merge branch 'master' into 'master'
Clean up help text for consistency and translatability

See merge request zlatinb/muwire!40
2020-05-03 12:56:03 +00:00
zzz fcfb506787 Clean up help text for consistency and translatability 2020-05-03 12:56:03 +00:00
zzz 44dc7b808f Clean up help text for consistency and translatability 2020-05-03 08:28:05 -04:00
339f4aaa3e Merge branch 'master' into 'master'
Add top-level groovy and java compile options to build.gradle

See merge request zlatinb/muwire!39
2020-05-02 15:20:25 +00:00
zzz bf06c3b15f Add top-level groovy and java compile options to build.gradle
Add compilerArgs to gradle.properties
Fix compile warnings in DataUtil
2020-05-02 10:53:00 -04:00
b5e41d72b8 typo 2020-04-29 12:57:01 +01:00
2fe9309519 update README with link to Tracker wiki page 2020-04-29 12:55:17 +01:00
2410ed7199 Merge branch 'tracking-server-side' 2020-04-29 12:37:06 +01:00
9167c9edf7 add a max failures parameter when deciding whether to expire a host. Report the number of negative hosts in the info rpc 2020-04-29 12:20:23 +01:00
028a8d5044 handle tracker pongs 2020-04-29 11:24:58 +01:00
356d7fe2ff always include the uuid in the tracker response 2020-04-29 11:23:42 +01:00
9da7a90653 wip on pinging swarm members 2020-04-29 07:26:51 +01:00
2001419f1a Iterate through the swarms in order of last pinged, get hosts which have not been pinged recently, also in chronological order 2020-04-29 06:07:22 +01:00
eec9bab081 Start work on timer-based swarm tracking 2020-04-29 05:21:18 +01:00
0a66267264 add missing dependency on java11 2020-04-29 03:47:01 +01:00
ad698cf1b9 use spring configuration for the tracker properties 2020-04-29 02:58:48 +01:00
fd9866c519 implement "info" json-rpc method 2020-04-29 02:03:50 +01:00
83bea0c823 report # of swarms in status, add forget method 2020-04-29 00:54:05 +01:00
71789d96d2 working injection and query kickoff through json-rpc, wip on swarm monitoring 2020-04-28 23:35:29 +01:00
7860aa2b1c prevent replay attacks by attaching an uuid to the crawler pings and pongs 2020-04-28 19:46:13 +01:00
301c2ec0e2 make I2PSession visible 2020-04-28 19:29:09 +01:00
c306864781 add type to the tracker pong and echo the infohash that was queried 2020-04-28 19:18:37 +01:00
acee9a5805 customize port and interface of web server 2020-04-28 18:26:36 +01:00
d34c4e1990 hello spring boot 2020-04-28 18:11:26 +01:00
7be3821e53 will use spring boot for json-rpc endpoints 2020-04-28 17:03:00 +01:00
872e932629 logging.properties for the hostcache and a script to count total hosts 2020-04-27 19:43:33 +01:00
84c7da1fe0 * More logging
* Include leaseset in crawler pings
* serialize hourly files in a directory, keep history
2020-04-26 20:15:48 +01:00
4aed958319 wip on tracker 2020-04-26 19:31:21 +01:00
5fc0283da7 revert change to constructor 2020-04-26 19:30:26 +01:00
c4d908f571 switch to simple-json-rpc library, add basic rpc server over tcp 2020-04-15 10:26:47 +01:00
4d5497c12f setup wizard 2020-04-14 13:18:13 +01:00
1d22abfa88 add ability to change the tunnel name 2020-04-14 13:17:47 +01:00
7a7ebc9690 skeleton of tracker project 2020-04-13 19:43:48 +01:00
16d3a109ca option to disable tracking in web ui 2020-04-12 11:40:21 +01:00
7864eebb24 gui option to disable tracking 2020-04-12 11:26:08 +01:00
9f7aaec991 include local persona in tracker response 2020-04-12 06:57:39 +01:00
1c214ad68a server side of file tracking 2020-04-12 05:56:06 +01:00
3436af75bf remove redundant header parsing code 2020-04-10 08:04:00 +01:00
9b6a2fd952 write to memmapped file in 8kb increments 2020-04-08 13:25:08 +01:00
85ad3109f9 get rid of sNL and darktrion hostcaches, add echelon's 2020-04-02 12:29:44 +01:00
293ff76ae9 Move the wait for client manager in the background thread, hopefully fixes #42 2020-03-30 13:28:22 +01:00
acb70f72d6 fix determination if a directory is shared 2020-03-30 12:42:16 +01:00
62bb4f9e5f actions dropdown on trust lists page 2020-03-29 21:16:54 +01:00
03d6fb15f2 Actions menu on TrustUsers page 2020-03-29 20:58:15 +01:00
699f3ce1b6 convert the Mark (Dis)Trusted links on search results page to hover menu 2020-03-29 19:15:34 +01:00
7f9c8bddb6 fix the color of the hover menu when hovering over a table 2020-03-29 13:18:40 +01:00
d111983d68 help text for each page 2020-03-28 23:33:11 +00:00
50148e5603 add a Help tooltip section in the header. To be updated with different text for each page 2020-03-28 22:51:07 +00:00
1054fe0935 x -> px 2020-03-28 20:19:01 +00:00
2de2badb0b tooltips on config options 2020-03-28 19:55:00 +00:00
424922f2e3 start adding tooltips to config options 2020-03-28 19:19:35 +00:00
adce4b1574 help tooltips on Browse and Feeds pages 2020-03-28 18:39:30 +00:00
355535e660 help tooltips on search box and share input box 2020-03-28 16:14:03 +00:00
09db68182c add a description of the advanced sharing page, wording and css tweaks 2020-03-28 03:02:48 +00:00
1e67139e74 display Never if directory was never synced 2020-03-28 02:52:22 +00:00
9837e1e3d7 emit an event on every dir sync so that UI can update timestamps 2020-03-28 02:47:29 +00:00
2c52486476 fix manual syncing 2020-03-27 15:47:58 +00:00
a88dc17064 add a sync option, fix sorting of table 2020-03-27 15:42:50 +00:00
862967bf8e configure panel for directories 2020-03-27 15:14:36 +00:00
9f1f718870 show the dirs in a table, no actions yet 2020-03-27 12:54:01 +00:00
2fd0a3833f wip on web ui for advanced sharing 2020-03-27 11:10:25 +00:00
435170cb1b update the advanced sharing pane 2020-03-26 17:32:42 +00:00
1c5fec7e9a Merge branch 'master' of 127.0.0.1:zlatinb/muwire into watched-directories
So that I can get B0B's icon
2020-03-26 15:41:34 +00:00
e2a0a37abf ui force sync event 2020-03-26 15:40:53 +00:00
a4bee73b8a process changes in configuration 2020-03-26 15:19:09 +00:00
056e5800c2 implement directory polling 2020-03-26 14:55:44 +00:00
6e0d51c221 first load all watched directories, only then register and scan the auto-watched 2020-03-26 13:10:56 +00:00
496e2e7f91 scan autoWatched directories on startup 2020-03-26 12:53:54 +00:00
a560b14d91 hook up directory manager with share & unshare events 2020-03-26 12:24:07 +00:00
faad6b6b0e query the manager if a directory is watched instead of settings 2020-03-26 12:23:15 +00:00
dfc62b943f wip on persisting and loading of watched directory metadata, emit the event to register on autowatch service 2020-03-26 06:21:41 +00:00
244ce43794 persistence of WatchedDirectory object 2020-03-26 05:31:39 +00:00
f0c8c11094 get rid of UI-side watching of directories on AllFilesLoadedEvent 2020-03-26 05:31:05 +00:00
11e320ef53 wip on directory watching 2020-03-26 04:09:18 +00:00
aae88e80ee Merge branch 'master' into 'master'
AdvancedSharing.png icons. Creative Commons CC0.

See merge request zlatinb/muwire!38
2020-03-25 23:37:51 +00:00
Bob bbf97311d1 AdvancedSharing.png icons. Creative Commons CC0. 2020-03-25 23:37:51 +00:00
23b6995bf2 start work on advanced watched directories 2020-03-25 22:39:03 +00:00
518bdc44e6 update TODO 2020-03-25 20:27:51 +00:00
5368dbe181 CSS tweaks from B0B 2020-03-25 15:37:48 +00:00
e216678d9a Release 0.6.12 2020-03-25 08:41:42 +00:00
4582cfa0b5 router version 0.9.45 2020-03-25 08:38:51 +00:00
5ea64ecb90 update webui for directory deletion 2020-03-25 08:10:26 +00:00
bd9315954a add a positive tree so that deleting of shared directories can be detected 2020-03-25 08:09:45 +00:00
83bdf76c08 cache the file/dir status when creating a tree node so that traversal can work if the file is deleted 2020-03-25 08:08:55 +00:00
a2ed308cd0 only fetch the latest revision number on initialization. This fixes the flicker on first refreshStatus() 2020-03-24 13:28:56 +00:00
4020df0a77 update expanded tree paths on file events 2020-03-23 20:44:13 +00:00
6f4b4a2c2d update plugin file manager on deleted files 2020-03-23 18:31:42 +00:00
83cd5e57a2 only refresh feeds table if something changes. This prevents the hover menu from flickering 2020-03-23 08:47:05 +00:00
bb69535874 convert feeds table actions to hover menu 2020-03-23 08:28:13 +00:00
b7033e3277 display build number in MuStatus 2020-03-23 07:59:18 +00:00
4a9cea7d2e special-case the files table with some padding to make the hover menu visible without scrolling, in some cases. 2020-03-23 07:47:41 +00:00
2aea965d72 fix hover menu in files table, break small x-display size 2020-03-23 00:29:26 +00:00
9a6a1c8371 fix missing image 2020-03-22 23:07:53 +00:00
2042bfccb7 get rid of effect where ellipsis overflow doesn't work 2020-03-22 23:03:29 +00:00
0d4b0df19d remove Bote's icons 2020-03-22 21:52:35 +00:00
f363296ed1 use new icons 2020-03-22 21:32:20 +00:00
8b33a5a284 new icons from B0B, licensed under CC0 2020-03-22 21:32:09 +00:00
7e70dbda86 link to repo 2020-03-22 18:40:36 +00:00
c23db1293f add note about GitLab mirroring 2020-03-22 17:53:00 +00:00
54f4874ad6 count the times a file has been hit due to feed update 2020-03-22 10:46:53 +00:00
886effa3b6 size columns 2020-03-22 04:01:17 +00:00
64d8b98ee2 show/hide comments in certificates 2020-03-22 03:46:58 +00:00
2f2f620ae5 certificates table 2020-03-22 03:01:07 +00:00
9a74cc5026 downloaders table 2020-03-22 02:51:26 +00:00
e3c5fe291d WIP on file details page 2020-03-22 02:13:23 +00:00
c77b848d44 correct comparison 2020-03-21 20:59:28 +00:00
cf5b5b164d copy hash to clipboard in files table 2020-03-21 11:46:09 +00:00
3a340e40c8 copy hash to clipboard functionality in file tree 2020-03-21 11:41:44 +00:00
e9eafe9380 Actions menu in table view 2020-03-20 16:56:00 +00:00
270a8519b4 Actions link on folders 2020-03-20 16:27:37 +00:00
f8bbeb8ac0 switch to a dropdown menu on file tree 2020-03-20 15:59:54 +00:00
2a4db868aa collapsible Trust Configuration and About sections 2020-03-20 15:01:45 +00:00
59219da1a2 tighten the file tree a bit 2020-03-20 14:20:28 +00:00
a5fb824f71 link 'browsing' links to specific matching table entries 2020-03-19 22:38:51 +00:00
68bc0bbf30 open the latest search by default 2020-03-19 22:03:40 +00:00
c6c1ac1d93 more descriptive errors on Browse and Feed submit actions 2020-03-19 20:47:44 +00:00
9646eadcb1 better config input validation, fixes resetting of checkboxes to default values on invalid input 2020-03-19 20:20:41 +00:00
db91c9171d add copy-to-clipboard ability for full id 2020-03-19 19:19:47 +00:00
e542a50260 status page with some MW internals 2020-03-19 18:12:52 +00:00
a9539c5999 add an About Me page which shows the short and full ids 2020-03-19 17:08:35 +00:00
d93dbbeb8b spacing for readability 2020-03-19 16:40:19 +00:00
45659f0dca change message for Browse and Feeds input box 2020-03-19 16:39:49 +00:00
31a607ed7d canonicalize download / incomplete locations before testing 2020-03-19 16:19:43 +00:00
7a6538beff fix sorting by feed status 2020-03-17 19:07:33 +00:00
509b5c3b99 avoid more conversions to BigDecimal 2020-03-17 16:43:31 +00:00
fbb710cfc8 avoid more conversions to BigDecimal 2020-03-17 16:39:41 +00:00
244015465a avoid groovy's implicit conversion to BigDecimal 2020-03-17 16:31:29 +00:00
7285c12b97 clear cached cardinality on cancelling 2020-03-16 23:18:07 +00:00
aac259c0fe cache the cardinality to speed up UI sorting 2020-03-16 22:45:16 +00:00
e3f58f8f5a catch general exceptions because otherwise they get lost in the executor thread 2020-03-15 01:21:20 +00:00
045859fe04 more items 2020-03-14 22:59:50 +00:00
3a8c66e857 more todo items 2020-03-14 22:16:43 +00:00
773513b257 more todo items 2020-03-14 22:16:12 +00:00
83fe2e9b75 update TODO 2020-03-13 11:52:48 +00:00
455b0ea48e config options for feeds 2020-03-13 11:50:45 +00:00
f4c96db841 publish/unpublish functionality 2020-03-13 08:56:45 +00:00
fca8870283 fix default feed update interval 2020-03-13 07:32:08 +00:00
3efb04d7bb missed a B 2020-03-13 07:29:37 +00:00
62ce8ffa46 size columns 2020-03-13 07:26:02 +00:00
05b70a4573 Individual feed configuration ability 2020-03-13 06:48:58 +00:00
b339784826 view comment functionality 2020-03-13 03:54:30 +00:00
488f2964ee Display feed presence in search results, various fixes 2020-03-13 03:40:41 +00:00
369779ab6a swallow an exception that happens in plugin mostly 2020-03-13 02:42:46 +00:00
f5fe3da09d hook up some actions 2020-03-13 02:04:50 +00:00
392deee34c wip on feeds page js side 2020-03-13 00:37:58 +00:00
7183f15c5c plumbing for /Feeds page 2020-03-12 23:33:04 +00:00
ca33535630 POST hook for downloading feed items 2020-03-12 22:46:48 +00:00
54abf82a91 wip on server side of feeds for plugin 2020-03-12 22:28:11 +00:00
14546737fd release 0.6.11 2020-03-10 22:31:33 +00:00
0f069f2fc9 Merge branch 'file-feeds' 2020-03-10 22:28:38 +00:00
9a44603d2f prevent duplicate feed subscriptions 2020-03-10 21:30:27 +00:00
38a027c308 context menu on the feed items table 2020-03-10 21:15:59 +00:00
2ba81ccc84 context menu for feeds table 2020-03-10 20:50:00 +00:00
0408349c07 size columns 2020-03-10 20:04:28 +00:00
95cb7f3214 auto-download feed items functionality 2020-03-10 19:48:36 +00:00
69810d7203 fix variable name 2020-03-10 19:35:39 +00:00
f202fa34f3 auto-publish shared files functionality 2020-03-10 19:12:49 +00:00
c082e25c81 individual feed configuration panel 2020-03-10 18:48:20 +00:00
2bb07ff7b5 do not trim feed items if setting is negative 2020-03-10 17:53:17 +00:00
ff952890bc populate new feeds from defaults 2020-03-10 17:51:14 +00:00
fc393619d8 options for feeds 2020-03-10 17:47:08 +00:00
2882c73876 enable button when switching to chat window 2020-03-10 17:05:44 +00:00
cbb1de046b fetch certificates functionality 2020-03-10 16:53:28 +00:00
a272a45928 persist the right number of feed items 2020-03-10 16:41:31 +00:00
3133581363 view comment functionality 2020-03-10 16:35:02 +00:00
c3d0dce281 store last update attempt and do not retry active feeds 2020-03-10 16:05:32 +00:00
8f710e68c2 download feed item action 2020-03-10 15:06:42 +00:00
15430d6c03 manual update and unsubscribe actions 2020-03-10 13:51:07 +00:00
166b71f128 fix NPE when logging is enabled 2020-03-10 13:47:31 +00:00
d724986ec6 proper method name 2020-03-10 13:34:33 +00:00
198c5b5538 fix json parsing 2020-03-10 13:06:47 +00:00
96d71ed08f fix method name 2020-03-10 13:06:36 +00:00
bb7385688c it always points to the innermost closure 2020-03-10 12:55:09 +00:00
e70bec3a51 hook up feed subscription 2020-03-10 12:44:25 +00:00
ed04c40420 return an empty set if no items are found 2020-03-10 12:43:49 +00:00
e9f00c2995 subscribe button in search tab 2020-03-10 12:16:18 +00:00
fd75d8229b fix feed checkbox for local results 2020-03-10 12:15:52 +00:00
0ff9ca8572 wip on feed items table 2020-03-10 11:50:55 +00:00
a07f01b641 utility method to check if an infohash is shared 2020-03-10 11:50:09 +00:00
b9333913c6 hook up feeds table to feed items table 2020-03-10 10:47:05 +00:00
fcb5c573f9 wip on feeds table 2020-03-10 10:39:18 +00:00
1610766e01 wip on feeds table 2020-03-10 07:33:29 +00:00
e2a9db8056 add an IDLE status to feeds for display purposes 2020-03-10 07:32:45 +00:00
a0cb214e2b placeholder feeds panel 2020-03-10 06:22:40 +00:00
f2bf921d4c parse feed flag in results 2020-03-10 06:06:57 +00:00
aa0fcfb7de fix capitalization in event name 2020-03-10 05:50:54 +00:00
48cfce71a8 emit event on publishing 2020-03-10 05:47:42 +00:00
8798ea38e8 button for publishing, column in the shared files table 2020-03-10 01:39:55 +00:00
17cd60afe3 deleting of feeds 2020-03-10 00:58:43 +00:00
c10c1118e8 feed client 2020-03-09 19:28:42 +00:00
28425e93dc persist only as many items as configured to keep 2020-03-09 18:53:43 +00:00
032338bb48 Persist feed metadata and items on successful fetch. Register feed manager for various events 2020-03-09 18:31:10 +00:00
12e56b1c9a events associated with updating a feed 2020-03-09 17:37:17 +00:00
cc8801c48b do not NPE if hashing fails 2020-03-09 16:05:57 +00:00
57c75978b6 wip on feed manager deserialization 2020-03-08 20:19:37 +00:00
bfe198e1a6 representation of a feed 2020-03-08 19:38:48 +00:00
8e274f940e Feed item representation and serialization 2020-03-08 19:30:04 +00:00
9f3942c1c7 settings to disable or not advertise file feed 2020-03-08 17:15:00 +00:00
d60d57ee43 wip on server side feed handling 2020-03-08 17:04:11 +00:00
8e3a433afb persist shared file on publish/unpublish 2020-03-08 16:06:28 +00:00
49cf56fabb UI Publish & Unpublish events 2020-03-08 16:01:50 +00:00
2b6565d107 unpublish method 2020-03-08 16:01:23 +00:00
366a2ef841 published flag and timestamp in shared files 2020-03-08 15:46:36 +00:00
bcd24e56ac TODO updates for plugin 2020-03-07 15:48:20 +00:00
c7d1f0c23c Connect to i2p router after creating the Core object, should help with plugin init issues #39 2020-02-23 18:29:09 +00:00
853b9f67fc Release 0.6.10 2020-02-23 15:42:03 +00:00
a505a2449a persist SharedFile on change of comments #35 2020-02-18 02:14:32 +00:00
c11d81c6c3 Release 0.6.9 2020-02-16 16:33:33 +00:00
ee5e90c4ab ignore events from old persister service, prevents duplicate entries during migration #35 2020-02-14 18:20:39 +00:00
64d2a87d26 more occurrences of SharedFile::getInfoHash #35 2020-02-14 17:53:09 +00:00
f0304dbe7d fix copy-hash-to-clipboard #35 2020-02-14 16:14:36 +00:00
bdad8d9309 make extended signatures mandatory 2020-02-14 15:34:21 +00:00
8c110bbae5 more occurrences of SharedFile::getInfoHash #35 2020-02-14 15:24:39 +00:00
2cc1e384bc more occurrences of SharedFile::getInfoHash #35 2020-02-14 15:20:01 +00:00
9337d1b74d chase down references to missing infoHash #35 2020-02-14 01:48:02 +00:00
16ed5dd346 chase down some usages of deprecated getInfoHash method #35 2020-02-14 01:32:38 +00:00
7b55fc9ed8 working uploads #35 2020-02-14 01:15:10 +00:00
d5c8050572 wip on separate hashlist storage #35 2020-02-14 00:37:07 +00:00
83546d68d2 Merge pull request #37 from LoveIsGrief/change-persister
Introduce persister that uses a directory structure
2020-01-25 14:36:41 +00:00
a891c83518 Only persist downloaded files if sharing thereof is enabled
Otherwise we might inadvertently share downloads
2020-01-25 15:25:48 +01:00
aa56cc23c0 Cache base 64 path hash
Can't do it in constructor without an ugly try/catch
 therefore this is done on demand
2020-01-25 15:20:38 +01:00
a2b37ef567 Persist downloaded files 2020-01-25 15:06:12 +01:00
4bc04ae631 Revert "Reduce log levels in Connection"
This reverts commit dcd233b7
2020-01-25 15:01:21 +01:00
56da9a16b0 Set FileLoadedEvent::source in the subclass
Setting it in the super class means we don't set the right value for every case
2020-01-25 15:00:48 +01:00
2935ee1a1d Remove unnecessary executor
It was doing nothing but starting and stopping
2020-01-25 14:49:59 +01:00
855183397b Remove TODO
There's already an issue open https://github.com/zlatinb/muwire/issues/35
2020-01-22 21:35:54 +01:00
e27704c1af Make sure migration from PersisterService works
this.getClass() and this.class kept resolving to Class.
Using a string is much simpler

mkdirs() is also necessary because the directory structure doesn't exist
 when persistFile is called the first time
2020-01-22 20:59:05 +01:00
5c18b4a141 Add more logs PersisterFolderService 2020-01-22 15:12:22 +01:00
dcd233b7ad Reduce log levels in Connection
Too verbose
2020-01-22 15:12:01 +01:00
7cee8a28ba FileLoadedEvent should include class when coming from old persister
Otherwise the new PersisterFolderService won't migrate
2020-01-22 15:07:00 +01:00
7446fc949a Remove UIPersistFilesEvent
Hashing is done per file now and those are triggered by individual events
2020-01-22 13:00:55 +01:00
598ab90f63 Clear up the event path when starting up the old and new persisters
The new persister won't load anything until the old one has finished
2020-01-22 12:36:34 +01:00
043028c296 Introduce PersisterFolderService to replace PersisterService
An attempt at automatically migrate from PersisterService was made, but the events aren't triggered in the right order.
We need to make sure that we don't trigger the "AllFilesLoadedEvent" before the migration is done
2020-01-21 23:34:33 +01:00
cd1757fac3 Use Java 11
Java9 isn't available on Ubuntu anymore, which would make development harder
2020-01-19 21:46:47 +01:00
9d4b365e63 Log the time it take to persist files and hashes 2020-01-19 21:43:03 +01:00
b12d57e30a fix bracket 2020-01-14 20:27:21 +00:00
f33d1b6db3 move the docker documentation to the wiki 2020-01-14 20:26:47 +00:00
277 changed files with 10739 additions and 1513 deletions

.gitignore

@ -2,7 +2,7 @@
**/.settings
**/build
.gradle
.project
.classpath
**/.project
**/.classpath
**/*.rej
**/*.orig

Dockerfile

@ -4,7 +4,7 @@ FROM jlesage/baseimage-gui:alpine-3.10-glibc
ARG DOCKER_IMAGE_VERSION=unknown
# JDK version
ARG JDK=9
ARG JDK=11
# Important directories
ARG TMP_DIR=/muwire-tmp

README.md

@ -1,8 +1,10 @@
The GitHub repo is mirrored from the in-I2P GitLab repo. Please open PRs and issues at http://git.idk.i2p/zlatinb/muwire
# MuWire - Easy Anonymous File-Sharing
MuWire is an easy to use file-sharing program which offers anonymity using [I2P technology](http://geti2p.net). It works on any platform Java works on, including Windows, MacOS, Linux.
The current stable release - 0.6.8 is available for download at https://muwire.com. The latest plugin build and instructions how to install the plugin are available inside I2P at http://muwire.i2p.
The current stable release - 0.7.4 is available for download at https://muwire.com. The latest plugin build and instructions how to install the plugin are available inside I2P at http://muwire.i2p.
You can find technical documentation in the [doc] folder. Also check out the [Wiki] for various other documentation.
@ -19,7 +21,7 @@ If you want to run the unit tests, type
./gradlew clean build
```
If you want to build binary bundles that do not depend on Java or I2P, see the [muwire-pkg] project
If you want to build binary bundles that do not depend on Java or I2P, see the [muwire-pkg] project. If you want to package MuWire for a Linux distribution, see the [Packaging] wiki page.
## Running the GUI
@ -28,9 +30,7 @@ Type
./gradlew gui:run
```
If you have an I2P router running on the same machine that is all you need to do. If you use a custom I2CP host and port, create a file `i2p.properties` and put `i2cp.tcp.host=<host>` and `i2cp.tcp.port=<port>` in there. On Windows that file should go into `%HOME%\AppData\Roaming\MuWire`, on Mac into `$HOME/Library/Application Support/MuWire` and on Linux `$HOME/.MuWire`
[Default I2CP port]\: `7654`
The setup wizard will ask you for the host and port of an I2P or I2Pd router.
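For illustration, a minimal `i2p.properties` for the custom-host case might look like the sketch below; the host value is only a placeholder assumption, while `7654` is the default I2CP port noted above:
```properties
# point MuWire at your router's I2CP interface (host shown here is only an example)
i2cp.tcp.host=127.0.0.1
i2cp.tcp.port=7654
```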
## Running the CLI
@ -44,57 +44,17 @@ There is a Web-based UI under development. It is intended to be run as a plugin
## Docker
The Docker image is based on the wonderful work in [jlesage/docker-baseimage-gui].
You can refer to it for environment variables to pass to the container.
If you don't want to use the image on dockerhub, build an image yourself.
```bash
MUWIRE_VERSION=`awk -F "=" '/^version/ { gsub(" ","") ; print $2}' gradle.properties`
docker build -t muwire:latest,muwire:${MUWIRE_VERSION} .
```
**Necessary configuration**
Since MuWire will be running in a container, it won't have direct access to the host's localhost.
By default, it will be configured to use `172.17.0.1` as the target host.
You'll need to open the I2CP port on that interface.
If you're running I2P on the localhost, navigate to http://localhost:7657/configi2cp and make the necessary changes.
![i2cp_config.png]
Should you be using a different interface write an `i2p.properties` and then put that into the shared docker volume.
Example configuration file:
```properties
i2cp.tcp.host=112.13.0.1
```
**Running**
```bash
docker run \
-p 5800:5800 \
-v config:/muwire/.MuWire \
-v incompletes:/incompletes \
-v output:/output \
--name muwire \
zlatinb/muwire
```
You will then be able to access the muwire GUI over a browser at http://localhost:5800
**Options**
| Option | Description |
|--------------|--------------------------------------------|
|`-v config:/muwire/.MuWire`| This is where the `i2p.properties` and possibly other config should go |
|`-v incompletes:/incompletes`| The `/incompletes` volume should be used to store MuWire's **incomplete** download/upload data \*|
|`-v output:/output`| The `/output` volume should be used to store MuWire's download/upload data |
MuWire is available as a Docker image. For more information see the [Docker] page.
## Translations
If you want to help translate MuWire, instructions are on the wiki https://github.com/zlatinb/muwire/wiki/Translate
## Related Projects
### MuWire Tracker Daemon
The MuWire Tracker Daemon (or mwtrackerd for short) is a project to bring functionality similar to BitTorrent tracking to MuWire. For more info see the [Tracker] page.
### MuCats
MuCats is a project to create a website for hosting hashes of files shared on the MuWire network. For more info see the [MuCats] project.
## GPG Fingerprint
```
@ -108,9 +68,11 @@ You can find the full key at https://keybase.io/zlatinb
[Wiki]: https://github.com/zlatinb/muwire/wiki
[doc]: https://github.com/zlatinb/muwire/tree/master/doc
[muwire-pkg]: https://github.com/zlatinb/muwire-pkg
[Packaging]: https://github.com/zlatinb/muwire/wiki/Packaging
[cli options]: https://github.com/zlatinb/muwire/wiki/CLI-Configuration-Options
[I2P Github]: https://github.com/i2p/i2p.i2p
[Plugin]: https://github.com/zlatinb/muwire/wiki/Plugin
[i2cp_config.png]: ./images/i2cp_config.png
[muwire_incompletes.png]: ./images/muwire_incompletes.png
[Docker]: https://github.com/zlatinb/muwire/wiki/Docker
[jlesage/docker-baseimage-gui]: https://github.com/jlesage/docker-baseimage-gui
[Tracker]: https://github.com/zlatinb/muwire/wiki/Tracker-Daemon
[MuCats]: https://github.com/zlatinb/mucats

TODO.md

@ -15,16 +15,14 @@ This helps with scalability
* Metadata parsing and search
* Automatic adjustment of number of I2P tunnels
* Persist trust immediately
* Check if user-selected download and incomplete locations exist and are writeable
* Enum i18n
* Ability to share trust list only with trusted users
* Confidential files visible only to certain users
* Public Feed feature
* Download queue with priorities
* Use tracker pings - either embedded logic or external mwtrackerd to add more sources to downloads
### Chat
* echo "unknown/inappropriate command" in the console
* break up lines on CR/LF, send multiple messages
* Style timestamps and persona names
* enforce # in room names or ignore it
* auto-create/join channel on server start
* jump from notification window to room with message
@ -32,11 +30,16 @@ This helps with scalability
### Swing GUI
* I2P Status panel - display message when connected to external router
* Search box - left indentation
* Ability to disable switching of tabs on actions
### Web UI/Plugin
* HTML 5 media players
* Minimal dependency (break up groovy-all.jar)
* Remove versions from jar names
* Security: POST nonces, CSP headers
* Upload files from browser to plugin via drag-and-drop
* Check permissions, display better errors when sharing local folders
### mwtrackerd
* `save` and `load` JSON-RPC commands that save and load swarm state respectively
* load-test with many many hashes (1M?)
* evaluate other usage scenarios besides website backend

build.gradle

@ -2,13 +2,26 @@ subprojects {
apply plugin: 'groovy'
dependencies {
compile 'org.codehaus.groovy:groovy:2.4.15'
compile 'org.codehaus.groovy:groovy-jsr223:2.4.15'
compile 'org.codehaus.groovy:groovy-json:2.4.15'
compile "org.codehaus.groovy:groovy:${groovyVersion}"
compile "org.codehaus.groovy:groovy-jsr223:${groovyVersion}"
compile "org.codehaus.groovy:groovy-json:${groovyVersion}"
}
compileGroovy {
groovyOptions.optimizationOptions.indy = true
groovyOptions.optimizationOptions.indy = false
sourceCompatibility = project.sourceCompatibility
targetCompatibility = project.targetCompatibility
options.compilerArgs += project.compilerArgs
options.deprecation = true
options.encoding = 'UTF-8'
}
compileJava {
sourceCompatibility = project.sourceCompatibility
targetCompatibility = project.targetCompatibility
options.compilerArgs += project.compilerArgs
options.deprecation = true
options.encoding = 'UTF-8'
}
repositories {

CliLanterna.groovy

@ -32,7 +32,7 @@ import com.muwire.core.UILoadedEvent
import com.muwire.core.files.AllFilesLoadedEvent
class CliLanterna {
private static final String MW_VERSION = "0.6.8"
private static final String MW_VERSION = "0.7.4"
private static volatile Core core

FilesModel.groovy

@ -3,6 +3,7 @@ package com.muwire.clilanterna
import com.googlecode.lanterna.gui2.TextGUIThread
import com.googlecode.lanterna.gui2.table.TableModel
import com.muwire.core.Core
import com.muwire.core.InfoHash
import com.muwire.core.SharedFile
import com.muwire.core.files.AllFilesLoadedEvent
import com.muwire.core.files.DirectoryWatchedEvent
@ -27,7 +28,6 @@ class FilesModel {
core.eventBus.register(FileLoadedEvent.class, this)
core.eventBus.register(FileUnsharedEvent.class, this)
core.eventBus.register(FileHashedEvent.class, this)
core.eventBus.register(AllFilesLoadedEvent.class, this)
Runnable refreshModel = {refreshModel()}
Timer timer = new Timer(true)
@ -37,15 +37,6 @@ class FilesModel {
}
void onAllFilesLoadedEvent(AllFilesLoadedEvent e) {
def eventBus = core.eventBus
guiThread.invokeLater {
core.muOptions.watchedDirectories.each {
eventBus.publish(new FileSharedEvent(file: new File(it)))
}
}
}
void onFileLoadedEvent(FileLoadedEvent e) {
guiThread.invokeLater {
sharedFiles.add(e.loadedFile)
@ -72,7 +63,7 @@ class FilesModel {
sharedFiles.each {
long size = it.getCachedLength()
boolean comment = it.comment != null
boolean certified = core.certificateManager.hasLocalCertificate(it.getInfoHash())
boolean certified = core.certificateManager.hasLocalCertificate(new InfoHash(it.getRoot()))
String hits = String.valueOf(it.getHits())
String downloaders = String.valueOf(it.getDownloaders().size())
model.addRow(new SharedFileWrapper(it), DataHelper.formatSize2(size, false)+"B", comment, certified, hits, downloaders)

FilesView.groovy

@ -21,7 +21,6 @@ import com.muwire.core.filecert.UICreateCertificateEvent
import com.muwire.core.files.DirectoryUnsharedEvent
import com.muwire.core.files.FileSharedEvent
import com.muwire.core.files.FileUnsharedEvent
import com.muwire.core.files.UIPersistFilesEvent
class FilesView extends BasicWindow {
private final FilesModel model
@ -84,7 +83,6 @@ class FilesView extends BasicWindow {
Button unshareButton = new Button("Unshare", {
core.eventBus.publish(new FileUnsharedEvent(unsharedFile : sf))
core.eventBus.publish(new UIPersistFilesEvent())
MessageDialog.showMessageDialog(textGUI, "File Unshared", "Unshared "+sf.getFile().getName(), MessageDialogButton.OK)
} )
Button addCommentButton = new Button("Add Comment", {

core/build.gradle

@ -1,12 +1,42 @@
apply plugin : 'application'
mainClassName = 'com.muwire.core.Core'
applicationDefaultJvmArgs = ['-Djava.util.logging.config.file=logging.properties']
dependencies {
compile "net.i2p:i2p:${i2pVersion}"
compile "net.i2p:router:${i2pVersion}"
compile "net.i2p.client:mstreaming:${i2pVersion}"
compile "net.i2p.client:streaming:${i2pVersion}"
testCompile 'org.junit.jupiter:junit-jupiter-api:5.4.2'
testCompile 'junit:junit:4.12'
plugins {
id 'java-library'
id 'maven-publish'
}
dependencies {
api "net.i2p:i2p:${i2pVersion}"
api "net.i2p:router:${i2pVersion}"
api "net.i2p.client:mstreaming:${i2pVersion}"
implementation "net.i2p.client:streaming:${i2pVersion}"
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.4.2'
testImplementation 'junit:junit:4.12'
testImplementation 'org.codehaus.groovy:groovy-all:3.0.4'
}
// this is necessary because applying both groovy and java-library doesn't work well
configurations {
apiElements.outgoing.variants {
classes {
artifact file: compileGroovy.destinationDir, builtBy: compileGroovy
}
}
}
configurations.testImplementation {
exclude group:'org.codehaus.groovy', module:'groovy-testng'
}
// publish core to local maven repo for sister projects
publishing {
publications {
muCore(MavenPublication) {
from components.java
}
}
repositories {
mavenLocal()
}
}
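As a usage sketch (assuming the Gradle project is the `core` module referenced in the comment above, and that no extra publishing configuration is needed), the "publish core to local maven repo" step mentioned in the commit log would be driven by the standard `maven-publish` task:
```bash
# installs the muCore publication into the local Maven repository (~/.m2)
./gradlew :core:publishToMavenLocal
```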

Core.groovy

@ -1,7 +1,12 @@
package com.muwire.core
import com.muwire.core.files.PersisterDoneEvent
import com.muwire.core.files.PersisterFolderService
import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicBoolean
import java.util.logging.Level
import java.util.zip.ZipException
import com.muwire.core.chat.ChatDisconnectionEvent
import com.muwire.core.chat.ChatManager
@ -18,8 +23,10 @@ import com.muwire.core.connection.I2PAcceptor
import com.muwire.core.connection.I2PConnector
import com.muwire.core.connection.LeafConnectionManager
import com.muwire.core.connection.UltrapeerConnectionManager
import com.muwire.core.download.DownloadHopelessEvent
import com.muwire.core.download.DownloadManager
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.download.SourceVerifiedEvent
import com.muwire.core.download.UIDownloadCancelledEvent
import com.muwire.core.download.UIDownloadEvent
import com.muwire.core.download.UIDownloadPausedEvent
@ -29,9 +36,18 @@ import com.muwire.core.filecert.CertificateManager
import com.muwire.core.filecert.UICreateCertificateEvent
import com.muwire.core.filecert.UIFetchCertificatesEvent
import com.muwire.core.filecert.UIImportCertificateEvent
import com.muwire.core.filefeeds.FeedClient
import com.muwire.core.filefeeds.FeedFetchEvent
import com.muwire.core.filefeeds.FeedItemFetchedEvent
import com.muwire.core.filefeeds.FeedManager
import com.muwire.core.filefeeds.UIDownloadFeedItemEvent
import com.muwire.core.filefeeds.UIFilePublishedEvent
import com.muwire.core.filefeeds.UIFeedConfigurationEvent
import com.muwire.core.filefeeds.UIFeedDeletedEvent
import com.muwire.core.filefeeds.UIFeedUpdateEvent
import com.muwire.core.filefeeds.UIFileUnpublishedEvent
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.files.FileHashedEvent
import com.muwire.core.files.FileHashingEvent
import com.muwire.core.files.FileHasher
import com.muwire.core.files.FileLoadedEvent
import com.muwire.core.files.FileManager
@ -41,7 +57,11 @@ import com.muwire.core.files.HasherService
import com.muwire.core.files.PersisterService
import com.muwire.core.files.SideCarFileEvent
import com.muwire.core.files.UICommentEvent
import com.muwire.core.files.UIPersistFilesEvent
import com.muwire.core.files.directories.UISyncDirectoryEvent
import com.muwire.core.files.directories.WatchedDirectoryConfigurationEvent
import com.muwire.core.files.directories.WatchedDirectoryConvertedEvent
import com.muwire.core.files.directories.WatchedDirectoryConverter
import com.muwire.core.files.directories.WatchedDirectoryManager
import com.muwire.core.files.AllFilesLoadedEvent
import com.muwire.core.files.DirectoryUnsharedEvent
import com.muwire.core.files.DirectoryWatchedEvent
@ -52,6 +72,7 @@ import com.muwire.core.hostcache.HostDiscoveredEvent
import com.muwire.core.mesh.MeshManager
import com.muwire.core.search.BrowseManager
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.ResponderCache
import com.muwire.core.search.ResultsEvent
import com.muwire.core.search.ResultsSender
import com.muwire.core.search.SearchEvent
@ -67,6 +88,7 @@ import com.muwire.core.upload.UploadManager
import com.muwire.core.util.MuWireLogManager
import com.muwire.core.content.ContentControlEvent
import com.muwire.core.content.ContentManager
import com.muwire.core.tracker.TrackerResponder
import groovy.util.logging.Log
import net.i2p.I2PAppContext
@ -74,10 +96,8 @@ import net.i2p.client.I2PClientFactory
import net.i2p.client.I2PSession
import net.i2p.client.streaming.I2PSocketManager
import net.i2p.client.streaming.I2PSocketManagerFactory
import net.i2p.client.streaming.I2PSocketOptions
import net.i2p.client.streaming.I2PSocketManager.DisconnectListener
import net.i2p.crypto.DSAEngine
import net.i2p.crypto.SigType
import net.i2p.data.Destination
import net.i2p.data.PrivateKey
import net.i2p.data.Signature
@ -96,18 +116,20 @@ public class Core {
final Properties i2pOptions
final MuWireSettings muOptions
private final I2PSession i2pSession;
final I2PSession i2pSession;
private I2PSocketManager i2pSocketManager
final TrustService trustService
final TrustSubscriber trustSubscriber
private final PersisterService persisterService
private final HostCache hostCache
private final ConnectionManager connectionManager
private final PersisterFolderService persisterFolderService
final HostCache hostCache
final ConnectionManager connectionManager
private final CacheClient cacheClient
private final UpdateClient updateClient
private final ConnectionAcceptor connectionAcceptor
final ConnectionAcceptor connectionAcceptor
private final ConnectionEstablisher connectionEstablisher
private final HasherService hasherService
private final DownloadManager downloadManager
final DownloadManager downloadManager
private final DirectoryWatcher directoryWatcher
final FileManager fileManager
final UploadManager uploadManager
@ -115,6 +137,11 @@ public class Core {
final CertificateManager certificateManager
final ChatServer chatServer
final ChatManager chatManager
final FeedManager feedManager
private final FeedClient feedClient
private final WatchedDirectoryConverter watchedDirectoryConverter
final WatchedDirectoryManager watchedDirectoryManager
private final TrackerResponder trackerResponder
private final Router router
@ -131,7 +158,11 @@ public class Core {
// Read defaults
def defaultI2PFile = getClass()
.getClassLoader().getResource("defaults/i2p.properties");
defaultI2PFile.withInputStream { i2pOptions.load(it) }
try {
defaultI2PFile.withInputStream { i2pOptions.load(it) }
} catch (ZipException mystery) {
log.log(Level.SEVERE, "couldn't load default i2p properties", mystery)
}
def i2pOptionsFile = new File(home, "i2p.properties")
if (i2pOptionsFile.exists()) {
@ -142,15 +173,17 @@ public class Core {
if (!i2pOptions.containsKey("outbound.nickname"))
i2pOptions["outbound.nickname"] = "MuWire"
}
if (!(i2pOptions.hasProperty("i2np.ntcp.port")
&& i2pOptions.hasProperty("i2np.udp.port")
if (!(i2pOptions.containsKey("i2np.ntcp.port")
&& i2pOptions.containsKey("i2np.udp.port")
)) {
Random r = new Random()
int port = r.nextInt(60000) + 4000
int port = 9151 + r.nextInt(1 + 30777 - 9151) // this range matches what the i2p router would choose
i2pOptions["i2np.ntcp.port"] = String.valueOf(port)
i2pOptions["i2np.udp.port"] = String.valueOf(port)
i2pOptionsFile.withOutputStream { i2pOptions.store(it, "") }
}
i2pOptions['i2cp.leaseSetEncType']='4,0'
if (!props.embeddedRouter) {
if (!(I2PAppContext.getGlobalContext() instanceof RouterContext)) {
@ -191,14 +224,13 @@ public class Core {
// options like tunnel length and quantity
I2PSocketManager socketManager
keyDat.withInputStream {
socketManager = new I2PSocketManagerFactory().createManager(it, i2pOptions["i2cp.tcp.host"], i2pOptions["i2cp.tcp.port"].toInteger(), i2pOptions)
i2pSocketManager = new I2PSocketManagerFactory().createDisconnectedManager(it, i2pOptions["i2cp.tcp.host"], i2pOptions["i2cp.tcp.port"].toInteger(), i2pOptions)
}
socketManager.getDefaultOptions().setReadTimeout(60000)
socketManager.getDefaultOptions().setConnectTimeout(30000)
socketManager.addDisconnectListener({eventBus.publish(new RouterDisconnectedEvent())} as DisconnectListener)
i2pSession = socketManager.getSession()
i2pSocketManager.getDefaultOptions().setReadTimeout(60000)
i2pSocketManager.getDefaultOptions().setConnectTimeout(30000)
i2pSocketManager.addDisconnectListener({eventBus.publish(new RouterDisconnectedEvent())} as DisconnectListener)
i2pSession = i2pSocketManager.getSession()
def destination = new Destination()
spk = new SigningPrivateKey(Constants.SIG_TYPE)
@ -255,11 +287,22 @@ public class Core {
log.info("initializing mesh manager")
MeshManager meshManager = new MeshManager(fileManager, home, props)
eventBus.register(SourceDiscoveredEvent.class, meshManager)
eventBus.register(SourceVerifiedEvent.class, meshManager)
log.info "initializing persistence service"
persisterService = new PersisterService(new File(home, "files.json"), eventBus, 60000, fileManager)
eventBus.register(UILoadedEvent.class, persisterService)
eventBus.register(UIPersistFilesEvent.class, persisterService)
log.info "initializing folder persistence service"
persisterFolderService = new PersisterFolderService(this, new File(home, "files"), eventBus)
eventBus.register(PersisterDoneEvent.class, persisterFolderService)
eventBus.register(FileDownloadedEvent.class, persisterFolderService)
eventBus.register(FileLoadedEvent.class, persisterFolderService)
eventBus.register(FileHashedEvent.class, persisterFolderService)
eventBus.register(FileUnsharedEvent.class, persisterFolderService)
eventBus.register(UICommentEvent.class, persisterFolderService)
eventBus.register(UIFilePublishedEvent.class, persisterFolderService)
eventBus.register(UIFileUnpublishedEvent.class, persisterFolderService)
log.info("initializing host cache")
File hostStorage = new File(home, "hosts.json")
@ -267,10 +310,17 @@ public class Core {
eventBus.register(HostDiscoveredEvent.class, hostCache)
eventBus.register(ConnectionEvent.class, hostCache)
log.info("initializing responder cache")
ResponderCache responderCache = new ResponderCache(props.responderCacheSize)
eventBus.register(UIResultBatchEvent.class, responderCache)
eventBus.register(SourceVerifiedEvent.class, responderCache)
log.info("initializing connection manager")
connectionManager = props.isLeaf() ?
new LeafConnectionManager(eventBus, me, 3, hostCache, props) :
new UltrapeerConnectionManager(eventBus, me, 512, 512, hostCache, trustService, props)
new UltrapeerConnectionManager(eventBus, me, props.peerConnections, props.leafConnections, hostCache, responderCache, trustService, props)
eventBus.register(TrustEvent.class, connectionManager)
eventBus.register(ConnectionEvent.class, connectionManager)
eventBus.register(DisconnectionEvent.class, connectionManager)
@ -279,16 +329,16 @@ public class Core {
log.info("initializing cache client")
cacheClient = new CacheClient(eventBus,hostCache, connectionManager, i2pSession, props, 10000)
if (!props.plugin) {
if (!(props.plugin || props.disableUpdates)) {
log.info("initializing update client")
updateClient = new UpdateClient(eventBus, i2pSession, myVersion, props, fileManager, me, spk)
eventBus.register(FileDownloadedEvent.class, updateClient)
eventBus.register(UIResultBatchEvent.class, updateClient)
} else
log.info("running as plugin, not initializing update client")
log.info("running as plugin or updates disabled, not initializing update client")
log.info("initializing connector")
I2PConnector i2pConnector = new I2PConnector(socketManager)
I2PConnector i2pConnector = new I2PConnector(i2pSocketManager)
log.info("initializing certificate client")
CertificateClient certificateClient = new CertificateClient(eventBus, i2pConnector)
@ -302,6 +352,19 @@ public class Core {
register(TrustEvent.class, chatServer)
}
log.info("initializing feed manager")
feedManager = new FeedManager(eventBus, home)
eventBus.with {
register(FeedItemFetchedEvent.class, feedManager)
register(FeedFetchEvent.class, feedManager)
register(UIFeedConfigurationEvent.class, feedManager)
register(UIFeedDeletedEvent.class, feedManager)
}
log.info("initializing feed client")
feedClient = new FeedClient(i2pConnector, eventBus, me, feedManager)
eventBus.register(UIFeedUpdateEvent.class, feedClient)
log.info "initializing results sender"
ResultsSender resultsSender = new ResultsSender(eventBus, i2pConnector, me, props, certificateManager, chatServer)
@ -310,23 +373,6 @@ public class Core {
eventBus.register(QueryEvent.class, searchManager)
eventBus.register(ResultsEvent.class, searchManager)
log.info("initializing download manager")
downloadManager = new DownloadManager(eventBus, trustService, meshManager, props, i2pConnector, home, me)
eventBus.register(UIDownloadEvent.class, downloadManager)
eventBus.register(UILoadedEvent.class, downloadManager)
eventBus.register(FileDownloadedEvent.class, downloadManager)
eventBus.register(UIDownloadCancelledEvent.class, downloadManager)
eventBus.register(SourceDiscoveredEvent.class, downloadManager)
eventBus.register(UIDownloadPausedEvent.class, downloadManager)
eventBus.register(UIDownloadResumedEvent.class, downloadManager)
log.info("initializing upload manager")
uploadManager = new UploadManager(eventBus, fileManager, meshManager, downloadManager, props)
log.info("initializing connection establisher")
connectionEstablisher = new ConnectionEstablisher(eventBus, i2pConnector, props, connectionManager, hostCache)
log.info("initializing chat manager")
chatManager = new ChatManager(eventBus, me, i2pConnector, trustService, props)
eventBus.with {
@ -336,17 +382,33 @@ public class Core {
register(ChatDisconnectionEvent.class, chatManager)
}
log.info("initializing download manager")
downloadManager = new DownloadManager(eventBus, trustService, meshManager, props, i2pConnector, home, me, chatServer)
eventBus.register(UIDownloadEvent.class, downloadManager)
eventBus.register(UIDownloadFeedItemEvent.class, downloadManager)
eventBus.register(UILoadedEvent.class, downloadManager)
eventBus.register(FileDownloadedEvent.class, downloadManager)
eventBus.register(UIDownloadCancelledEvent.class, downloadManager)
eventBus.register(SourceDiscoveredEvent.class, downloadManager)
eventBus.register(UIDownloadPausedEvent.class, downloadManager)
eventBus.register(UIDownloadResumedEvent.class, downloadManager)
eventBus.register(DownloadHopelessEvent.class, downloadManager)
log.info("initializing upload manager")
uploadManager = new UploadManager(eventBus, fileManager, meshManager, downloadManager, persisterFolderService, props)
log.info("initializing tracker responder")
trackerResponder = new TrackerResponder(i2pSession, props, fileManager, downloadManager, meshManager, trustService, me)
log.info("initializing connection establisher")
connectionEstablisher = new ConnectionEstablisher(eventBus, i2pConnector, props, connectionManager, hostCache)
log.info("initializing acceptor")
I2PAcceptor i2pAcceptor = new I2PAcceptor(socketManager)
I2PAcceptor i2pAcceptor = new I2PAcceptor(i2pSocketManager)
connectionAcceptor = new ConnectionAcceptor(eventBus, connectionManager, props,
i2pAcceptor, hostCache, trustService, searchManager, uploadManager, fileManager, connectionEstablisher,
certificateManager, chatServer)
log.info("initializing directory watcher")
directoryWatcher = new DirectoryWatcher(eventBus, fileManager, home, props)
eventBus.register(DirectoryWatchedEvent.class, directoryWatcher)
eventBus.register(AllFilesLoadedEvent.class, directoryWatcher)
eventBus.register(DirectoryUnsharedEvent.class, directoryWatcher)
log.info("initializing hasher service")
hasherService = new HasherService(new FileHasher(), eventBus, fileManager, props)
@ -368,9 +430,32 @@ public class Core {
BrowseManager browseManager = new BrowseManager(i2pConnector, eventBus, me)
eventBus.register(UIBrowseEvent.class, browseManager)
log.info("initializing watched directory converter")
watchedDirectoryConverter = new WatchedDirectoryConverter(this)
eventBus.register(AllFilesLoadedEvent.class, watchedDirectoryConverter)
log.info("initializing watched directory manager")
watchedDirectoryManager = new WatchedDirectoryManager(home, eventBus, fileManager)
eventBus.with {
register(WatchedDirectoryConfigurationEvent.class, watchedDirectoryManager)
register(WatchedDirectoryConvertedEvent.class, watchedDirectoryManager)
register(FileSharedEvent.class, watchedDirectoryManager)
register(DirectoryUnsharedEvent.class, watchedDirectoryManager)
register(UISyncDirectoryEvent.class, watchedDirectoryManager)
}
log.info("initializing directory watcher")
directoryWatcher = new DirectoryWatcher(eventBus, fileManager, home, watchedDirectoryManager)
eventBus.with {
register(DirectoryWatchedEvent.class, directoryWatcher)
register(WatchedDirectoryConvertedEvent.class, directoryWatcher)
register(DirectoryUnsharedEvent.class, directoryWatcher)
register(WatchedDirectoryConfigurationEvent.class, directoryWatcher)
}
}
public void startServices() {
i2pSession.connect()
hasherService.start()
trustService.start()
trustService.waitForLoad()
@ -381,6 +466,9 @@ public class Core {
connectionEstablisher.start()
hostCache.waitForLoad()
updateClient?.start()
feedManager.start()
feedClient.start()
trackerResponder.start()
}
public void shutdown() {
@ -398,6 +486,8 @@ public class Core {
trustService.stop()
log.info("shutting down persister service")
persisterService.stop()
log.info("shutting down persisterFolder service")
persisterFolderService.stop()
log.info("shutting down download manager")
downloadManager.shutdown()
log.info("shutting down connection acceptor")
@ -406,16 +496,28 @@ public class Core {
connectionEstablisher.stop()
log.info("shutting down directory watcher")
directoryWatcher.stop()
log.info("shutting down watch directory manager")
watchedDirectoryManager.shutdown()
log.info("shutting down cache client")
cacheClient.stop()
log.info("shutting down chat server")
chatServer.stop()
log.info("shutting down chat manager")
chatManager.shutdown()
log.info("shutting down feed manager")
feedManager.stop()
log.info("shutting down feed client")
feedClient.stop()
log.info("shutting down tracker responder")
trackerResponder.stop()
log.info("shutting down connection manager")
connectionManager.shutdown()
log.info("killing i2p session")
i2pSession.destroySession()
if (updateClient != null) {
log.info("shutting down update client")
updateClient.stop()
}
log.info("killing socket manager")
i2pSocketManager.destroySocketManager()
if (router != null) {
log.info("shutting down embedded router")
router.shutdown(0)
@ -459,7 +561,7 @@ public class Core {
}
}
Core core = new Core(props, home, "0.6.8")
Core core = new Core(props, home, "0.7.4")
core.startServices()
// ... at the end, sleep or execute script

View File

@ -16,7 +16,7 @@ class MuWireSettings {
boolean allowTrustLists
int trustListInterval
Set<Persona> trustSubscriptions
int downloadRetryInterval
int downloadRetryInterval, downloadMaxFailures
int totalUploadSlots
int uploadSlotsPerUser
int updateCheckInterval
@ -31,17 +31,33 @@ class MuWireSettings {
boolean shareHiddenFiles
boolean searchComments
boolean browseFiles
boolean allowTracking
boolean fileFeed
boolean advertiseFeed
boolean autoPublishSharedFiles
boolean defaultFeedAutoDownload
long defaultFeedUpdateInterval
int defaultFeedItemsToKeep
boolean defaultFeedSequential
int peerConnections
int leafConnections
int responderCacheSize
boolean startChatServer
int maxChatConnections
boolean advertiseChat
File chatWelcomeFile
Set<String> watchedDirectories
float downloadSequentialRatio
int hostClearInterval, hostHopelessInterval, hostRejectInterval
int hostClearInterval, hostHopelessInterval, hostRejectInterval, hostHopelessPurgeInterval
int meshExpiration
int speedSmoothSeconds
boolean embeddedRouter
boolean plugin
boolean disableUpdates
int inBw, outBw
Set<String> watchedKeywords
Set<String> watchedRegexes
@ -65,6 +81,7 @@ class MuWireSettings {
if (incompleteLocationProp != null)
incompleteLocation = new File(incompleteLocationProp)
downloadRetryInterval = Integer.parseInt(props.getProperty("downloadRetryInterval","60"))
downloadMaxFailures = Integer.parseInt(props.getProperty("downloadMaxFailures","10"))
updateCheckInterval = Integer.parseInt(props.getProperty("updateCheckInterval","24"))
lastUpdateCheck = Long.parseLong(props.getProperty("lastUpdateCheck","0"))
autoDownloadUpdate = Boolean.parseBoolean(props.getProperty("autoDownloadUpdate","true"))
@ -73,16 +90,36 @@ class MuWireSettings {
shareHiddenFiles = Boolean.parseBoolean(props.getProperty("shareHiddenFiles","false"))
downloadSequentialRatio = Float.valueOf(props.getProperty("downloadSequentialRatio","0.8"))
hostClearInterval = Integer.valueOf(props.getProperty("hostClearInterval","15"))
hostHopelessInterval = Integer.valueOf(props.getProperty("hostHopelessInterval", "1440"))
hostHopelessInterval = Integer.valueOf(props.getProperty("hostHopelessInterval", "60"))
hostRejectInterval = Integer.valueOf(props.getProperty("hostRejectInterval", "1"))
hostHopelessPurgeInterval = Integer.valueOf(props.getProperty("hostHopelessPurgeInterval","1440"))
meshExpiration = Integer.valueOf(props.getProperty("meshExpiration","60"))
embeddedRouter = Boolean.valueOf(props.getProperty("embeddedRouter","false"))
plugin = Boolean.valueOf(props.getProperty("plugin","false"))
disableUpdates = Boolean.valueOf(props.getProperty("disableUpdates","false"))
inBw = Integer.valueOf(props.getProperty("inBw","256"))
outBw = Integer.valueOf(props.getProperty("outBw","128"))
searchComments = Boolean.valueOf(props.getProperty("searchComments","true"))
browseFiles = Boolean.valueOf(props.getProperty("browseFiles","true"))
speedSmoothSeconds = Integer.valueOf(props.getProperty("speedSmoothSeconds","60"))
allowTracking = Boolean.valueOf(props.getProperty("allowTracking","true"))
// feed settings
fileFeed = Boolean.valueOf(props.getProperty("fileFeed","true"))
advertiseFeed = Boolean.valueOf(props.getProperty("advertiseFeed","true"))
autoPublishSharedFiles = Boolean.valueOf(props.getProperty("autoPublishSharedFiles", "false"))
defaultFeedAutoDownload = Boolean.valueOf(props.getProperty("defaultFeedAutoDownload", "false"))
defaultFeedItemsToKeep = Integer.valueOf(props.getProperty("defaultFeedItemsToKeep", "1000"))
defaultFeedSequential = Boolean.valueOf(props.getProperty("defaultFeedSequential", "false"))
defaultFeedUpdateInterval = Long.valueOf(props.getProperty("defaultFeedUpdateInterval", "3600000"))
// ultrapeer connection settings
leafConnections = Integer.valueOf(props.getProperty("leafConnections","512"))
peerConnections = Integer.valueOf(props.getProperty("peerConnections","128"))
// responder cache settings
responderCacheSize = Integer.valueOf(props.getProperty("responderCacheSize","32"))
speedSmoothSeconds = Integer.valueOf(props.getProperty("speedSmoothSeconds","10"))
totalUploadSlots = Integer.valueOf(props.getProperty("totalUploadSlots","-1"))
uploadSlotsPerUser = Integer.valueOf(props.getProperty("uploadSlotsPerUser","-1"))
startChatServer = Boolean.valueOf(props.getProperty("startChatServer","false"))
@ -120,6 +157,7 @@ class MuWireSettings {
if (incompleteLocation != null)
props.setProperty("incompleteLocation", incompleteLocation.getAbsolutePath())
props.setProperty("downloadRetryInterval", String.valueOf(downloadRetryInterval))
props.setProperty("downloadMaxFailures", String.valueOf(downloadMaxFailures))
props.setProperty("updateCheckInterval", String.valueOf(updateCheckInterval))
props.setProperty("lastUpdateCheck", String.valueOf(lastUpdateCheck))
props.setProperty("autoDownloadUpdate", String.valueOf(autoDownloadUpdate))
@ -130,13 +168,33 @@ class MuWireSettings {
props.setProperty("hostClearInterval", String.valueOf(hostClearInterval))
props.setProperty("hostHopelessInterval", String.valueOf(hostHopelessInterval))
props.setProperty("hostRejectInterval", String.valueOf(hostRejectInterval))
props.setProperty("hostHopelessPurgeInterval", String.valueOf(hostHopelessPurgeInterval))
props.setProperty("meshExpiration", String.valueOf(meshExpiration))
props.setProperty("embeddedRouter", String.valueOf(embeddedRouter))
props.setProperty("plugin", String.valueOf(plugin))
props.setProperty("disableUpdates", String.valueOf(disableUpdates))
props.setProperty("inBw", String.valueOf(inBw))
props.setProperty("outBw", String.valueOf(outBw))
props.setProperty("searchComments", String.valueOf(searchComments))
props.setProperty("browseFiles", String.valueOf(browseFiles))
props.setProperty("allowTracking", String.valueOf(allowTracking))
// feed settings
props.setProperty("fileFeed", String.valueOf(fileFeed))
props.setProperty("advertiseFeed", String.valueOf(advertiseFeed))
props.setProperty("autoPublishSharedFiles", String.valueOf(autoPublishSharedFiles))
props.setProperty("defaultFeedAutoDownload", String.valueOf(defaultFeedAutoDownload))
props.setProperty("defaultFeedItemsToKeep", String.valueOf(defaultFeedItemsToKeep))
props.setProperty("defaultFeedSequential", String.valueOf(defaultFeedSequential))
props.setProperty("defaultFeedUpdateInterval", String.valueOf(defaultFeedUpdateInterval))
// ultrapeer connection settings
props.setProperty("peerConnections", String.valueOf(peerConnections))
props.setProperty("leafConnections", String.valueOf(leafConnections))
// responder cache settings
props.setProperty("responderCacheSize", String.valueOf(responderCacheSize))
props.setProperty("speedSmoothSeconds", String.valueOf(speedSmoothSeconds))
props.setProperty("totalUploadSlots", String.valueOf(totalUploadSlots))
props.setProperty("uploadSlotsPerUser", String.valueOf(uploadSlotsPerUser))

View File

@ -233,7 +233,7 @@ class ChatConnection implements ChatLink {
daos.close()
byte [] signed = baos.toByteArray()
def spk = sender.destination.getSigningPublicKey()
def signature = new Signature(Constants.SIG_TYPE, sig)
def signature = new Signature(spk.getType(), sig)
DSAEngine.getInstance().verifySignature(signature, signed, spk)
}

View File

@ -29,6 +29,10 @@ class ChatManager {
timer.schedule({connect()} as TimerTask, 1000, 1000)
}
boolean isConnected(Persona p) {
clients.containsKey(p)
}
void onUIConnectChatEvent(UIConnectChatEvent e) {
if (e.host == me) {
eventBus.publish(new ChatConnectionEvent(status : ChatConnectionAttemptStatus.SUCCESSFUL,

View File

@ -60,6 +60,10 @@ class ChatServer {
echo(getWelcome(),me.destination)
}
public boolean isRunning() {
running.get()
}
private String getWelcome() {
String welcome = DEFAULT_WELCOME
if (settings.chatWelcomeFile != null)
@ -197,9 +201,19 @@ class ChatServer {
return
}
if ((command.action.console && e.room != CONSOLE) ||
(!command.action.console && e.room == CONSOLE) ||
!command.action.user)
if (command.action.console && e.room != CONSOLE) {
echo("/SAY ERROR: You can only execute that command in the chat console, not in a chat room.",
e.sender.destination, e.room)
return
}
if (!command.action.console && e.room == CONSOLE) {
echo("/SAY ERROR: You need to be in a chat room. Type /LIST for list of rooms or /JOIN to join or create a room.",
e.sender.destination)
return
}
if (!command.action.user)
return
if (command.action.local && e.sender != me)
@ -296,17 +310,17 @@ class ChatServer {
echo(help, d)
}
private void echo(String payload, Destination d) {
private void echo(String payload, Destination d, String room = CONSOLE) {
log.info "echoing $payload"
UUID uuid = UUID.randomUUID()
long now = System.currentTimeMillis()
byte [] sig = ChatConnection.sign(uuid, now, CONSOLE, payload, me, me, spk)
byte [] sig = ChatConnection.sign(uuid, now, room, payload, me, me, spk)
ChatMessageEvent echo = new ChatMessageEvent(
uuid : uuid,
payload : payload,
sender : me,
host : me,
room : CONSOLE,
room : room,
chatTime : now,
sig : sig
)

View File

@ -49,6 +49,8 @@ abstract class Connection implements Closeable {
protected final String name
long lastPingSentTime, lastPongReceivedTime
private volatile UUID lastPingUUID
Connection(EventBus eventBus, Endpoint endpoint, boolean incoming,
HostCache hostCache, TrustService trustService, MuWireSettings settings) {
@ -132,6 +134,8 @@ abstract class Connection implements Closeable {
def ping = [:]
ping.type = "Ping"
ping.version = 1
lastPingUUID = UUID.randomUUID()
ping.uuid = lastPingUUID.toString()
messages.put(ping)
lastPingSentTime = System.currentTimeMillis()
}
@ -160,12 +164,14 @@ abstract class Connection implements Closeable {
messages.put(query)
}
protected void handlePing() {
protected void handlePing(def ping) {
log.fine("$name received ping")
def pong = [:]
pong.type = "Pong"
pong.version = 1
pong.pongs = hostCache.getGoodHosts(10).collect { d -> d.toBase64() }
if (ping.uuid != null)
pong.uuid = ping.uuid
pong.pongs = hostCache.getGoodHosts(2).collect { d -> d.toBase64() }
messages.put(pong)
}
@ -174,7 +180,23 @@ abstract class Connection implements Closeable {
lastPongReceivedTime = System.currentTimeMillis()
if (pong.pongs == null)
throw new Exception("Pong doesn't have pongs")
pong.pongs.each {
if (lastPingUUID == null) {
log.fine "$name received an unexpected pong"
return
}
if (pong.uuid == null) {
log.fine "$name pong doesn't have uuid"
return
}
UUID pongUUID = UUID.fromString(pong.uuid)
if (pongUUID != lastPingUUID) {
log.fine "$name ping/pong uuid mismatch"
return
}
lastPingUUID = null
pong.pongs.stream().limit(2).forEach {
def dest = new Destination(it)
eventBus.publish(new HostDiscoveredEvent(destination: dest))
}
@ -244,7 +266,7 @@ abstract class Connection implements Closeable {
else
payload = String.join(" ",search.keywords).getBytes(StandardCharsets.UTF_8)
def spk = originator.destination.getSigningPublicKey()
def signature = new Signature(Constants.SIG_TYPE, sig)
def signature = new Signature(spk.getType(), sig)
if (!DSAEngine.getInstance().verifySignature(signature, payload, spk)) {
log.info("signature didn't match keywords")
return
@ -255,7 +277,6 @@ abstract class Connection implements Closeable {
return
}
// TODO: make this mandatory at some point
byte[] sig2 = null
long queryTime = 0
if (search.sig2 != null) {
@ -267,7 +288,7 @@ abstract class Connection implements Closeable {
queryTime = search.queryTime
byte [] payload = (search.uuid + String.valueOf(queryTime)).getBytes(StandardCharsets.US_ASCII)
def spk = originator.destination.getSigningPublicKey()
def signature = new Signature(Constants.SIG_TYPE, sig2)
def signature = new Signature(spk.getType(), sig2)
if (!DSAEngine.getInstance().verifySignature(signature, payload, spk)) {
log.info("extended signature didn't match uuid and timestamp")
return
@ -278,8 +299,10 @@ abstract class Connection implements Closeable {
return
}
}
} else
} else {
log.info("no extended signature in query")
return
}
SearchEvent searchEvent = new SearchEvent(searchTerms : search.keywords,
searchHash : infohash,

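For reference, a sketch of the versioned Ping/Pong payloads as built and parsed above; the field names come from sendPing(), handlePing() and handlePong(), while the values are invented.
// illustrative only
def ping = [type: "Ping", version: 1, uuid: "b1946ac9-0000-0000-0000-example"]
def pong = [type: "Pong",
            version: 1,
            uuid: ping.uuid,                          // must echo the last ping's uuid or the pong is ignored
            pongs: ["dest1Base64", "dest2Base64"]]    // getGoodHosts(2); the reader also caps at 2 hosts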
View File

@ -15,9 +15,11 @@ import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.SharedFile
import com.muwire.core.chat.ChatServer
import com.muwire.core.filecert.Certificate
import com.muwire.core.filecert.CertificateManager
import com.muwire.core.filefeeds.FeedItems
import com.muwire.core.files.FileManager
import com.muwire.core.hostcache.HostCache
import com.muwire.core.trust.TrustLevel
@ -58,7 +60,7 @@ class ConnectionAcceptor {
private volatile shutdown
private volatile int browsed
volatile int browsed
ConnectionAcceptor(EventBus eventBus, UltrapeerConnectionManager manager,
MuWireSettings settings, I2PAcceptor acceptor, HostCache hostCache,
@ -161,6 +163,9 @@ class ConnectionAcceptor {
case (byte)'I':
processIRC(e)
break
case (byte)'F':
processFEED(e)
break
default:
throw new Exception("Invalid read $read")
}
@ -310,6 +315,9 @@ class ConnectionAcceptor {
boolean chat = false
if (headers.containsKey('Chat'))
chat = Boolean.parseBoolean(headers['Chat'])
boolean feed = false
if (headers.containsKey('Feed'))
feed = Boolean.parseBoolean(headers['Feed'])
byte [] personaBytes = Base64.decode(headers['Sender'])
Persona sender = new Persona(new ByteArrayInputStream(personaBytes))
@ -329,6 +337,7 @@ class ConnectionAcceptor {
def json = slurper.parse(payload)
results[i] = ResultsParser.parse(sender, resultsUUID, json)
results[i].chat = chat
results[i].feed = feed
}
eventBus.publish(new UIResultBatchEvent(uuid: resultsUUID, results: results))
} catch (IOException bad) {
@ -371,16 +380,19 @@ class ConnectionAcceptor {
os.write("Count: ${sharedFiles.size()}\r\n".getBytes(StandardCharsets.US_ASCII))
boolean chat = chatServer.running.get() && settings.advertiseChat
boolean chat = chatServer.isRunning() && settings.advertiseChat
os.write("Chat: ${chat}\r\n".getBytes(StandardCharsets.US_ASCII))
boolean feed = settings.fileFeed && settings.advertiseFeed
os.write("Feed: ${feed}\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
DataOutputStream dos = new DataOutputStream(new GZIPOutputStream(os))
JsonOutput jsonOutput = new JsonOutput()
sharedFiles.each {
it.hit(browser, System.currentTimeMillis(), "Browse Host");
int certificates = certificateManager.getByInfoHash(it.getInfoHash()).size()
int certificates = certificateManager.getByInfoHash(new InfoHash(it.getRoot())).size()
def obj = ResultsSender.sharedFileToObj(it, false, certificates)
def json = jsonOutput.toJson(obj)
dos.writeShort((short)json.length())
@ -524,5 +536,58 @@ class ConnectionAcceptor {
throw new Exception("Invalid IRC connection")
chatServer.handle(e)
}
private void processFEED(Endpoint e) {
try {
byte[] EED = new byte[5];
DataInputStream dis = new DataInputStream(e.getInputStream())
dis.readFully(EED);
if (EED != "EED\r\n".getBytes(StandardCharsets.US_ASCII))
throw new Exception("Invalid FEED connection")
OutputStream os = e.getOutputStream()
Map<String, String> headers = DataUtil.readAllHeaders(dis)
if (!headers.containsKey("Persona"))
throw new Exception("Persona header missing")
Persona requestor = new Persona(new ByteArrayInputStream(Base64.decode(headers['Persona'])))
if (requestor.destination != e.destination)
throw new Exception("Requestor persona mismatch")
if (!settings.fileFeed) {
os.write("403 Not Allowed\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
e.close()
return
}
long timestamp = 0
if (headers.containsKey("Timestamp")) {
timestamp = Long.parseLong(headers['Timestamp'])
}
List<SharedFile> published = fileManager.getPublishedSince(timestamp)
os.write("200 OK\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Count: ${published.size()}\r\n".getBytes(StandardCharsets.US_ASCII));
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
DataOutputStream dos = new DataOutputStream(new GZIPOutputStream(os))
JsonOutput jsonOutput = new JsonOutput()
final long now = System.currentTimeMillis();
published.each {
it.hit(requestor, now, "Feed Update");
int certificates = certificateManager.getByInfoHash(new InfoHash(it.getRoot())).size()
def obj = FeedItems.sharedFileToObj(it, certificates)
def json = jsonOutput.toJson(obj)
dos.writeShort((short)json.length())
dos.write(json.getBytes(StandardCharsets.US_ASCII))
}
dos.flush()
dos.close()
} finally {
e.close()
}
}
}
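Putting processFEED() above together with the FeedClient further down, the exchange on the wire looks roughly like this (a sketch reconstructed from this diff, not a formal spec; the leading 'F' is consumed by the connection dispatcher):
client -> server:  FEED\r\n
                   Persona:<requestor persona, Base64>\r\n
                   Timestamp:<millis of last successful fetch, 0 for a full fetch>\r\n
                   \r\n
server -> client:  200 OK\r\n                          (403 Not Allowed when fileFeed is disabled)
                   Count: <number of items>\r\n
                   \r\n
                   <GZIP stream of Count entries, each an unsigned-short length followed by a JSON FeedItem>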

View File

@ -34,6 +34,8 @@ class ConnectionEstablisher {
final ExecutorService executor, closer
final Set inProgress = new ConcurrentHashSet()
private volatile boolean shutdown
ConnectionEstablisher(){}
@ -60,12 +62,15 @@ class ConnectionEstablisher {
}
void stop() {
shutdown = true
timer.cancel()
executor.shutdownNow()
closer.shutdownNow()
}
private void connectIfNeeded() {
if (shutdown)
return
if (!connectionManager.needsConnections())
return
if (inProgress.size() >= CONCURRENT)
@ -89,6 +94,8 @@ class ConnectionEstablisher {
}
private void connect(Destination toTry) {
if (shutdown)
return
log.info("starting connect to ${toTry.toBase32()}")
try {
def endpoint = i2pConnector.connect(toTry)
@ -123,6 +130,8 @@ class ConnectionEstablisher {
}
private void fail(Endpoint endpoint) {
if (shutdown)
return
if (!closer.isShutdown()) {
closer.execute {
endpoint.close()

View File

@ -53,7 +53,7 @@ class PeerConnection extends Connection {
if (json.type == null)
throw new Exception("missing json type")
switch(json.type) {
case "Ping" : handlePing(); break;
case "Ping" : handlePing(json); break;
case "Pong" : handlePong(json); break;
case "Search": handleSearch(json); break
default :

View File

@ -8,6 +8,7 @@ import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.hostcache.HostCache
import com.muwire.core.search.QueryEvent
import com.muwire.core.search.ResponderCache
import com.muwire.core.trust.TrustService
import groovy.util.logging.Log
@ -18,18 +19,22 @@ class UltrapeerConnectionManager extends ConnectionManager {
final int maxPeers, maxLeafs
final TrustService trustService
final ResponderCache responderCache
final Map<Destination, PeerConnection> peerConnections = new ConcurrentHashMap()
final Map<Destination, LeafConnection> leafConnections = new ConcurrentHashMap()
private final Random random = new Random()
UltrapeerConnectionManager() {}
public UltrapeerConnectionManager(EventBus eventBus, Persona me, int maxPeers, int maxLeafs,
HostCache hostCache, TrustService trustService, MuWireSettings settings) {
HostCache hostCache, ResponderCache responderCache, TrustService trustService, MuWireSettings settings) {
super(eventBus, me, hostCache, settings)
this.maxPeers = maxPeers
this.maxLeafs = maxLeafs
this.trustService = trustService
this.responderCache = responderCache
}
@Override
public void drop(Destination d) {
@ -44,8 +49,18 @@ class UltrapeerConnectionManager extends ConnectionManager {
if (e.replyTo != me.destination && e.receivedOn != me.destination &&
!leafConnections.containsKey(e.receivedOn))
e.firstHop = false
final int connCount = peerConnections.size()
if (connCount == 0)
return
final int threshold = (int)(Math.sqrt(connCount)) + 1
peerConnections.values().each {
if (e.getReceivedOn() != it.getEndpoint().getDestination())
// 1. do not send query back to originator
// 2. if firstHop forward to everyone
// 3. otherwise to everyone who has recently responded/transferred to us + randomized sqrt of neighbors
if (e.getReceivedOn() != it.getEndpoint().getDestination() &&
(e.firstHop ||
responderCache.hasResponded(it.endpoint.destination) ||
random.nextInt(connCount) < treshold))
it.sendQuery(e)
}
}
@ -105,8 +120,8 @@ class UltrapeerConnectionManager extends ConnectionManager {
@Override
void shutdown() {
super.shutdown()
peerConnections.values().stream().parallel().forEach({v -> v.close()})
leafConnections.values().stream().parallel().forEach({v -> v.close()})
peerConnections.values().stream().forEach({v -> v.close()})
leafConnections.values().stream().forEach({v -> v.close()})
peerConnections.clear()
leafConnections.clear()
}
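A quick worked example of the forwarding rule above: a query that is not a first hop is forwarded to every neighbor that has recently responded to us, and to each remaining neighbor with probability threshold/connCount, which keeps the random part of the fan-out near the square root of the peer count. The numbers below are illustrative only.
// illustrative arithmetic only
int connCount = 100
int threshold = (int) Math.sqrt(connCount) + 1   // 11
// each non-responder neighbor is picked with probability 11/100, so on average
// roughly 11 of 100 peers receive the query in addition to cached responders;
// first-hop queries still go to every neighbor except the originator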

View File

@ -0,0 +1,7 @@
package com.muwire.core.download
import com.muwire.core.Event
class DownloadHopelessEvent extends Event {
Downloader downloader
}

View File

@ -1,6 +1,7 @@
package com.muwire.core.download
import com.muwire.core.connection.I2PConnector
import com.muwire.core.filefeeds.UIDownloadFeedItemEvent
import com.muwire.core.files.FileDownloadedEvent
import com.muwire.core.files.FileHasher
import com.muwire.core.mesh.Mesh
@ -22,6 +23,8 @@ import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.UILoadedEvent
import com.muwire.core.chat.ChatManager
import com.muwire.core.chat.ChatServer
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.Executor
@ -34,16 +37,17 @@ public class DownloadManager {
private final EventBus eventBus
private final TrustService trustService
private final MeshManager meshManager
private final MuWireSettings muSettings
final MuWireSettings muSettings
private final I2PConnector connector
private final Executor executor
private final File home
private final Persona me
private final ChatServer chatServer
private final Map<InfoHash, Downloader> downloaders = new ConcurrentHashMap<>()
public DownloadManager(EventBus eventBus, TrustService trustService, MeshManager meshManager, MuWireSettings muSettings,
I2PConnector connector, File home, Persona me) {
I2PConnector connector, File home, Persona me, ChatServer chatServer) {
this.eventBus = eventBus
this.trustService = trustService
this.meshManager = meshManager
@ -51,6 +55,7 @@ public class DownloadManager {
this.connector = connector
this.home = home
this.me = me
this.chatServer = chatServer
this.executor = Executors.newCachedThreadPool({ r ->
Thread rv = new Thread(r)
@ -62,11 +67,6 @@ public class DownloadManager {
public void onUIDownloadEvent(UIDownloadEvent e) {
File incompletes = muSettings.incompleteLocation
if (incompletes == null)
incompletes = new File(home, "incompletes")
incompletes.mkdirs()
def size = e.result[0].size
def infohash = e.result[0].infohash
@ -79,12 +79,29 @@ public class DownloadManager {
destinations.addAll(e.sources)
destinations.remove(me.destination)
Pieces pieces = getPieces(infohash, size, pieceSize, e.sequential)
doDownload(infohash, e.target, size, pieceSize, e.sequential, destinations)
def downloader = new Downloader(eventBus, this, me, e.target, size,
infohash, pieceSize, connector, destinations,
incompletes, pieces)
downloaders.put(infohash, downloader)
}
public void onUIDownloadFeedItemEvent(UIDownloadFeedItemEvent e) {
Set<Destination> singleSource = new HashSet<>()
singleSource.add(e.item.getPublisher().getDestination())
doDownload(e.item.getInfoHash(), e.target, e.item.getSize(), e.item.getPieceSize(),
e.sequential, singleSource)
}
private void doDownload(InfoHash infoHash, File target, long size, int pieceSize,
boolean sequential, Set<Destination> destinations) {
File incompletes = muSettings.incompleteLocation
if (incompletes == null)
incompletes = new File(home, "incompletes")
incompletes.mkdirs()
Pieces pieces = getPieces(infoHash, size, pieceSize, sequential)
def downloader = new Downloader(eventBus, this, chatServer, me, target, size,
infoHash, pieceSize, connector, destinations,
incompletes, pieces, muSettings.downloadMaxFailures)
downloaders.put(infoHash, downloader)
persistDownloaders()
executor.execute({downloader.download()} as Runnable)
eventBus.publish(new DownloadStartedEvent(downloader : downloader))
@ -95,6 +112,11 @@ public class DownloadManager {
persistDownloaders()
}
public void onDownloadHopelessEvent(DownloadHopelessEvent e) {
downloaders.remove(e.downloader.infoHash)
persistDownloaders()
}
public void onUIDownloadPausedEvent(UIDownloadPausedEvent e) {
persistDownloaders()
}
@ -145,8 +167,8 @@ public class DownloadManager {
Pieces pieces = getPieces(infoHash, (long)json.length, json.pieceSizePow2, sequential)
def downloader = new Downloader(eventBus, this, me, file, (long)json.length,
infoHash, json.pieceSizePow2, connector, destinations, incompletes, pieces)
def downloader = new Downloader(eventBus, this, chatServer, me, file, (long)json.length,
infoHash, json.pieceSizePow2, connector, destinations, incompletes, pieces, muSettings.downloadMaxFailures)
if (json.paused != null)
downloader.paused = json.paused
@ -229,4 +251,8 @@ public class DownloadManager {
downloaders.values().each { it.stop() }
Downloader.executorService.shutdownNow()
}
public boolean isDownloading(InfoHash infoHash) {
downloaders.containsKey(infoHash)
}
}
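A minimal sketch of how a UI might trigger the new single-source feed download. The event fields are those of UIDownloadFeedItemEvent shown later in this diff; the eventBus variable, target file name and sequential flag are hypothetical placeholders.
// hypothetical usage: onUIDownloadFeedItemEvent() takes the publisher's destination
// as the only source and then goes through the shared doDownload() path
eventBus.publish(new UIDownloadFeedItemEvent(
    item       : feedItem,                          // a FeedItem fetched from the publisher's feed
    target     : new File(downloadsDir, "some-file.bin"),
    sequential : true))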

View File

@ -37,13 +37,15 @@ class DownloadSession {
private final long fileLength
private final Set<Integer> available
private final MessageDigest digest
private final boolean browse, feed, chat
private final AtomicLong dataSinceLastRead
private MappedByteBuffer mapped
DownloadSession(EventBus eventBus, String meB64, Pieces pieces, InfoHash infoHash, Endpoint endpoint, File file,
int pieceSize, long fileLength, Set<Integer> available, AtomicLong dataSinceLastRead) {
int pieceSize, long fileLength, Set<Integer> available, AtomicLong dataSinceLastRead,
boolean browse, boolean feed, boolean chat) {
this.eventBus = eventBus
this.meB64 = meB64
this.pieces = pieces
@ -54,6 +56,9 @@ class DownloadSession {
this.fileLength = fileLength
this.available = available
this.dataSinceLastRead = dataSinceLastRead
this.browse = browse
this.feed = feed
this.chat = chat
try {
digest = MessageDigest.getInstance("SHA-256")
} catch (NoSuchAlgorithmException impossible) {
@ -94,6 +99,12 @@ class DownloadSession {
os.write("GET $root\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Range: $start-$end\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("X-Persona: $meB64\r\n".getBytes(StandardCharsets.US_ASCII))
if (browse)
os.write("Browse: true\r\n".getBytes(StandardCharsets.US_ASCII))
if (feed)
os.write("Feed: true\r\n".getBytes(StandardCharsets.US_ASCII))
if (chat)
os.write("Chat: true\r\n".getBytes(StandardCharsets.US_ASCII))
String xHave = DataUtil.encodeXHave(pieces.getDownloaded(), pieces.nPieces)
os.write("X-Have: $xHave\r\n\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
@ -183,15 +194,14 @@ class DownloadSession {
mapped.position(position)
byte[] tmp = new byte[0x1 << 13]
DataInputStream dis = new DataInputStream(is)
while(mapped.hasRemaining()) {
if (mapped.remaining() < tmp.length)
tmp = new byte[mapped.remaining()]
int read = is.read(tmp)
if (read == -1)
throw new IOException()
dis.readFully(tmp)
synchronized(this) {
mapped.put(tmp, 0, read)
dataSinceLastRead.addAndGet(read)
mapped.put(tmp)
dataSinceLastRead.addAndGet(tmp.length)
pieces.markPartial(piece, mapped.position())
}
}
@ -205,6 +215,8 @@ class DownloadSession {
pieces.markPartial(piece, 0)
throw new BadHashException("bad hash on piece $piece")
}
eventBus.publish(new SourceVerifiedEvent(infoHash : infoHash, source : endpoint.destination))
} finally {
try { channel?.close() } catch (IOException ignore) {}
DataUtil.tryUnmap(mapped)

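With the change above, a piece request now also advertises the requester's capabilities. The headers actually written by request() end up looking like this (sketch; the range and persona values are made up):
GET <Base64 root hash>\r\n
Range: 0-131071\r\n
X-Persona: <my persona, Base64>\r\n
Browse: true\r\n        (only sent when browseFiles is enabled)
Feed: true\r\n          (only when fileFeed and advertiseFeed are both enabled)
Chat: true\r\n          (only when the chat server is running and advertiseChat is enabled)
X-Have: <encoded bitmap of downloaded pieces>\r\n
\r\n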
View File

@ -2,6 +2,8 @@ package com.muwire.core.download
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.chat.ChatManager
import com.muwire.core.chat.ChatServer
import com.muwire.core.connection.Endpoint
import java.nio.file.AtomicMoveNotSupportedException
@ -29,7 +31,7 @@ import net.i2p.util.ConcurrentHashSet
@Log
public class Downloader {
public enum DownloadState { CONNECTING, HASHLIST, DOWNLOADING, FAILED, CANCELLED, PAUSED, FINISHED }
public enum DownloadState { CONNECTING, HASHLIST, DOWNLOADING, FAILED, HOPELESS, CANCELLED, PAUSED, FINISHED }
private enum WorkerState { CONNECTING, HASHLIST, DOWNLOADING, FINISHED}
private static final ExecutorService executorService = Executors.newCachedThreadPool({r ->
@ -41,6 +43,7 @@ public class Downloader {
private final EventBus eventBus
private final DownloadManager downloadManager
private final ChatServer chatServer
private final Persona me
private final File file
private final Pieces pieces
@ -56,10 +59,14 @@ public class Downloader {
final int pieceSizePow2
private final Map<Destination, DownloadWorker> activeWorkers = new ConcurrentHashMap<>()
private final Set<Destination> successfulDestinations = new ConcurrentHashSet<>()
/** LOCKING: itself */
private final Map<Destination, Integer> failingDestinations = new HashMap<>()
private final int maxFailures
private volatile boolean cancelled, paused
private final AtomicBoolean eventFired = new AtomicBoolean()
private final AtomicBoolean hopelessEventFired = new AtomicBoolean()
private boolean piecesFileClosed
private final AtomicLong dataSinceLastRead = new AtomicLong(0)
@ -68,13 +75,14 @@ public class Downloader {
private int speedPos = 0
private int speedAvg = 0
public Downloader(EventBus eventBus, DownloadManager downloadManager,
public Downloader(EventBus eventBus, DownloadManager downloadManager, ChatServer chatServer,
Persona me, File file, long length, InfoHash infoHash,
int pieceSizePow2, I2PConnector connector, Set<Destination> destinations,
File incompletes, Pieces pieces) {
File incompletes, Pieces pieces, int maxFailures) {
this.eventBus = eventBus
this.me = me
this.downloadManager = downloadManager
this.chatServer = chatServer
this.file = file
this.infoHash = infoHash
this.length = length
@ -87,6 +95,7 @@ public class Downloader {
this.pieceSize = 1 << pieceSizePow2
this.pieces = pieces
this.nPieces = pieces.nPieces
this.maxFailures = maxFailures
}
public synchronized InfoHash getInfoHash() {
@ -116,7 +125,7 @@ public class Downloader {
void download() {
readPieces()
destinations.each {
if (it != me.destination) {
if (it != me.destination && !isHopeless(it)) {
def worker = new DownloadWorker(it)
activeWorkers.put(it, worker)
executorService.submit(worker)
@ -160,7 +169,7 @@ public class Downloader {
long dataRead = dataSinceLastRead.getAndSet(0)
long now = System.currentTimeMillis()
if (now > lastSpeedRead)
currSpeed = (int) (dataRead * 1000.0 / (now - lastSpeedRead))
currSpeed = (int) (dataRead * 1000.0d / (now - lastSpeedRead))
lastSpeedRead = now
}
@ -206,6 +215,8 @@ public class Downloader {
if (allFinished) {
if (pieces.isComplete())
return DownloadState.FINISHED
if (!hasLiveSources())
return DownloadState.HOPELESS
return DownloadState.FAILED
}
@ -269,11 +280,22 @@ public class Downloader {
public int getTotalWorkers() {
return activeWorkers.size();
}
public int countHopelessSources() {
synchronized(failingDestinations) {
return destinations.count { isHopeless(it)}
}
}
private boolean hasLiveSources() {
destinations.size() > countHopelessSources()
}
public void resume() {
paused = false
readPieces()
destinations.each { destination ->
destinations.stream().filter({!isHopeless(it)}).forEach { destination ->
log.fine("resuming source ${destination.toBase32()}")
def worker = activeWorkers.get(destination)
if (worker != null) {
if (worker.currentState == WorkerState.FINISHED) {
@ -290,8 +312,9 @@ public class Downloader {
}
void addSource(Destination d) {
if (activeWorkers.containsKey(d))
if (activeWorkers.containsKey(d) || isHopeless(d))
return
destinations.add(d)
DownloadWorker newWorker = new DownloadWorker(d)
activeWorkers.put(d, newWorker)
executorService.submit(newWorker)
@ -347,6 +370,28 @@ public class Downloader {
try {os?.close() } catch (IOException ignore) {}
}
}
private boolean isHopeless(Destination d) {
if (maxFailures < 0)
return false
synchronized(failingDestinations) {
return !successfulDestinations.contains(d) &&
failingDestinations.containsKey(d) &&
failingDestinations[d] >= maxFailures
}
}
private void markFailed(Destination d) {
log.fine("marking failed ${d.toBase32()}")
synchronized(failingDestinations) {
Integer count = failingDestinations.get(d)
if (count == null) {
failingDestinations.put(d, 1)
} else {
failingDestinations.put(d, count + 1)
}
}
}
class DownloadWorker implements Runnable {
private final Destination destination
@ -373,10 +418,16 @@ public class Downloader {
setInfoHash(received)
}
currentState = WorkerState.DOWNLOADING
boolean browse = downloadManager.muSettings.browseFiles
boolean feed = downloadManager.muSettings.fileFeed && downloadManager.muSettings.advertiseFeed
boolean chat = chatServer.isRunning() && downloadManager.muSettings.advertiseChat
boolean requestPerformed
while(!pieces.isComplete()) {
currentSession = new DownloadSession(eventBus, me.toBase64(), pieces, getInfoHash(),
endpoint, incompleteFile, pieceSize, length, available, dataSinceLastRead)
endpoint, incompleteFile, pieceSize, length, available, dataSinceLastRead,
browse, feed, chat)
requestPerformed = currentSession.request()
if (!requestPerformed)
break
@ -385,6 +436,9 @@ public class Downloader {
}
} catch (Exception bad) {
log.log(Level.WARNING,"Exception while downloading",DataUtil.findRoot(bad))
markFailed(destination)
if (!hasLiveSources() && hopelessEventFired.compareAndSet(false, true))
eventBus.publish(new DownloadHopelessEvent(downloader : Downloader.this))
} finally {
writePieces()
currentState = WorkerState.FINISHED
@ -405,8 +459,9 @@ public class Downloader {
}
eventBus.publish(
new FileDownloadedEvent(
downloadedFile : new DownloadedFile(file.getCanonicalFile(), getInfoHash(), pieceSizePow2, successfulDestinations),
downloader : Downloader.this))
downloadedFile : new DownloadedFile(file.getCanonicalFile(), getInfoHash().getRoot(), pieceSizePow2, successfulDestinations),
downloader : Downloader.this,
infoHash: getInfoHash()))
}
endpoint?.close()

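To summarize the bookkeeping above: a source becomes hopeless once it has failed maxFailures times without ever delivering a verified piece (a negative maxFailures disables the mechanism), and the hopeless event fires exactly once, when no live sources remain. A small illustration under the default of 10:
// illustrative only, using the default downloadMaxFailures = 10
// after the 10th failure of a source that never verified a piece:
//   isHopeless(source)     -> true   (the source is skipped at download start, on resume and in addSource)
//   countHopelessSources() -> grows by one
//   hasLiveSources()       -> false once every destination is hopeless, at which point
//                             a single DownloadHopelessEvent is published (guarded by hopelessEventFired)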
View File

@ -2,10 +2,11 @@ package com.muwire.core.download
class Pieces {
private final BitSet done, claimed
private final int nPieces
final int nPieces
private final float ratio
private final Random random = new Random()
private final Map<Integer,Integer> partials = new HashMap<>()
private int cachedDone;
Pieces(int nPieces) {
this(nPieces, 1.0f)
@ -78,6 +79,7 @@ class Pieces {
if (piece >= nPieces)
throw new IllegalArgumentException("invalid piece marked as downloaded? $piece/$nPieces")
done.set(piece)
cachedDone = done.cardinality();
claimed.set(piece)
partials.remove(piece)
}
@ -91,11 +93,11 @@ class Pieces {
}
synchronized boolean isComplete() {
done.cardinality() == nPieces
cachedDone == nPieces
}
synchronized int donePieces() {
done.cardinality()
cachedDone
}
synchronized boolean isDownloaded(int piece) {
@ -104,6 +106,7 @@ class Pieces {
synchronized void clearAll() {
done.clear()
cachedDone = 0
claimed.clear()
partials.clear()
}

View File

@ -0,0 +1,11 @@
package com.muwire.core.download
import com.muwire.core.Event
import com.muwire.core.InfoHash
import net.i2p.data.Destination
class SourceVerifiedEvent extends Event {
InfoHash infoHash
Destination source
}

View File

@ -105,7 +105,7 @@ class Certificate {
byte [] payload = baos.toByteArray()
SigningPublicKey spk = issuer.destination.getSigningPublicKey()
Signature signature = new Signature(Constants.SIG_TYPE, sig)
Signature signature = new Signature(spk.getType(), sig)
DSAEngine.getInstance().verifySignature(signature, payload, spk)
}

View File

@ -70,7 +70,7 @@ class CertificateManager {
}
void onUICreateCertificateEvent(UICreateCertificateEvent e) {
InfoHash infoHash = e.sharedFile.getInfoHash()
InfoHash infoHash = new InfoHash(e.sharedFile.getRoot())
String name = e.sharedFile.getFile().getName()
long timestamp = System.currentTimeMillis()

View File

@ -0,0 +1,110 @@
package com.muwire.core.filefeeds
import java.util.logging.Level
import java.nio.charset.StandardCharsets
import java.util.concurrent.Executor
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.zip.GZIPInputStream
import com.muwire.core.EventBus
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
import com.muwire.core.connection.I2PConnector
import com.muwire.core.util.DataUtil
import groovy.json.JsonSlurper
import groovy.util.logging.Log
@Log
class FeedClient {
private final I2PConnector connector
private final EventBus eventBus
private final Persona me
private final FeedManager feedManager
private final ExecutorService feedFetcher = Executors.newCachedThreadPool()
private final Timer feedUpdater = new Timer("feed-updater", true)
FeedClient(I2PConnector connector, EventBus eventBus, Persona me, FeedManager feedManager) {
this.connector = connector
this.eventBus = eventBus
this.me = me
this.feedManager = feedManager
}
private void start() {
feedUpdater.schedule({updateAnyFeeds()} as TimerTask, 60000, 60000)
}
private void stop() {
feedUpdater.cancel()
feedFetcher.shutdown()
}
private void updateAnyFeeds() {
feedManager.getFeedsToUpdate().each { feed ->
feedFetcher.execute({updateFeed(feed)} as Runnable)
}
}
void onUIFeedUpdateEvent(UIFeedUpdateEvent e) {
Feed feed = feedManager.getFeed(e.host)
if (feed == null) {
log.severe("UI request to update non-existent feed " + e.host.getHumanReadableName())
return
}
feedFetcher.execute({updateFeed(feed)} as Runnable)
}
private void updateFeed(Feed feed) {
log.info("updating feed " + feed.getPublisher().getHumanReadableName())
Endpoint endpoint = null
try {
eventBus.publish(new FeedFetchEvent(host : feed.getPublisher(), status : FeedFetchStatus.CONNECTING))
feed.setLastUpdateAttempt(System.currentTimeMillis())
endpoint = connector.connect(feed.getPublisher().getDestination())
OutputStream os = endpoint.getOutputStream()
os.write("FEED\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Persona:${me.toBase64()}\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Timestamp:${feed.getLastUpdated()}\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
os.flush()
InputStream is = endpoint.getInputStream()
String code = DataUtil.readTillRN(is)
if (!code.startsWith("200"))
throw new IOException("Invalid code $code")
// parse all headers
Map<String,String> headers = DataUtil.readAllHeaders(is)
if (!headers.containsKey("Count"))
throw new IOException("No count header")
int items = Integer.parseInt(headers['Count'])
eventBus.publish(new FeedFetchEvent(host : feed.getPublisher(), status : FeedFetchStatus.FETCHING, totalItems: items))
JsonSlurper slurper = new JsonSlurper()
DataInputStream dis = new DataInputStream(new GZIPInputStream(is))
for (int i = 0; i < items; i++) {
int size = dis.readUnsignedShort()
byte [] tmp = new byte[size]
dis.readFully(tmp)
def json = slurper.parse(tmp)
FeedItem item = FeedItems.objToFeedItem(json, feed.getPublisher())
eventBus.publish(new FeedItemFetchedEvent(item: item))
}
eventBus.publish(new FeedFetchEvent(host : feed.getPublisher(), status : FeedFetchStatus.FINISHED))
} catch (Exception bad) {
log.log(Level.WARNING, "Feed update failed", bad)
eventBus.publish(new FeedFetchEvent(host : feed.getPublisher(), status : FeedFetchStatus.FAILED))
} finally {
endpoint?.close()
}
}
}

View File

@ -0,0 +1,10 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
import com.muwire.core.Persona
class FeedFetchEvent extends Event {
Persona host
FeedFetchStatus status
int totalItems
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
class FeedItemFetchedEvent extends Event {
FeedItem item
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
class FeedItemLoadedEvent extends Event {
FeedItem item
}

View File

@ -0,0 +1,79 @@
package com.muwire.core.filefeeds
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.SharedFile
import com.muwire.core.files.FileHasher
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
class FeedItems {
public static def sharedFileToObj(SharedFile sf, int certificates) {
def json = [:]
json.type = "FeedItem"
json.version = 1
json.name = Base64.encode(DataUtil.encodei18nString(sf.getFile().getName()))
json.infoHash = Base64.encode(sf.getRoot())
json.size = sf.getCachedLength()
json.pieceSize = sf.getPieceSize()
if (sf.getComment() != null)
json.comment = sf.getComment()
json.certificates = certificates
json.timestamp = sf.getPublishedTimestamp()
json
}
public static FeedItem objToFeedItem(def obj, Persona publisher) throws InvalidFeedItemException {
if (obj.timestamp == null)
throw new InvalidFeedItemException("No timestamp");
if (obj.name == null)
throw new InvalidFeedItemException("No name");
if (obj.size == null || obj.size <= 0 || obj.size > FileHasher.MAX_SIZE)
throw new InvalidFeedItemException("length missing or invalid ${obj.size}")
if (obj.pieceSize == null || obj.pieceSize < FileHasher.MIN_PIECE_SIZE_POW2 || obj.pieceSize > FileHasher.MAX_PIECE_SIZE_POW2)
throw new InvalidFeedItemException("piece size missing or invalid ${obj.pieceSize}")
if (obj.infoHash == null)
throw new InvalidFeedItemException("Infohash missing")
InfoHash infoHash
try {
infoHash = new InfoHash(Base64.decode(obj.infoHash))
} catch (Exception bad) {
throw new InvalidFeedItemException("Invalid infohash", bad)
}
String name
try {
name = DataUtil.readi18nString(Base64.decode(obj.name))
} catch (Exception bad) {
throw new InvalidFeedItemException("Invalid name", bad)
}
int certificates = 0
if (obj.certificates != null)
certificates = obj.certificates
new FeedItem(publisher, obj.timestamp, name, obj.size, obj.pieceSize, infoHash, certificates, obj.comment)
}
public static def feedItemToObj(FeedItem item) {
def json = [:]
json.type = "FeedItem"
json.version = 1
json.name = Base64.encode(DataUtil.encodei18nString(item.getName()))
json.infoHash = Base64.encode(item.getInfoHash().getRoot())
json.size = item.getSize()
json.pieceSize = item.getPieceSize()
json.timestamp = item.getTimestamp()
json.certificates = item.getCertificates()
json.comment = item.getComment()
json
}
}
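For reference, the map that sharedFileToObj() above produces (and objToFeedItem() expects) looks like this; the values are invented and the Base64 strings are placeholders.
// illustrative only
def json = [
    type         : "FeedItem",
    version      : 1,
    name         : "bXktZmlsZS50eHQ=",   // Base64 of the i18n-encoded file name
    infoHash     : "<Base64 of the 32-byte root hash>",
    size         : 1234567L,
    pieceSize    : 17,                   // power-of-2 exponent, bounded by FileHasher limits
    certificates : 0,
    timestamp    : 1600000000000L,
    comment      : "optional"            // only present when the file has a comment
]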

View File

@ -0,0 +1,7 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
class FeedLoadedEvent extends Event {
Feed feed
}

View File

@ -0,0 +1,225 @@
package com.muwire.core.filefeeds
import java.nio.file.Files
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory
import java.util.stream.Collectors
import com.muwire.core.EventBus
import com.muwire.core.Persona
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
import net.i2p.data.Base64
import net.i2p.util.ConcurrentHashSet
@Log
class FeedManager {
private final EventBus eventBus
private final File metadataFolder, itemsFolder
private final Map<Persona, Feed> feeds = new ConcurrentHashMap<>()
private final Map<Persona, Set<FeedItem>> feedItems = new ConcurrentHashMap<>()
private final ExecutorService persister = Executors.newSingleThreadExecutor({r ->
new Thread(r, "feed persister")
} as ThreadFactory)
FeedManager(EventBus eventBus, File home) {
this.eventBus = eventBus
File feedsFolder = new File(home, "filefeeds")
if (!feedsFolder.exists())
feedsFolder.mkdir()
this.metadataFolder = new File(feedsFolder, "metadata")
if (!metadataFolder.exists())
metadataFolder.mkdir()
this.itemsFolder = new File(feedsFolder, "items")
if (!itemsFolder.exists())
itemsFolder.mkdir()
}
public Feed getFeed(Persona persona) {
feeds.get(persona)
}
public Set<FeedItem> getFeedItems(Persona persona) {
feedItems.getOrDefault(persona, Collections.emptySet())
}
public List<Feed> getFeedsToUpdate() {
long now = System.currentTimeMillis()
feeds.values().stream().
filter({Feed f -> !f.getStatus().isActive()}).
filter({Feed f -> f.getLastUpdateAttempt() + f.getUpdateInterval() <= now})
.collect(Collectors.toList())
}
void start() {
log.info("starting feed manager")
persister.submit({loadFeeds()} as Runnable)
persister.submit({loadItems()} as Runnable)
}
void stop() {
persister.shutdown()
}
private void loadFeeds() {
def slurper = new JsonSlurper()
Files.walk(metadataFolder.toPath()).
filter( { it.getFileName().toString().endsWith(".json")}).
forEach( {
def parsed = slurper.parse(it.toFile())
Persona publisher = new Persona(new ByteArrayInputStream(Base64.decode(parsed.publisher)))
Feed feed = new Feed(publisher)
feed.setUpdateInterval(parsed.updateInterval)
feed.setLastUpdated(parsed.lastUpdated)
feed.setLastUpdateAttempt(parsed.lastUpdateAttempt)
feed.setItemsToKeep(parsed.itemsToKeep)
feed.setAutoDownload(parsed.autoDownload)
feed.setSequential(parsed.sequential)
feed.setStatus(FeedFetchStatus.IDLE)
feeds.put(feed.getPublisher(), feed)
eventBus.publish(new FeedLoadedEvent(feed : feed))
})
}
private void loadItems() {
def slurper = new JsonSlurper()
feeds.keySet().each { persona ->
File itemsFile = getItemsFile(feeds[persona])
if (!itemsFile.exists())
return // no items yet?
itemsFile.eachLine { line ->
def parsed = slurper.parseText(line)
FeedItem item = FeedItems.objToFeedItem(parsed, persona)
Set<FeedItem> items = feedItems.get(persona)
if (items == null) {
items = new ConcurrentHashSet<>()
feedItems.put(persona, items)
}
items.add(item)
eventBus.publish(new FeedItemLoadedEvent(item : item))
}
}
}
void onFeedItemFetchedEvent(FeedItemFetchedEvent e) {
Set<FeedItem> set = feedItems.get(e.item.getPublisher())
if (set == null) {
set = new ConcurrentHashSet<>()
feedItems.put(e.getItem().getPublisher(), set)
}
set.add(e.item)
}
void onFeedFetchEvent(FeedFetchEvent e) {
Feed feed = feeds.get(e.host)
if (feed == null) {
log.severe("Fetching non-existent feed " + e.host.getHumanReadableName())
return
}
feed.setStatus(e.status)
if (e.status.isActive())
return
if (e.status == FeedFetchStatus.FINISHED) {
feed.setStatus(FeedFetchStatus.IDLE)
feed.setLastUpdated(e.getTimestamp())
}
// save feed items, then save feed. This will save partial fetches too
// which is ok because the items are stored in a Set
persister.submit({saveFeedItems(e.host)} as Runnable)
persister.submit({saveFeedMetadata(feed)} as Runnable)
}
void onUIFeedConfigurationEvent(UIFeedConfigurationEvent e) {
feeds.put(e.feed.getPublisher(), e.feed)
persister.submit({saveFeedMetadata(e.feed)} as Runnable)
}
void onUIFeedDeletedEvent(UIFeedDeletedEvent e) {
Feed f = feeds.get(e.host)
if (f == null) {
log.severe("Deleting a non-existing feed " + e.host.getHumanReadableName())
return
}
persister.submit({deleteFeed(f)} as Runnable)
}
private void saveFeedItems(Persona publisher) {
Set<FeedItem> set = feedItems.get(publisher)
if (set == null)
return // can happen if nothing was published
Feed feed = feeds[publisher]
if (feed == null) {
log.severe("Persisting items for non-existing feed " + publisher.getHumanReadableName())
return
}
if (feed.getItemsToKeep() == 0)
return
List<FeedItem> list = new ArrayList<>(set)
if (feed.getItemsToKeep() > 0 && list.size() > feed.getItemsToKeep()) {
log.info("will persist ${feed.getItemsToKeep()}/${list.size()} items")
list.sort({l, r ->
Long.compare(r.getTimestamp(), l.getTimestamp())
} as Comparator<FeedItem>)
list = list[0..feed.getItemsToKeep() - 1]
}
File itemsFile = getItemsFile(feed)
itemsFile.withPrintWriter { writer ->
list.each { item ->
def obj = FeedItems.feedItemToObj(item)
def json = JsonOutput.toJson(obj)
writer.println(json)
}
}
}
private void saveFeedMetadata(Feed feed) {
File metadataFile = getMetadataFile(feed)
metadataFile.withPrintWriter { writer ->
def json = [:]
json.publisher = feed.getPublisher().toBase64()
json.itemsToKeep = feed.getItemsToKeep()
json.lastUpdated = feed.getLastUpdated()
json.updateInterval = feed.getUpdateInterval()
json.autoDownload = feed.isAutoDownload()
json.sequential = feed.isSequential()
json.lastUpdateAttempt = feed.getLastUpdateAttempt()
json = JsonOutput.toJson(json)
writer.println(json)
}
}
private void deleteFeed(Feed feed) {
feeds.remove(feed.getPublisher())
feedItems.remove(feed.getPublisher())
getItemsFile(feed).delete()
getMetadataFile(feed).delete()
}
private File getItemsFile(Feed feed) {
return new File(itemsFolder, feed.getPublisher().destination.toBase32() + ".json")
}
private File getMetadataFile(Feed feed) {
return new File(metadataFolder, feed.getPublisher().destination.toBase32() + ".json")
}
}
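The persistence layout implied by the code above: each subscribed feed gets two files under <home>/filefeeds, keyed by the publisher's base32 destination. A sketch of the metadata object written by saveFeedMetadata() follows; the values are invented.
// <home>/filefeeds/metadata/<publisher-base32>.json  -- one JSON object
// <home>/filefeeds/items/<publisher-base32>.json     -- one JSON FeedItem per line
def metadata = [
    publisher         : "<publisher persona, Base64>",
    itemsToKeep       : 1000,            // 0 persists nothing; a negative value keeps everything
    lastUpdated       : 1600000000000L,
    updateInterval    : 3600000L,        // millis, matching the MuWireSettings default above
    autoDownload      : false,
    sequential        : false,
    lastUpdateAttempt : 1600000060000L
]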

View File

@ -0,0 +1,9 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
class UIDownloadFeedItemEvent extends Event {
FeedItem item
File target
boolean sequential
}

View File

@ -0,0 +1,12 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
/**
* Emitted when configuration of a feed changes.
* The object should already contain the updated values.
*/
class UIFeedConfigurationEvent extends Event {
Feed feed
boolean newFeed
}

View File

@ -0,0 +1,8 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
import com.muwire.core.Persona
class UIFeedDeletedEvent extends Event {
Persona host
}

View File

@ -0,0 +1,8 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
import com.muwire.core.Persona
class UIFeedUpdateEvent extends Event {
Persona host
}

View File

@ -0,0 +1,8 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
import com.muwire.core.SharedFile
class UIFilePublishedEvent extends Event {
SharedFile sf
}

View File

@ -0,0 +1,8 @@
package com.muwire.core.filefeeds
import com.muwire.core.Event
import com.muwire.core.SharedFile
class UIFileUnpublishedEvent extends Event {
SharedFile sf
}

View File

@ -0,0 +1,173 @@
package com.muwire.core.files
import com.muwire.core.DownloadedFile
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.Service
import com.muwire.core.SharedFile
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
import net.i2p.data.Destination
import java.util.stream.Collectors
abstract class BasePersisterService extends Service{
protected static FileLoadedEvent fromJson(def json) {
if (json.file == null || json.length == null || json.infoHash == null || json.hashList == null)
throw new IllegalArgumentException()
if (!(json.hashList instanceof List))
throw new IllegalArgumentException()
def file = new File(DataUtil.readi18nString(Base64.decode(json.file)))
file = file.getCanonicalFile()
if (!file.exists() || file.isDirectory())
return null
long length = Long.valueOf(json.length)
if (length != file.length())
return null
List hashList = (List) json.hashList
ByteArrayOutputStream baos = new ByteArrayOutputStream()
hashList.each {
byte [] hash = Base64.decode it.toString()
if (hash == null)
throw new IllegalArgumentException()
baos.write hash
}
byte[] hashListBytes = baos.toByteArray()
InfoHash ih = InfoHash.fromHashList(hashListBytes)
byte [] root = Base64.decode(json.infoHash.toString())
if (root == null)
throw new IllegalArgumentException()
if (!Arrays.equals(root, ih.getRoot()))
return null
int pieceSize = 0
if (json.pieceSize != null)
pieceSize = json.pieceSize
if (json.sources != null) {
List sources = (List)json.sources
Set<Destination> sourceSet = sources.stream().map({ d -> new Destination(d.toString())}).collect Collectors.toSet()
DownloadedFile df = new DownloadedFile(file, ih.getRoot(), pieceSize, sourceSet)
df.setComment(json.comment)
return new FileLoadedEvent(loadedFile : df, infoHash: ih)
}
SharedFile sf = new SharedFile(file, ih.getRoot(), pieceSize)
sf.setComment(json.comment)
if (json.downloaders != null)
sf.getDownloaders().addAll(json.downloaders)
if (json.searchers != null) {
json.searchers.each {
Persona searcher = null
if (it.searcher != null)
searcher = new Persona(new ByteArrayInputStream(Base64.decode(it.searcher)))
long timestamp = it.timestamp
String query = it.query
sf.hit(searcher, timestamp, query)
}
}
return new FileLoadedEvent(loadedFile: sf, infoHash: ih)
}
protected static FileLoadedEvent fromJsonLite(json) {
if (json.file == null || json.length == null || json.root == null)
throw new IllegalArgumentException()
def file = new File(DataUtil.readi18nString(Base64.decode(json.file)))
file = file.getCanonicalFile()
if (!file.exists() || file.isDirectory())
return null
long length = Long.valueOf(json.length)
if (length != file.length())
return null
byte[] root = Base64.decode(json.root)
InfoHash ih = new InfoHash(root)
int pieceSize = 0
if (json.pieceSize != null)
pieceSize = json.pieceSize
boolean published = false
long publishedTimestamp = -1
if (json.published != null && json.published) {
published = true
publishedTimestamp = json.publishedTimestamp
}
if (json.sources != null) {
List sources = (List)json.sources
Set<Destination> sourceSet = sources.stream().map({ d -> new Destination(d.toString())}).collect Collectors.toSet()
DownloadedFile df = new DownloadedFile(file, ih.getRoot(), pieceSize, sourceSet)
if (published)
df.publish(publishedTimestamp)
df.setComment(json.comment)
return new FileLoadedEvent(loadedFile : df, infoHash: ih)
}
SharedFile sf = new SharedFile(file, ih.getRoot(), pieceSize)
sf.setComment(json.comment)
if (published)
sf.publish(publishedTimestamp)
if (json.downloaders != null)
sf.getDownloaders().addAll(json.downloaders)
if (json.searchers != null) {
json.searchers.each {
Persona searcher = null
if (it.searcher != null) {
try {
searcher = new Persona(new ByteArrayInputStream(Base64.decode(it.searcher)))
} catch (Exception ignore) {
return
}
}
long timestamp = it.timestamp
String query = it.query
sf.hit(searcher, timestamp, query)
}
}
return new FileLoadedEvent(loadedFile: sf, infoHash: ih)
}
protected static toJson(SharedFile sf) {
def json = [:]
json.file = sf.getB64EncodedFileName()
json.length = sf.getCachedLength()
json.root = Base64.encode(sf.getRoot())
json.pieceSize = sf.getPieceSize()
json.comment = sf.getComment()
json.hits = sf.getHits()
json.downloaders = sf.getDownloaders()
if (!sf.searches.isEmpty()) {
Set searchers = new HashSet<>()
sf.searches.each {
def search = [:]
if (it.searcher != null)
search.searcher = it.searcher.toBase64()
search.timestamp = it.timestamp
search.query = it.query
searchers.add(search)
}
json.searchers = searchers
}
if (sf instanceof DownloadedFile) {
json.sources = sf.sources.stream().map( {d -> d.toBase64()}).collect(Collectors.toList())
}
if (sf.isPublished()) {
json.published = true
json.publishedTimestamp = sf.getPublishedTimestamp()
}
json
}
}
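
For orientation, the "lite" record written by toJson(SharedFile) above and read back by fromJsonLite looks roughly like the following Groovy map; the keys mirror the code, while every value here is invented for illustration:

    def record = [
        file              : 'L2hvbWUvdXNlci9tdXNpYy9hLmZsYWM=', // Base64 of the i18n-encoded absolute path
        length            : 12345678L,
        root              : 'q83vEjRWeJ...',                    // Base64 of the root hash
        pieceSize         : 17,
        comment           : null,
        hits              : 0,
        downloaders       : [],
        published         : true,
        publishedTimestamp: 1600000000000L
    ]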

View File

@ -4,8 +4,9 @@ import com.muwire.core.Event
class DirectoryUnsharedEvent extends Event {
File directory
boolean deleted
public String toString() {
super.toString() + " unshared directory "+ directory.toString()
super.toString() + " unshared directory "+ directory.toString() + " deleted $deleted"
}
}

View File

@ -15,6 +15,9 @@ import java.util.concurrent.ConcurrentHashMap
import com.muwire.core.EventBus
import com.muwire.core.MuWireSettings
import com.muwire.core.SharedFile
import com.muwire.core.files.directories.WatchedDirectoryConfigurationEvent
import com.muwire.core.files.directories.WatchedDirectoryConvertedEvent
import com.muwire.core.files.directories.WatchedDirectoryManager
import groovy.util.logging.Log
import net.i2p.util.SystemVersion
@ -33,27 +36,27 @@ class DirectoryWatcher {
}
private final File home
private final MuWireSettings muOptions
private final EventBus eventBus
private final FileManager fileManager
private final WatchedDirectoryManager watchedDirectoryManager
private final Thread watcherThread, publisherThread
private final Map<File, Long> waitingFiles = new ConcurrentHashMap<>()
private final Map<File, WatchKey> watchedDirectories = new ConcurrentHashMap<>()
private WatchService watchService
private volatile boolean shutdown
DirectoryWatcher(EventBus eventBus, FileManager fileManager, File home, MuWireSettings muOptions) {
DirectoryWatcher(EventBus eventBus, FileManager fileManager, File home, WatchedDirectoryManager watchedDirectoryManager) {
this.home = home
this.muOptions = muOptions
this.eventBus = eventBus
this.fileManager = fileManager
this.watchedDirectoryManager = watchedDirectoryManager
this.watcherThread = new Thread({watch() } as Runnable, "directory-watcher")
watcherThread.setDaemon(true)
this.publisherThread = new Thread({publish()} as Runnable, "watched-files-publisher")
publisherThread.setDaemon(true)
}
void onAllFilesLoadedEvent(AllFilesLoadedEvent e) {
void onWatchedDirectoryConvertedEvent(WatchedDirectoryConvertedEvent e) {
watchService = FileSystems.getDefault().newWatchService()
watcherThread.start()
publisherThread.start()
@ -71,26 +74,26 @@ class DirectoryWatcher {
Path path = canonical.toPath()
WatchKey wk = path.register(watchService, kinds)
watchedDirectories.put(canonical, wk)
if (muOptions.watchedDirectories.add(canonical.toString()))
saveMuSettings()
}
void onDirectoryUnsharedEvent(DirectoryUnsharedEvent e) {
WatchKey wk = watchedDirectories.remove(e.directory)
wk?.cancel()
if (muOptions.watchedDirectories.remove(e.directory.toString()))
saveMuSettings()
}
private void saveMuSettings() {
File muSettingsFile = new File(home, "MuWire.properties")
muSettingsFile.withPrintWriter("UTF-8", {
muOptions.write(it)
})
void onWatchedDirectoryConfigurationEvent(WatchedDirectoryConfigurationEvent e) {
if (watchService == null)
return // still converting
if (!e.autoWatch) {
WatchKey wk = watchedDirectories.remove(e.directory)
wk?.cancel()
} else if (!watchedDirectories.containsKey(e.directory)) {
Path path = e.directory.toPath()
def wk = path.register(watchService, kinds)
watchedDirectories.put(e.directory, wk)
} // else it was already watched
}
private void watch() {
try {
while(!shutdown) {
@ -115,7 +118,7 @@ class DirectoryWatcher {
File f= join(parent, path)
log.fine("created entry $f")
if (f.isDirectory())
f.toPath().register(watchService, kinds)
eventBus.publish(new FileSharedEvent(file : f, fromWatch : true))
else
waitingFiles.put(f, System.currentTimeMillis())
}
@ -133,6 +136,10 @@ class DirectoryWatcher {
SharedFile sf = fileManager.fileToSharedFile.get(f)
if (sf != null)
eventBus.publish(new FileUnsharedEvent(unsharedFile : sf, deleted : true))
else if (watchedDirectoryManager.isWatched(f))
eventBus.publish(new DirectoryUnsharedEvent(directory : f, deleted : true))
else
log.fine("Entry was not relevant");
}
private static File join(Path parent, Path path) {
@ -149,7 +156,7 @@ class DirectoryWatcher {
waitingFiles.each { file, timestamp ->
if (now - timestamp > WAIT_TIME) {
log.fine("publishing file $file")
eventBus.publish new FileSharedEvent(file : file)
eventBus.publish new FileSharedEvent(file : file, fromWatch: true)
published << file
}
}
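
The waitingFiles map plus the publisher thread above implement a simple debounce: a file seen by the watcher is only announced once it has stopped changing for WAIT_TIME. A minimal standalone sketch of that pattern; the 3000 ms value and the announce closure are illustrative assumptions, not taken from the patch:

    import java.util.concurrent.ConcurrentHashMap

    final long WAIT_TIME = 3000
    def waitingFiles = new ConcurrentHashMap<File, Long>()

    // called whenever the watcher sees a create/modify event for a regular file
    def noteChange = { File f -> waitingFiles.put(f, System.currentTimeMillis()) }

    // called periodically by the publisher thread
    def publishQuietFiles = { Closure announce ->
        long now = System.currentTimeMillis()
        def ready = waitingFiles.findAll { file, ts -> now - ts > WAIT_TIME }.keySet()
        ready.each { file ->
            waitingFiles.remove(file)
            announce(file)   // e.g. eventBus.publish(new FileSharedEvent(file: file, fromWatch: true))
        }
    }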

View File

@ -2,6 +2,7 @@ package com.muwire.core.files
import com.muwire.core.DownloadedFile
import com.muwire.core.Event
import com.muwire.core.InfoHash
import com.muwire.core.download.Downloader
import net.i2p.data.Destination
@ -9,4 +10,5 @@ import net.i2p.data.Destination
class FileDownloadedEvent extends Event {
Downloader downloader
DownloadedFile downloadedFile
InfoHash infoHash
}

View File

@ -1,16 +1,18 @@
package com.muwire.core.files
import com.muwire.core.Event
import com.muwire.core.InfoHash
import com.muwire.core.SharedFile
class FileHashedEvent extends Event {
SharedFile sharedFile
InfoHash infoHash
String error
@Override
public String toString() {
super.toString() + " sharedFile " + sharedFile?.file.getAbsolutePath() + " error: $error"
super.toString() + " sharedFile " + sharedFile?.file?.getAbsolutePath() + " error: $error"
}
}

View File

@ -1,9 +1,12 @@
package com.muwire.core.files
import com.muwire.core.Event
import com.muwire.core.InfoHash
import com.muwire.core.SharedFile
class FileLoadedEvent extends Event {
SharedFile loadedFile
InfoHash infoHash
String source
}

View File

@ -1,5 +1,8 @@
package com.muwire.core.files
import java.util.stream.Collectors
import java.util.stream.Stream
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
@ -25,6 +28,7 @@ class FileManager {
final Map<String, Set<File>> commentToFile = new HashMap<>()
final SearchIndex index = new SearchIndex()
final FileTree<Void> negativeTree = new FileTree<>()
final FileTree<SharedFile> positiveTree = new FileTree<>()
final Set<File> sideCarFiles = new HashSet<>()
FileManager(EventBus eventBus, MuWireSettings settings) {
@ -75,7 +79,7 @@ class FileManager {
private void addToIndex(SharedFile sf) {
log.info("Adding shared file " + sf.getFile())
InfoHash infoHash = sf.getInfoHash()
InfoHash infoHash = new InfoHash(sf.getRoot())
Set<SharedFile> existing = rootToFiles.get(infoHash)
if (existing == null) {
log.info("adding new root")
@ -84,6 +88,7 @@ class FileManager {
}
existing.add(sf)
fileToSharedFile.put(sf.file, sf)
positiveTree.add(sf.file, sf);
negativeTree.remove(sf.file)
String parent = sf.getFile().getParent()
@ -117,7 +122,7 @@ class FileManager {
void onFileUnsharedEvent(FileUnsharedEvent e) {
SharedFile sf = e.unsharedFile
InfoHash infoHash = sf.getInfoHash()
InfoHash infoHash = new InfoHash(sf.getRoot())
Set<SharedFile> existing = rootToFiles.get(infoHash)
if (existing != null) {
existing.remove(sf)
@ -127,6 +132,7 @@ class FileManager {
}
fileToSharedFile.remove(sf.file)
positiveTree.remove(sf.file)
if (!e.deleted && negativeTree.fileToNode.containsKey(sf.file.getParentFile())) {
negativeTree.add(sf.file,null)
saveNegativeTree()
@ -190,6 +196,10 @@ class FileManager {
Set<SharedFile> getSharedFiles(byte []root) {
return rootToFiles.get(new InfoHash(root))
}
boolean isShared(InfoHash infoHash) {
rootToFiles.containsKey(infoHash)
}
void onSearchEvent(SearchEvent e) {
// hash takes precedence
@ -239,14 +249,26 @@ class FileManager {
void onDirectoryUnsharedEvent(DirectoryUnsharedEvent e) {
negativeTree.remove(e.directory)
saveNegativeTree()
e.directory.listFiles().each {
if (it.isDirectory())
eventBus.publish(new DirectoryUnsharedEvent(directory : it))
else {
SharedFile sf = fileToSharedFile.get(it)
if (sf != null)
eventBus.publish(new FileUnsharedEvent(unsharedFile : sf))
if (!e.deleted) {
e.directory.listFiles().each {
if (it.isDirectory())
eventBus.publish(new DirectoryUnsharedEvent(directory : it))
else {
SharedFile sf = fileToSharedFile.get(it)
if (sf != null)
eventBus.publish(new FileUnsharedEvent(unsharedFile : sf))
}
}
} else {
def cb = new DirDeletionCallback()
positiveTree.traverse(e.directory, cb)
positiveTree.remove(e.directory)
cb.unsharedFiles.each {
eventBus.publish(new FileUnsharedEvent(unsharedFile : it, deleted: true))
}
cb.subDirs.each {
eventBus.publish(new DirectoryUnsharedEvent(directory : it, deleted : true))
}
}
}
@ -254,4 +276,34 @@ class FileManager {
settings.negativeFileTree.clear()
settings.negativeFileTree.addAll(negativeTree.fileToNode.keySet().collect { it.getAbsolutePath() })
}
public List<SharedFile> getPublishedSince(long timestamp) {
synchronized(fileToSharedFile) {
fileToSharedFile.values().stream().
filter({sf -> sf.isPublished()}).
filter({sf -> sf.getPublishedTimestamp() >= timestamp}).
collect(Collectors.toList())
}
}
private static class DirDeletionCallback implements FileTreeCallback<SharedFile> {
final List<File> subDirs = new ArrayList<>()
final List<SharedFile> unsharedFiles = new ArrayList<>()
@Override
public void onDirectoryEnter(File file) {
subDirs.add(file)
}
@Override
public void onDirectoryLeave() {
}
@Override
public void onFile(File file, SharedFile value) {
unsharedFiles << value
}
}
}
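
The new getPublishedSince(long) helper above backs the file-feed queries; a hedged usage sketch, where the fileManager instance and the one-hour window are assumptions for illustration:

    // fetch everything published in the last hour and print it
    long oneHourAgo = System.currentTimeMillis() - 60 * 60 * 1000L
    def recent = fileManager.getPublishedSince(oneHourAgo)
    recent.each { sf ->
        println "${sf.getFile()} published at ${sf.getPublishedTimestamp()}"
    }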

View File

@ -5,9 +5,10 @@ import com.muwire.core.Event
class FileSharedEvent extends Event {
File file
boolean fromWatch
@Override
public String toString() {
return super.toString() + " file: "+file.getAbsolutePath()
return super.toString() + " file: "+file.getAbsolutePath() + " fromWatch: $fromWatch"
}
}

View File

@ -23,6 +23,7 @@ class FileTree<T> {
if (existing == null) {
existing = new TreeNode()
existing.file = element
existing.isFile = element.isFile()
existing.parent = current
fileToNode.put(element, existing)
current.children.add(existing)
@ -64,7 +65,7 @@ class FileTree<T> {
private void doTraverse(TreeNode<T> node, FileTreeCallback<T> callback) {
boolean leave = false
if (node.file != null) {
if (node.file.isFile())
if (node.isFile)
callback.onFile(node.file, node.value)
else {
leave = true
@ -88,7 +89,7 @@ class FileTree<T> {
node = fileToNode.get(parent)
node.children.each {
if (it.file.isFile())
if (it.isFile)
callback.onFile(it.file, it.value)
else
callback.onDirectory(it.file)
@ -98,6 +99,7 @@ class FileTree<T> {
public static class TreeNode<T> {
TreeNode parent
File file
boolean isFile
T value;
final Set<TreeNode> children = new HashSet<>()
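
Caching isFile on the TreeNode matters because the tree is now also traversed for files that have just been deleted from disk; a live File.isFile() check would then return false and the deletion callback would misclassify them. A standalone illustration, not taken from the patch:

    File f = File.createTempFile('muwire-sketch', '.bin')
    boolean cachedIsFile = f.isFile()   // captured at insertion time, as TreeNode now does
    f.delete()
    assert !f.isFile()                  // the live check no longer knows this was a file
    assert cachedIsFile                 // the cached flag still classifies it correctly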

View File

@ -53,7 +53,6 @@ class HasherService {
private void process(File f) {
if (f.isDirectory()) {
eventBus.publish(new DirectoryWatchedEvent(directory : f))
f.listFiles().each {
eventBus.publish new FileSharedEvent(file: it)
}
@ -65,7 +64,8 @@ class HasherService {
} else {
eventBus.publish new FileHashingEvent(hashingFile: f)
def hash = hasher.hashFile f
eventBus.publish new FileHashedEvent(sharedFile: new SharedFile(f, hash, FileHasher.getPieceSize(f.length())))
eventBus.publish new FileHashedEvent(sharedFile: new SharedFile(f, hash.getRoot(), FileHasher.getPieceSize(f.length())),
infoHash : hash)
}
}
}

View File

@ -0,0 +1,12 @@
package com.muwire.core.files
import com.muwire.core.Event
/**
* Triggered by the old PersisterService
* once it has finished reading the old persisted file.
*
* @see PersisterService
*/
class PersisterDoneEvent extends Event{
}

View File

@ -0,0 +1,195 @@
package com.muwire.core.files
import com.muwire.core.*
import com.muwire.core.filefeeds.UIFilePublishedEvent
import com.muwire.core.filefeeds.UIFileUnpublishedEvent
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory
import java.util.logging.Level
/**
* A persister that stores information about shared files as
* individual JSON files in a directory tree.
*
* The 32-bit hash of a shared file's absolute path is used
* to build the directory and file name.
*
* This persister only starts working once the old persister has finished loading.
* @see PersisterFolderService#getJsonPath
*/
@Log
class PersisterFolderService extends BasePersisterService {
final static int CUT_LENGTH = 6
private final Core core;
final File location
final EventBus listener
final int interval
final Timer timer
final ExecutorService persisterExecutor = Executors.newSingleThreadExecutor({ r ->
new Thread(r, "file persister")
} as ThreadFactory)
PersisterFolderService(Core core, File location, EventBus listener) {
this.core = core;
this.location = location
this.listener = listener
timer = new Timer("file-folder persister timer", true)
}
void stop() {
timer.cancel()
persisterExecutor.shutdown()
}
void onPersisterDoneEvent(PersisterDoneEvent persisterDoneEvent) {
log.info("Old persister done")
load()
}
void onFileHashedEvent(FileHashedEvent hashedEvent) {
if (core.getMuOptions().getAutoPublishSharedFiles() && hashedEvent.sharedFile != null)
hashedEvent.sharedFile.publish(System.currentTimeMillis())
persistFile(hashedEvent.sharedFile, hashedEvent.infoHash)
}
void onFileDownloadedEvent(FileDownloadedEvent downloadedEvent) {
if (core.getMuOptions().getShareDownloadedFiles()) {
if (core.getMuOptions().getAutoPublishSharedFiles())
downloadedEvent.downloadedFile.publish(System.currentTimeMillis())
persistFile(downloadedEvent.downloadedFile, downloadedEvent.infoHash)
}
}
/**
* Deletes the persisted JSON and hash list of an unshared file
* @param unsharedEvent
*/
void onFileUnsharedEvent(FileUnsharedEvent unsharedEvent) {
def jsonPath = getJsonPath(unsharedEvent.unsharedFile)
def jsonFile = jsonPath.toFile()
if(jsonFile.isFile()){
jsonFile.delete()
}
def hashListPath = getHashListPath(unsharedEvent.unsharedFile)
def hashListFile = hashListPath.toFile()
if (hashListFile.isFile())
hashListFile.delete()
}
void onFileLoadedEvent(FileLoadedEvent loadedEvent) {
if(loadedEvent.source == "PersisterService"){
log.info("Migrating persisted file from PersisterService: "
+ loadedEvent.loadedFile.file.absolutePath.toString())
persistFile(loadedEvent.loadedFile, loadedEvent.infoHash)
}
}
void onUICommentEvent(UICommentEvent e) {
persistFile(e.sharedFile,null)
}
void onUIFilePublishedEvent(UIFilePublishedEvent e) {
persistFile(e.sf, null)
}
void onUIFileUnpublishedEvent(UIFileUnpublishedEvent e) {
persistFile(e.sf, null)
}
void load() {
log.fine("Loading...")
Thread.currentThread().setPriority(Thread.MIN_PRIORITY)
if (location.exists() && location.isDirectory()) {
try {
_load()
}
catch (Exception e) {
log.log(Level.WARNING, "couldn't load files", e)
}
} else {
location.mkdirs()
listener.publish(new AllFilesLoadedEvent())
}
loaded = true
}
/**
* Loads every JSON into memory
*/
private void _load() {
int loaded = 0
def slurper = new JsonSlurper()
Files.walk(location.toPath())
.filter({
it.getFileName().toString().endsWith(".json")
})
.forEach({
def parsed = slurper.parse it.toFile()
def event = fromJsonLite parsed
if (event == null) return
log.fine("loaded file $event.loadedFile.file")
listener.publish event
loaded++
if (loaded % 10 == 0)
Thread.sleep(20)
})
listener.publish(new AllFilesLoadedEvent())
}
private void persistFile(SharedFile sf, InfoHash ih) {
persisterExecutor.submit({
def jsonPath = getJsonPath(sf)
def startTime = System.currentTimeMillis()
jsonPath.parent.toFile().mkdirs()
jsonPath.toFile().withPrintWriter { writer ->
def json = toJson sf
json = JsonOutput.toJson(json)
writer.println json
}
if (ih != null) {
def hashListPath = getHashListPath(sf)
hashListPath.toFile().bytes = ih.hashList
}
log.fine("Time(ms) to write json+hashList: " + (System.currentTimeMillis() - startTime))
} as Runnable)
}
private Path getJsonPath(SharedFile sf){
def pathHash = sf.getB64PathHash()
return Paths.get(
location.getAbsolutePath(),
pathHash.substring(0, CUT_LENGTH),
pathHash.substring(CUT_LENGTH) + ".json"
)
}
private Path getHashListPath(SharedFile sf) {
def pathHash = sf.getB64PathHash()
return Paths.get(
location.getAbsolutePath(),
pathHash.substring(0, CUT_LENGTH),
pathHash.substring(CUT_LENGTH) + ".hashlist"
)
}
InfoHash loadInfoHash(SharedFile sf) {
def path = getHashListPath(sf)
InfoHash.fromHashList(path.toFile().bytes)
}
}
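
getJsonPath and getHashListPath above split the Base64 path hash after CUT_LENGTH characters to fan records out over subdirectories. A sketch of the resulting layout, using a made-up hash and a hypothetical location:

    import java.nio.file.Paths

    int CUT_LENGTH = 6
    String pathHash = 'q83vEjRWeJAK'                      // hypothetical Base64 path hash
    File location = new File('/home/user/.muwire/files')  // hypothetical persister location
    def jsonPath = Paths.get(location.getAbsolutePath(),
            pathHash.substring(0, CUT_LENGTH),            // "q83vEj"
            pathHash.substring(CUT_LENGTH) + '.json')     // "RWeJAK.json"
    println jsonPath   // -> /home/user/.muwire/files/q83vEj/RWeJAK.json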

View File

@ -1,40 +1,24 @@
package com.muwire.core.files
import java.nio.file.CopyOption
import java.nio.file.Files
import java.nio.file.StandardCopyOption
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory
import java.util.logging.Level
import java.util.stream.Collectors
import com.muwire.core.DownloadedFile
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.Service
import com.muwire.core.SharedFile
import com.muwire.core.UILoadedEvent
import com.muwire.core.util.DataUtil
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
import net.i2p.data.Base64
import net.i2p.data.Destination
@Log
class PersisterService extends Service {
class PersisterService extends BasePersisterService {
final File location
final EventBus listener
final int interval
final Timer timer
final FileManager fileManager
final ExecutorService persisterExecutor = Executors.newSingleThreadExecutor({ r ->
new Thread(r, "file persister")
} as ThreadFactory)
PersisterService(File location, EventBus listener, int interval, FileManager fileManager) {
this.location = location
@ -51,10 +35,6 @@ class PersisterService extends Service {
void onUILoadedEvent(UILoadedEvent e) {
timer.schedule({load()} as TimerTask, 1)
}
void onUIPersistFilesEvent(UIPersistFilesEvent e) {
persistFiles()
}
void load() {
Thread.currentThread().setPriority(Thread.MIN_PRIORITY)
@ -69,6 +49,7 @@ class PersisterService extends Service {
def event = fromJson parsed
if (event != null) {
log.fine("loaded file $event.loadedFile.file")
event.source = "PersisterService"
listener.publish event
loaded++
if (loaded % 10 == 0)
@ -76,126 +57,18 @@ class PersisterService extends Service {
}
}
}
listener.publish(new AllFilesLoadedEvent())
} catch (IllegalArgumentException|NumberFormatException e) {
// Backup the old hashes
location.renameTo(
new File(location.absolutePath + ".bak")
)
listener.publish(new PersisterDoneEvent())
} catch (Exception e) {
log.log(Level.WARNING, "couldn't load files",e)
}
} else {
listener.publish(new AllFilesLoadedEvent())
listener.publish(new PersisterDoneEvent())
}
timer.schedule({persistFiles()} as TimerTask, 1000, interval)
loaded = true
}
private static FileLoadedEvent fromJson(def json) {
if (json.file == null || json.length == null || json.infoHash == null || json.hashList == null)
throw new IllegalArgumentException()
if (!(json.hashList instanceof List))
throw new IllegalArgumentException()
def file = new File(DataUtil.readi18nString(Base64.decode(json.file)))
file = file.getCanonicalFile()
if (!file.exists() || file.isDirectory())
return null
long length = Long.valueOf(json.length)
if (length != file.length())
return null
List hashList = (List) json.hashList
ByteArrayOutputStream baos = new ByteArrayOutputStream()
hashList.each {
byte [] hash = Base64.decode it.toString()
if (hash == null)
throw new IllegalArgumentException()
baos.write hash
}
byte[] hashListBytes = baos.toByteArray()
InfoHash ih = InfoHash.fromHashList(hashListBytes)
byte [] root = Base64.decode(json.infoHash.toString())
if (root == null)
throw new IllegalArgumentException()
if (!Arrays.equals(root, ih.getRoot()))
return null
int pieceSize = 0
if (json.pieceSize != null)
pieceSize = json.pieceSize
if (json.sources != null) {
List sources = (List)json.sources
Set<Destination> sourceSet = sources.stream().map({d -> new Destination(d.toString())}).collect Collectors.toSet()
DownloadedFile df = new DownloadedFile(file, ih, pieceSize, sourceSet)
df.setComment(json.comment)
return new FileLoadedEvent(loadedFile : df)
}
SharedFile sf = new SharedFile(file, ih, pieceSize)
sf.setComment(json.comment)
if (json.downloaders != null)
sf.getDownloaders().addAll(json.downloaders)
if (json.searchers != null) {
json.searchers.each {
Persona searcher = null
if (it.searcher != null)
searcher = new Persona(new ByteArrayInputStream(Base64.decode(it.searcher)))
long timestamp = it.timestamp
String query = it.query
sf.hit(searcher, timestamp, query)
}
}
return new FileLoadedEvent(loadedFile: sf)
}
private void persistFiles() {
persisterExecutor.submit( {
def sharedFiles = fileManager.getSharedFiles()
File tmp = File.createTempFile("muwire-files", "tmp")
tmp.deleteOnExit()
tmp.withPrintWriter { writer ->
sharedFiles.each { k, v ->
def json = toJson(k,v)
json = JsonOutput.toJson(json)
writer.println json
}
}
Files.copy(tmp.toPath(), location.toPath(), StandardCopyOption.REPLACE_EXISTING)
tmp.delete()
} as Runnable)
}
private def toJson(File f, SharedFile sf) {
def json = [:]
json.file = sf.getB64EncodedFileName()
json.length = sf.getCachedLength()
InfoHash ih = sf.getInfoHash()
json.infoHash = sf.getB64EncodedHashRoot()
json.pieceSize = sf.getPieceSize()
json.hashList = sf.getB64EncodedHashList()
json.comment = sf.getComment()
json.hits = sf.getHits()
json.downloaders = sf.getDownloaders()
if (!sf.searches.isEmpty()) {
Set searchers = new HashSet<>()
sf.searches.each {
def search = [:]
if (it.searcher != null)
search.searcher = it.searcher.toBase64()
search.timestamp = it.timestamp
search.query = it.query
searchers.add(search)
}
json.searchers = searchers
}
if (sf instanceof DownloadedFile) {
json.sources = sf.sources.stream().map( {d -> d.toBase64()}).collect(Collectors.toList())
}
json
}
}

View File

@ -1,6 +0,0 @@
package com.muwire.core.files
import com.muwire.core.Event
class UIPersistFilesEvent extends Event {
}

View File

@ -0,0 +1,7 @@
package com.muwire.core.files.directories
import com.muwire.core.Event
class UISyncDirectoryEvent extends Event {
File directory
}

View File

@ -0,0 +1,37 @@
package com.muwire.core.files.directories
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
class WatchedDirectory {
final File directory
final String encodedName
boolean autoWatch
int syncInterval
long lastSync
WatchedDirectory(File directory) {
this.directory = directory.getCanonicalFile()
this.encodedName = Base64.encode(DataUtil.encodei18nString(directory.getAbsolutePath()))
}
def toJson() {
def rv = [:]
rv.directory = encodedName
rv.autoWatch = autoWatch
rv.syncInterval = syncInterval
rv.lastSync = lastSync
rv
}
static WatchedDirectory fromJson(def json) {
String dirName = DataUtil.readi18nString(Base64.decode(json.directory))
File dir = new File(dirName)
def rv = new WatchedDirectory(dir)
rv.autoWatch = json.autoWatch
rv.syncInterval = json.syncInterval
rv.lastSync = json.lastSync
rv
}
}

View File

@ -0,0 +1,9 @@
package com.muwire.core.files.directories
import com.muwire.core.Event
class WatchedDirectoryConfigurationEvent extends Event {
File directory
boolean autoWatch
int syncInterval
}

View File

@ -0,0 +1,10 @@
package com.muwire.core.files.directories
import com.muwire.core.Event
/**
* Emitted once the old watched directory entries have been
* converted to the new format.
*/
class WatchedDirectoryConvertedEvent extends Event {
}

View File

@ -0,0 +1,27 @@
package com.muwire.core.files.directories
import com.muwire.core.Core
import com.muwire.core.files.AllFilesLoadedEvent
/**
* Converts the old settings-based watched-directory format to the new folder-based format.
*/
class WatchedDirectoryConverter {
private final Core core
WatchedDirectoryConverter(Core core) {
this.core = core
}
void onAllFilesLoadedEvent(AllFilesLoadedEvent e) {
core.getMuOptions().getWatchedDirectories().each {
File directory = new File(it)
directory = directory.getCanonicalFile()
core.eventBus.publish(new WatchedDirectoryConfigurationEvent(directory : directory, autoWatch: true))
}
core.getMuOptions().getWatchedDirectories().clear()
core.saveMuSettings()
core.eventBus.publish(new WatchedDirectoryConvertedEvent())
}
}

View File

@ -0,0 +1,224 @@
package com.muwire.core.files.directories
import java.nio.file.Files
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.ThreadFactory
import java.util.stream.Stream
import com.muwire.core.EventBus
import com.muwire.core.SharedFile
import com.muwire.core.files.DirectoryUnsharedEvent
import com.muwire.core.files.DirectoryWatchedEvent
import com.muwire.core.files.FileListCallback
import com.muwire.core.files.FileManager
import com.muwire.core.files.FileSharedEvent
import com.muwire.core.files.FileUnsharedEvent
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
@Log
class WatchedDirectoryManager {
private final File home
private final EventBus eventBus
private final FileManager fileManager
private final Map<File, WatchedDirectory> watchedDirs = new ConcurrentHashMap<>()
private final ExecutorService diskIO = Executors.newSingleThreadExecutor({r ->
Thread t = new Thread(r, "disk-io")
t.setDaemon(true)
t
} as ThreadFactory)
private final Timer timer = new Timer("directory-timer", true)
private boolean converting = true
WatchedDirectoryManager(File home, EventBus eventBus, FileManager fileManager) {
this.home = new File(home, "directories")
this.home.mkdir()
this.eventBus = eventBus
this.fileManager = fileManager
}
public boolean isWatched(File f) {
watchedDirs.containsKey(f)
}
public Stream<WatchedDirectory> getWatchedDirsStream() {
watchedDirs.values().stream()
}
public void shutdown() {
diskIO.shutdown()
timer.cancel()
}
void onUISyncDirectoryEvent(UISyncDirectoryEvent e) {
def wd = watchedDirs.get(e.directory)
if (wd == null) {
log.warning("Got a sync event for non-watched dir ${e.directory}")
return
}
diskIO.submit({sync(wd, System.currentTimeMillis())} as Runnable)
}
void onWatchedDirectoryConfigurationEvent(WatchedDirectoryConfigurationEvent e) {
if (converting) {
def newDir = new WatchedDirectory(e.directory)
// conversion is always autowatch really
newDir.autoWatch = e.autoWatch
persist(newDir)
} else {
def wd = watchedDirs.get(e.directory)
if (wd == null) {
log.severe("got a configuration event for a non-watched directory ${e.directory}")
return
}
wd.autoWatch = e.autoWatch
wd.syncInterval = e.syncInterval
persist(wd)
}
}
void onWatchedDirectoryConvertedEvent(WatchedDirectoryConvertedEvent e) {
converting = false
diskIO.submit({
def slurper = new JsonSlurper()
Files.walk(home.toPath()).filter({
it.getFileName().toString().endsWith(".json")
}).
forEach {
def parsed = slurper.parse(it.toFile())
WatchedDirectory wd = WatchedDirectory.fromJson(parsed)
if (wd.directory.exists() && wd.directory.isDirectory()) // check if directory disappeared
watchedDirs.put(wd.directory, wd)
else
it.toFile().delete()
}
watchedDirs.values().stream().filter({it.autoWatch}).forEach {
eventBus.publish(new DirectoryWatchedEvent(directory : it.directory))
eventBus.publish(new FileSharedEvent(file : it.directory))
}
timer.schedule({sync()} as TimerTask, 1000, 1000)
} as Runnable)
}
private void persist(WatchedDirectory dir) {
diskIO.submit({doPersist(dir)} as Runnable)
}
private void doPersist(WatchedDirectory dir) {
def json = JsonOutput.toJson(dir.toJson())
def targetFile = new File(home, dir.getEncodedName() + ".json")
targetFile.text = json
}
void onFileSharedEvent(FileSharedEvent e) {
if (e.file.isFile() || watchedDirs.containsKey(e.file))
return
def wd = new WatchedDirectory(e.file)
if (e.fromWatch) {
// parent should be already watched, copy settings
def parent = watchedDirs.get(e.file.getParentFile())
if (parent == null) {
log.severe("watching found a directory without a watched parent? ${e.file}")
return
}
wd.autoWatch = parent.autoWatch
wd.syncInterval = parent.syncInterval
} else
wd.autoWatch = true
watchedDirs.put(wd.directory, wd)
persist(wd)
if (wd.autoWatch)
eventBus.publish(new DirectoryWatchedEvent(directory: wd.directory))
}
void onDirectoryUnsharedEvent(DirectoryUnsharedEvent e) {
def wd = watchedDirs.remove(e.directory)
if (wd == null) {
log.warning("unshared a directory that wasn't watched? ${e.directory}")
return
}
File persistFile = new File(home, wd.getEncodedName() + ".json")
persistFile.delete()
}
private void sync() {
long now = System.currentTimeMillis()
watchedDirs.values().stream().
filter({!it.autoWatch}).
filter({it.syncInterval > 0}).
filter({it.lastSync + it.syncInterval * 1000 < now}).
forEach({wd -> diskIO.submit({sync(wd, now)} as Runnable )})
}
private void sync(WatchedDirectory wd, long now) {
log.fine("syncing ${wd.directory}")
wd.lastSync = now
doPersist(wd)
eventBus.publish(new WatchedDirectorySyncEvent(directory: wd.directory, when: now))
def cb = new DirSyncCallback()
fileManager.positiveTree.list(wd.directory, cb)
Set<File> filesOnFS = new HashSet<>()
Set<File> dirsOnFS = new HashSet<>()
wd.directory.listFiles().each {
File canonical = it.getCanonicalFile()
if (canonical.isFile())
filesOnFS.add(canonical)
else
dirsOnFS.add(canonical)
}
Set<File> addedFiles = new HashSet<>(filesOnFS)
addedFiles.removeAll(cb.files)
addedFiles.each {
eventBus.publish(new FileSharedEvent(file : it, fromWatch : true))
}
Set<File> addedDirs = new HashSet<>(dirsOnFS)
addedDirs.removeAll(cb.dirs)
addedDirs.each {
eventBus.publish(new FileSharedEvent(file : it, fromWatch : true))
}
Set<File> deletedFiles = new HashSet<>(cb.files)
deletedFiles.removeAll(filesOnFS)
deletedFiles.each {
eventBus.publish(new FileUnsharedEvent(unsharedFile : fileManager.getFileToSharedFile().get(it), deleted : true))
}
Set<File> deletedDirs = new HashSet<>(cb.dirs)
deletedDirs.removeAll(dirsOnFS)
deletedDirs.each {
eventBus.publish(new DirectoryUnsharedEvent(directory : it, deleted: true))
}
}
private static class DirSyncCallback implements FileListCallback<SharedFile> {
private final Set<File> files = new HashSet<>()
private final Set<File> dirs = new HashSet<>()
@Override
public void onFile(File f, SharedFile value) {
files.add(f)
}
@Override
public void onDirectory(File f) {
dirs.add(f)
}
}
}
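
The sync(WatchedDirectory, long) method above boils down to two set differences between what is on disk and what the positive file tree already knows about. A standalone sketch with made-up paths:

    Set<File> onDisk = [new File('/music/a.flac'), new File('/music/b.flac')] as Set
    Set<File> inTree = [new File('/music/b.flac'), new File('/music/c.flac')] as Set

    Set<File> added = new HashSet<>(onDisk)
    added.removeAll(inTree)      // a.flac -> publish FileSharedEvent(fromWatch: true)

    Set<File> removed = new HashSet<>(inTree)
    removed.removeAll(onDisk)    // c.flac -> publish FileUnsharedEvent(deleted: true)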

View File

@ -0,0 +1,8 @@
package com.muwire.core.files.directories
import com.muwire.core.Event
class WatchedDirectorySyncEvent extends Event {
File directory
long when
}

View File

@ -127,7 +127,8 @@ class CacheClient {
@Override
public void disconnected(I2PSession session) {
log.severe "I2P session disconnected"
if (!stopped.get())
log.severe "Cache client I2P session disconnected"
}
@Override

View File

@ -8,10 +8,8 @@ class CacheServers {
private static Set<Destination> CACHES = [
// zlatinb
new Destination("Wddh2E6FyyXBF7SvUYHKdN-vjf3~N6uqQWNeBDTM0P33YjiQCOsyedrjmDZmWFrXUJfJLWnCb5bnKezfk4uDaMyj~uvDG~yvLVcFgcPWSUd7BfGgym-zqcG1q1DcM8vfun-US7YamBlmtC6MZ2j-~Igqzmgshita8aLPCfNAA6S6e2UMjjtG7QIXlxpMec75dkHdJlVWbzrk9z8Qgru3YIk0UztYgEwDNBbm9wInsbHhr3HtAfa02QcgRVqRN2PnQXuqUJs7R7~09FZPEviiIcUpkY3FeyLlX1sgQFBeGeA96blaPvZNGd6KnNdgfLgMebx5SSxC-N4KZMSMBz5cgonQF3~m2HHFRSI85zqZNG5X9bJN85t80ltiv1W1es8ZnQW4es11r7MrvJNXz5bmSH641yJIvS6qI8OJJNpFVBIQSXLD-96TayrLQPaYw~uNZ-eXaE6G5dYhiuN8xHsFI1QkdaUaVZnvDGfsRbpS5GtpUbBDbyLkdPurG0i7dN1wAAAA"),
// sNL
new Destination("JC63wJNOqSJmymkj4~UJWywBTvDGikKMoYP0HX2Wz9c5l3otXSkwnxWAFL4cKr~Ygh3BNNi2t93vuLIiI1W8AsE42kR~PwRx~Y-WvIHXR6KUejRmOp-n8WidtjKg9k4aDy428uSOedqXDxys5mpoeQXwDsv1CoPTTwnmb1GWFy~oTGIsCguCl~aJWGnqiKarPO3GJQ~ev-NbvAQzUfC3HeP1e6pdI5CGGjExahTCID5UjpJw8GaDXWlGmYWWH303Xu4x-vAHQy1dJLsOBCn8dZravsn5BKJk~j0POUon45CCx-~NYtaPe0Itt9cMdD2ciC76Rep1D0X0sm1SjlSs8sZ52KmF3oaLZ6OzgI9QLMIyBUrfi41sK5I0qTuUVBAkvW1xr~L-20dYJ9TrbOaOb2-vDIfKaxVi6xQOuhgQDiSBhd3qv2m0xGu-BM9DQYfNA0FdMjnZmqjmji9RMavzQSsVFIbQGLbrLepiEFlb7TseCK5UtRp8TxnG7L4gbYevBQAEAAcAAA=="),
// dark_trion
new Destination("Gec9L29FVcQvYDgpcYuEYdltJn06PPoOWAcAM8Af-gDm~ehlrJcwlLXXs0hidq~yP2A0X7QcDi6i6shAfuEofTchxGJl8LRNqj9lio7WnB7cIixXWL~uCkD7Np5LMX0~akNX34oOb9RcBYVT2U5rFGJmJ7OtBv~IBkGeLhsMrqaCjahd0jdBO~QJ-t82ZKZhh044d24~JEfF9zSJxdBoCdAcXzryGNy7sYtFVDFsPKJudAxSW-UsSQiGw2~k-TxyF0r-iAt1IdzfNu8Lu0WPqLdhDYJWcPldx2PR5uJorI~zo~z3I5RX3NwzarlbD4nEP5s65ahPSfVCEkzmaJUBgP8DvBqlFaX89K4nGRYc7jkEjJ8cX4L6YPXUpTPWcfKkW259WdQY3YFh6x7rzijrGZewpczOLCrt-bZRYgDrUibmZxKZmNhy~lQu4gYVVjkz1i4tL~DWlhIc4y0x2vItwkYLArPPi~ejTnt-~Lhb7oPMXRcWa3UrwGKpFvGZY4NXBQAEAAcAAA==")
// echelon
new Destination("2MJTl8gYVPK43iJZJa~-5K1OchgPaPHXpqZmKIiKFvxyy8BlIJzUSrF4mazdta--shFHISfT0PEeI95j1yDyKMpGxatUyjSt3ZnyTfAehQR-H2kYV9FvjHo68uA9X5AaGYHKRYLuWMkihMXygd8ywoLjZtFP0UbKMPggfOZaWmjHF4081XoUXt~7MEAeYSQowndiUx0AH3HxNEiv0N373JJS61OsIXb5ctqVKkwIiX1R0ZxESzpP9Xwp8-T0ou8fsLksygbKyH~3K1CyTHjTS51Ux-U-CjOPH9rtCOjjAaifdyMpK0PxW1fVdoGswFywTz9Q-6DUMsIu5TsPMF0-UO1Wn8vCpVAWbBJAOtKCfBrGzp-E~GCbfCNs5xY19nLobMD5ehjsBdI1lXwGDCQ7kBOwC58uuC3BOoazgrB6IrGskyMTexawtthO9mhuPm91bq4xhNaCYHAe059xg5emnM7jFBVzQgjaZ5lOLn~HqcWofJ7oc0doE6XI6kOo~YncBQAEAAcAAA==")
]
static List<Destination> getCacheServers() {

View File

@ -7,17 +7,19 @@ class Host {
private static final int MAX_FAILURES = 3
final Destination destination
private final int clearInterval, hopelessInterval, rejectionInterval
private final int clearInterval, hopelessInterval, rejectionInterval, purgeInterval
int failures,successes
long lastAttempt
long lastSuccessfulAttempt
long lastRejection
public Host(Destination destination, int clearInterval, int hopelessInterval, int rejectionInterval) {
public Host(Destination destination, int clearInterval, int hopelessInterval, int rejectionInterval,
int purgeInterval) {
this.destination = destination
this.clearInterval = clearInterval
this.hopelessInterval = hopelessInterval
this.rejectionInterval = rejectionInterval
this.purgeInterval = purgeInterval
}
private void connectSuccessful() {
@ -54,17 +56,22 @@ class Host {
failures = 0
}
synchronized boolean canTryAgain() {
synchronized boolean canTryAgain(final long now) {
lastSuccessfulAttempt > 0 &&
System.currentTimeMillis() - lastAttempt > (clearInterval * 60 * 1000)
now - lastAttempt > (clearInterval * 60 * 1000)
}
synchronized boolean isHopeless() {
synchronized boolean isHopeless(final long now) {
isFailed() &&
System.currentTimeMillis() - lastSuccessfulAttempt > (hopelessInterval * 60 * 1000)
now - lastSuccessfulAttempt > (hopelessInterval * 60 * 1000)
}
synchronized boolean isRecentlyRejected() {
System.currentTimeMillis() - lastRejection < (rejectionInterval * 60 * 1000)
synchronized boolean isRecentlyRejected(final long now) {
now - lastRejection < (rejectionInterval * 60 * 1000)
}
synchronized boolean shouldBeForgotten(final long now) {
isHopeless(now) &&
now - lastAttempt > (purgeInterval * 60 * 1000)
}
}
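
The timeline implied by the new time-based checks above, with made-up interval values (the real ones come from MuWireSettings) and ignoring the failure-count condition of isFailed():

    int hopelessInterval = 60        // minutes, assumption
    int purgeInterval    = 24 * 60   // minutes, assumption
    long now = System.currentTimeMillis()
    long lastSuccessfulAttempt = now - 2 * 60 * 60 * 1000L    // 2 hours ago
    long lastAttempt           = now - 25 * 60 * 60 * 1000L   // 25 hours ago

    boolean hopeless  = (now - lastSuccessfulAttempt) > hopelessInterval * 60 * 1000L   // true
    boolean forgotten = hopeless && (now - lastAttempt) > purgeInterval * 60 * 1000L    // true: purged on next save()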

View File

@ -52,7 +52,8 @@ class HostCache extends Service {
hosts.get(e.destination).clearFailures()
return
}
Host host = new Host(e.destination, settings.hostClearInterval, settings.hostHopelessInterval, settings.hostRejectInterval)
Host host = new Host(e.destination, settings.hostClearInterval, settings.hostHopelessInterval,
settings.hostRejectInterval, settings.hostHopelessPurgeInterval)
if (allowHost(host)) {
hosts.put(e.destination, host)
}
@ -64,7 +65,8 @@ class HostCache extends Service {
Destination dest = e.endpoint.destination
Host host = hosts.get(dest)
if (host == null) {
host = new Host(dest, settings.hostClearInterval, settings.hostHopelessInterval, settings.hostRejectInterval)
host = new Host(dest, settings.hostClearInterval, settings.hostHopelessInterval,
settings.hostRejectInterval, settings.hostHopelessPurgeInterval)
hosts.put(dest, host)
}
@ -84,9 +86,10 @@ class HostCache extends Service {
List<Destination> getHosts(int n) {
List<Destination> rv = new ArrayList<>(hosts.keySet())
rv.retainAll {allowHost(hosts[it])}
final long now = System.currentTimeMillis()
rv.removeAll {
def h = hosts[it];
(h.isFailed() && !h.canTryAgain()) || h.isRecentlyRejected()
(h.isFailed() && !h.canTryAgain(now)) || h.isRecentlyRejected(now) || h.isHopeless(now)
}
if (rv.size() <= n)
return rv
@ -116,8 +119,9 @@ class HostCache extends Service {
int countHopelessHosts() {
List<Destination> rv = new ArrayList<>(hosts.keySet())
final long now = System.currentTimeMillis()
rv.retainAll {
hosts[it].isHopeless()
hosts[it].isHopeless(now)
}
rv.size()
}
@ -128,7 +132,8 @@ class HostCache extends Service {
storage.eachLine {
def entry = slurper.parseText(it)
Destination dest = new Destination(entry.destination)
Host host = new Host(dest, settings.hostClearInterval, settings.hostHopelessInterval, settings.hostRejectInterval)
Host host = new Host(dest, settings.hostClearInterval, settings.hostHopelessInterval,
settings.hostRejectInterval, settings.hostHopelessPurgeInterval)
host.failures = Integer.valueOf(String.valueOf(entry.failures))
host.successes = Integer.valueOf(String.valueOf(entry.successes))
if (entry.lastAttempt != null)
@ -161,10 +166,12 @@ class HostCache extends Service {
}
private void save() {
final long now = System.currentTimeMillis()
hosts.keySet().removeAll { hosts[it].shouldBeForgotten(now) }
storage.delete()
storage.withPrintWriter { writer ->
hosts.each { dest, host ->
if (allowHost(host) && !host.isHopeless()) {
if (allowHost(host) && !host.isHopeless(now)) {
def map = [:]
map.destination = dest.toBase64()
map.failures = host.failures

View File

@ -1,28 +1,68 @@
package com.muwire.core.mesh
import java.util.concurrent.ConcurrentHashMap
import java.util.stream.Collectors
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.download.Pieces
import com.muwire.core.util.DataUtil
import net.i2p.data.Base64
import net.i2p.data.Destination
import net.i2p.util.ConcurrentHashSet
/**
* Representation of a download mesh.
*
* It holds two data structures: the collection of known sources and the
* collection of sources we have successfully transferred data with.
*
* @author zab
*/
class Mesh {
private final InfoHash infoHash
private final Set<Persona> sources = new ConcurrentHashSet<>()
private final Pieces pieces
private final Map<Destination,Persona> sources = new HashMap<>()
private final Set<Destination> verified = new HashSet<>()
final Pieces pieces
Mesh(InfoHash infoHash, Pieces pieces) {
this.infoHash = infoHash
this.pieces = pieces
}
Set<Persona> getRandom(int n, Persona exclude) {
List<Persona> tmp = new ArrayList<>(sources)
tmp.remove(exclude)
synchronized Set<Persona> getRandom(int n, Persona exclude) {
List<Destination> tmp = new ArrayList<>(verified)
if (exclude != null)
tmp.remove(exclude.destination)
tmp.retainAll(sources.keySet()) // verified may contain nodes not in sources
Collections.shuffle(tmp)
if (tmp.size() < n)
return tmp
tmp[0..n-1]
if (tmp.size() > n)
tmp = tmp[0..n-1]
tmp.collect(new HashSet<>(), { sources[it] })
}
synchronized void add(Persona persona) {
sources.put(persona.destination, persona)
}
synchronized void verify(Destination d) {
verified.add(d)
}
synchronized def toJson() {
def json = [:]
json.timestamp = System.currentTimeMillis()
json.infoHash = Base64.encode(infoHash.getRoot())
Set<Persona> toPersist = new HashSet<>(sources.values())
toPersist.retainAll { verified.contains(it.destination) }
json.sources = toPersist.collect {it.toBase64()}
json.nPieces = pieces.nPieces
List<Integer> downloaded = pieces.getDownloaded()
if( downloaded.size() > pieces.nPieces)
return null
json.xHave = DataUtil.encodeXHave(downloaded, pieces.nPieces)
json
}
}
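
What getRandom(n, exclude) above computes, with plain strings standing in for Destination and Persona objects (all names are placeholders):

    def sources  = [destA: 'personaA', destB: 'personaB', destC: 'personaC']
    def verified = ['destA', 'destB', 'destC', 'destD'] as Set   // destD verified but no longer a known source

    def candidates = new ArrayList<>(verified)
    candidates.remove('destA')                 // exclude the requesting downloader
    candidates.retainAll(sources.keySet())     // verified may contain nodes not in sources
    Collections.shuffle(candidates)
    def picked = candidates.take(2).collect { sources[it] }   // up to n verified personas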

View File

@ -9,6 +9,7 @@ import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.download.Pieces
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.download.SourceVerifiedEvent
import com.muwire.core.files.FileManager
import com.muwire.core.util.DataUtil
@ -56,25 +57,25 @@ class MeshManager {
Mesh mesh = meshes.get(e.infoHash)
if (mesh == null)
return
mesh.sources.add(e.source)
save()
mesh.add(e.source)
}
void onSourceVerifiedEvent(SourceVerifiedEvent e) {
Mesh mesh = meshes.get(e.infoHash)
if (mesh == null)
return
mesh.verify(e.source)
save()
}
private void save() {
File meshFile = new File(home, "mesh.json")
synchronized(meshes) {
meshFile.withPrintWriter { writer ->
meshes.values().each { mesh ->
def json = [:]
json.timestamp = System.currentTimeMillis()
json.infoHash = Base64.encode(mesh.infoHash.getRoot())
json.sources = mesh.sources.stream().map({it.toBase64()}).collect(Collectors.toList())
json.nPieces = mesh.pieces.nPieces
List<Integer> downloaded = mesh.pieces.getDownloaded()
if( downloaded.size() > mesh.pieces.nPieces)
return
json.xHave = DataUtil.encodeXHave(downloaded, mesh.pieces.nPieces)
writer.println(JsonOutput.toJson(json))
def json = mesh.toJson()
if (json != null)
writer.println(JsonOutput.toJson(json))
}
}
}
@ -99,7 +100,8 @@ class MeshManager {
Mesh mesh = new Mesh(infoHash, pieces)
json.sources.each { source ->
Persona persona = new Persona(new ByteArrayInputStream(Base64.decode(source)))
mesh.sources.add(persona)
mesh.add(persona)
mesh.verify(persona.destination) // assume if persisted it was verified
}
if (json.xHave != null) {

View File

@ -0,0 +1,30 @@
package com.muwire.core.search
import com.muwire.core.download.SourceVerifiedEvent
import com.muwire.core.util.FixedSizeFIFOSet
import net.i2p.data.Destination
/**
* Caches destinations that have recently responded with search results.
*/
class ResponderCache {
private final FixedSizeFIFOSet<Destination> cache
ResponderCache(int capacity) {
cache = new FixedSizeFIFOSet<>(capacity)
}
synchronized void onUIResultBatchEvent(UIResultBatchEvent e) {
cache.add(e.results[0].sender.destination)
}
synchronized void onSourceVerifiedEvent(SourceVerifiedEvent e) {
cache.add(e.source)
}
synchronized boolean hasResponded(Destination d) {
cache.contains(d)
}
}
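
ResponderCache relies on com.muwire.core.util.FixedSizeFIFOSet, which is not part of this diff. A minimal stand-in with the behaviour the cache needs (bounded, insertion-ordered, oldest entry evicted first); this is an illustration, not the real class:

    class BoundedFifoSet<T> {
        private final int capacity
        private final LinkedHashSet<T> backing = new LinkedHashSet<>()
        BoundedFifoSet(int capacity) { this.capacity = capacity }
        void add(T element) {
            if (backing.contains(element))
                return
            if (backing.size() == capacity) {       // evict the oldest entry
                def iter = backing.iterator()
                iter.next()
                iter.remove()
            }
            backing.add(element)
        }
        boolean contains(T element) { backing.contains(element) }
    }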

View File

@ -9,9 +9,11 @@ import com.muwire.core.Persona
import com.muwire.core.files.FileHasher
import com.muwire.core.util.DataUtil
import groovy.util.logging.Log
import net.i2p.data.Base64
import net.i2p.data.Destination
@Log
class ResultsParser {
public static UIResultEvent parse(Persona p, UUID uuid, def json) throws InvalidSearchResultException {
if (json.type != "Result")
@ -103,6 +105,8 @@ class ResultsParser {
int certificates = 0
if (json.certificates != null)
certificates = json.certificates
log.fine("Received result from ${p.getHumanReadableName()} name \"$name\" infoHash:\"${json.infohash}\"")
return new UIResultEvent( sender : p,
name : name,

View File

@ -77,18 +77,19 @@ class ResultsSender {
if (it.getComment() != null) {
comment = DataUtil.readi18nString(Base64.decode(it.getComment()))
}
int certificates = certificateManager.getByInfoHash(it.getInfoHash()).size()
int certificates = certificateManager.getByInfoHash(new InfoHash(it.getRoot())).size()
def uiResultEvent = new UIResultEvent( sender : me,
name : it.getFile().getName(),
size : length,
infohash : it.getInfoHash(),
infohash : new InfoHash(it.getRoot()),
pieceSize : pieceSize,
uuid : uuid,
browse : settings.browseFiles,
sources : suggested,
comment : comment,
certificates : certificates,
chat : chatServer.running.get() && settings.advertiseChat
chat : chatServer.isRunning() && settings.advertiseChat,
feed : settings.fileFeed && settings.advertiseFeed
)
uiResultEvents << uiResultEvent
}
@ -119,7 +120,7 @@ class ResultsSender {
me.write(os)
os.writeShort((short)results.length)
results.each {
int certificates = certificateManager.getByInfoHash(it.getInfoHash()).size()
int certificates = certificateManager.getByInfoHash(new InfoHash(it.getRoot())).size()
def obj = sharedFileToObj(it, settings.browseFiles, certificates)
def json = jsonOutput.toJson(obj)
os.writeShort((short)json.length())
@ -136,12 +137,14 @@ class ResultsSender {
os.write("RESULTS $uuid\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Sender: ${me.toBase64()}\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("Count: $results.length\r\n".getBytes(StandardCharsets.US_ASCII))
boolean chat = chatServer.running.get() && settings.advertiseChat
boolean chat = chatServer.isRunning() && settings.advertiseChat
os.write("Chat: $chat\r\n".getBytes(StandardCharsets.US_ASCII))
boolean feed = settings.fileFeed && settings.advertiseFeed
os.write("Feed: $feed\r\n".getBytes(StandardCharsets.US_ASCII))
os.write("\r\n".getBytes(StandardCharsets.US_ASCII))
DataOutputStream dos = new DataOutputStream(new GZIPOutputStream(os))
results.each {
int certificates = certificateManager.getByInfoHash(it.getInfoHash()).size()
int certificates = certificateManager.getByInfoHash(new InfoHash(it.getRoot())).size()
def obj = sharedFileToObj(it, settings.browseFiles, certificates)
def json = jsonOutput.toJson(obj)
dos.writeShort((short)json.length())
@ -170,7 +173,7 @@ class ResultsSender {
obj.type = "Result"
obj.version = 2
obj.name = encodedName
obj.infohash = Base64.encode(sf.getInfoHash().getRoot())
obj.infohash = Base64.encode(sf.getRoot())
obj.size = sf.getCachedLength()
obj.pieceSize = sf.getPieceSize()

View File

@ -18,7 +18,8 @@ class UIResultEvent extends Event {
boolean browse
int certificates
boolean chat
boolean feed
@Override
public String toString() {
super.toString() + "name:$name size:$size sender:${sender.getHumanReadableName()} pieceSize $pieceSize"

View File

@ -0,0 +1,218 @@
package com.muwire.core.tracker
import java.util.concurrent.ConcurrentHashMap
import java.util.logging.Level
import java.util.stream.Collectors
import com.muwire.core.Constants
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
import com.muwire.core.Persona
import com.muwire.core.download.DownloadManager
import com.muwire.core.download.Pieces
import com.muwire.core.files.FileManager
import com.muwire.core.mesh.Mesh
import com.muwire.core.mesh.MeshManager
import com.muwire.core.trust.TrustLevel
import com.muwire.core.trust.TrustService
import com.muwire.core.util.DataUtil
import groovy.json.JsonOutput
import groovy.json.JsonSlurper
import groovy.util.logging.Log
import net.i2p.client.I2PSession
import net.i2p.client.I2PSessionMuxedListener
import net.i2p.client.SendMessageOptions
import net.i2p.client.datagram.I2PDatagramDissector
import net.i2p.client.datagram.I2PDatagramMaker
import net.i2p.data.Base64
@Log
class TrackerResponder {
private final I2PSession i2pSession
private final MuWireSettings muSettings
private final FileManager fileManager
private final DownloadManager downloadManager
private final MeshManager meshManager
private final TrustService trustService
private final Persona me
private final Map<UUID,Long> uuids = new HashMap<>()
private final Timer expireTimer = new Timer("tracker-responder-timer", true)
private static final long UUID_LIFETIME = 10 * 60 * 1000
private volatile boolean shutdown
TrackerResponder(I2PSession i2pSession, MuWireSettings muSettings,
FileManager fileManager, DownloadManager downloadManager,
MeshManager meshManager, TrustService trustService,
Persona me) {
this.i2pSession = i2pSession
this.muSettings = muSettings
this.fileManager = fileManager
this.downloadManager = downloadManager
this.meshManager = meshManager
this.trustService = trustService
this.me = me
}
void start() {
i2pSession.addMuxedSessionListener(new Listener(), I2PSession.PROTO_DATAGRAM, Constants.TRACKER_PORT)
expireTimer.schedule({expireUUIDs()} as TimerTask, UUID_LIFETIME, UUID_LIFETIME)
}
void stop() {
shutdown = true
expireTimer.cancel()
}
private void expireUUIDs() {
final long now = System.currentTimeMillis()
synchronized(uuids) {
for (Iterator<UUID> iter = uuids.keySet().iterator(); iter.hasNext();) {
UUID uuid = iter.next();
Long time = uuids.get(uuid)
if (now - time > UUID_LIFETIME)
iter.remove()
}
}
}
private void respond(host, json) {
log.info("responding to host $host with json $json")
def message = JsonOutput.toJson(json)
def maker = new I2PDatagramMaker(i2pSession)
message = maker.makeI2PDatagram(message.bytes)
def options = new SendMessageOptions()
options.setSendLeaseSet(false)
i2pSession.sendMessage(host, message, 0, message.length, I2PSession.PROTO_DATAGRAM, Constants.TRACKER_PORT, Constants.TRACKER_PORT, options)
}
class Listener implements I2PSessionMuxedListener {
@Override
public void messageAvailable(I2PSession session, int msgId, long size) {
}
@Override
public void messageAvailable(I2PSession session, int msgId, long size, int proto, int fromport, int toport) {
if (proto != I2PSession.PROTO_DATAGRAM) {
log.warning "Received unexpected protocol $proto"
return
}
byte[] payload = session.receiveMessage(msgId)
def dissector = new I2PDatagramDissector()
try {
dissector.loadI2PDatagram(payload)
def sender = dissector.getSender()
log.info("got a tracker datagram from ${sender.toBase32()}")
// if not trusted, just drop it
TrustLevel trustLevel = trustService.getLevel(sender)
if (trustLevel == TrustLevel.DISTRUSTED ||
(trustLevel == TrustLevel.NEUTRAL && !muSettings.allowUntrusted)) {
log.info("dropping, untrusted")
return
}
payload = dissector.getPayload()
def slurper = new JsonSlurper()
def json = slurper.parse(payload)
if (json.type != "TrackerPing") {
log.warning("unknown type $json.type")
return
}
def response = [:]
response.type = "TrackerPong"
response.me = me.toBase64()
if (json.infoHash == null) {
log.warning("infoHash missing")
return
}
if (json.uuid == null) {
log.warning("uuid missing")
return
}
UUID uuid = UUID.fromString(json.uuid)
synchronized(uuids) {
if (uuids.containsKey(uuid)) {
log.warning("duplicate uuid $uuid")
return
}
uuids.put(uuid, System.currentTimeMillis())
}
response.uuid = json.uuid
if (!muSettings.allowTracking) {
response.code = 403
respond(sender, response)
return
}
if (json.version != 1) {
log.warning("unknown version $json.version")
response.code = 400
response.message = "I only support version 1"
respond(sender,response)
return
}
byte[] infoHashBytes = Base64.decode(json.infoHash)
InfoHash infoHash = new InfoHash(infoHashBytes)
log.info("servicing request for infoHash ${json.infoHash} with uuid ${json.uuid}")
if (!(fileManager.isShared(infoHash) || downloadManager.isDownloading(infoHash))) {
response.code = 404
respond(sender, response)
return
}
Mesh mesh = meshManager.get(infoHash)
if (fileManager.isShared(infoHash))
response.code = 200
else if (mesh != null) {
response.code = 206
Pieces pieces = mesh.getPieces()
response.xHave = DataUtil.encodeXHave(pieces.getDownloaded(), pieces.nPieces)
}
if (mesh != null)
response.altlocs = mesh.getRandom(10, me).stream().map({it.toBase64()}).collect(Collectors.toList())
respond(sender,response)
} catch (Exception e) {
log.log(Level.WARNING, "invalid datagram", e)
}
}
@Override
public void reportAbuse(I2PSession session, int severity) {
}
@Override
public void disconnected(I2PSession session) {
if (!shutdown)
log.severe("Tracker Responder session disconnected")
}
@Override
public void errorOccurred(I2PSession session, String message, Throwable error) {
log.log(Level.SEVERE, message, error)
}
}
}
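
The datagrams handled above are small JSON maps. The keys and response codes (200 shared, 206 partially downloaded, 400 unsupported version, 403 tracking disabled, 404 not found) come from the responder code; the values below are invented:

    def ping = [
        type    : 'TrackerPing',
        version : 1,
        uuid    : UUID.randomUUID().toString(),
        infoHash: 'q83vEjRWeJ...'                  // Base64 of the info hash being tracked
    ]
    def pong = [
        type   : 'TrackerPong',
        me     : '<tracker persona base64>',
        uuid   : ping.uuid,
        code   : 200,
        altlocs: ['<persona base64>', '<persona base64>']
    ]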

View File

@ -2,6 +2,7 @@ package com.muwire.core.update
import java.util.logging.Level
import com.muwire.core.Constants
import com.muwire.core.EventBus
import com.muwire.core.InfoHash
import com.muwire.core.MuWireSettings
@ -48,6 +49,7 @@ class UpdateClient {
private volatile boolean updateDownloading
private volatile String text
private volatile boolean shutdown
UpdateClient(EventBus eventBus, I2PSession session, String myVersion, MuWireSettings settings,
FileManager fileManager, Persona me, SigningPrivateKey spk) {
@ -63,11 +65,12 @@ class UpdateClient {
}
void start() {
session.addMuxedSessionListener(new Listener(), I2PSession.PROTO_DATAGRAM, 2)
session.addMuxedSessionListener(new Listener(), I2PSession.PROTO_DATAGRAM, Constants.UPDATE_PORT)
timer.schedule({checkUpdate()} as TimerTask, 60000, 60 * 60 * 1000)
}
void stop() {
shutdown = true
timer.cancel()
}
@ -83,7 +86,7 @@ class UpdateClient {
}
void onFileDownloadedEvent(FileDownloadedEvent e) {
if (e.downloadedFile.infoHash != updateInfoHash)
if (e.infoHash != updateInfoHash)
return
updateDownloading = false
eventBus.publish(new UpdateDownloadedEvent(version : version, signer : signer, text : text))
@ -108,7 +111,7 @@ class UpdateClient {
ping = maker.makeI2PDatagram(ping.bytes)
def options = new SendMessageOptions()
options.setSendLeaseSet(true)
session.sendMessage(UpdateServers.UPDATE_SERVER, ping, 0, ping.length, I2PSession.PROTO_DATAGRAM, 2, 0, options)
session.sendMessage(UpdateServers.UPDATE_SERVER, ping, 0, ping.length, I2PSession.PROTO_DATAGRAM, Constants.UPDATE_PORT, 0, options)
}
class Listener implements I2PSessionMuxedListener {
@ -198,7 +201,8 @@ class UpdateClient {
@Override
public void disconnected(I2PSession session) {
log.severe("I2P session disconnected")
if (!shutdown)
log.severe("I2P session disconnected")
}
@Override

View File

@ -3,4 +3,5 @@ package com.muwire.core.upload
class ContentRequest extends Request {
Range range
int have
boolean browse, feed, chat
}

View File

@ -106,7 +106,7 @@ class ContentUploader extends Uploader {
return done ? 100 : 0
int position = mapped.position()
int total = request.getRange().end - request.getRange().start
(int)(position * 100.0 / total)
(int)(position * 100.0d / total)
}
@Override
@ -137,4 +137,24 @@ class ContentUploader extends Uploader {
request.infoHash == other.request.infoHash &&
request.getDownloader() == other.request.getDownloader()
}
@Override
public boolean isBrowseEnabled() {
request.browse
}
@Override
public boolean isFeedEnabled() {
request.feed
}
@Override
public boolean isChatEnabled() {
request.chat
}
@Override
public Persona getDownloaderPersona() {
request.downloader
}
}

View File

@ -4,6 +4,7 @@ import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import com.muwire.core.InfoHash
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
import net.i2p.data.Base64
@ -45,7 +46,7 @@ class HashListUploader extends Uploader {
@Override
public synchronized int getProgress() {
(int)(mapped.position() * 100.0 / mapped.capacity())
(int)(mapped.position() * 100.0d / mapped.capacity())
}
@Override
@ -75,4 +76,24 @@ class HashListUploader extends Uploader {
HashListUploader other = (HashListUploader)o
infoHash == other.infoHash && request.downloader == other.request.downloader
}
@Override
public boolean isBrowseEnabled() {
return false;
}
@Override
public boolean isFeedEnabled() {
return false;
}
@Override
public boolean isChatEnabled() {
return false;
}
@Override
public Persona getDownloaderPersona() {
request.downloader
}
}

View File

@ -22,7 +22,7 @@ class Request {
static Request parseContentRequest(InfoHash infoHash, InputStream is) throws IOException {
Map<String, String> headers = parseHeaders(is)
Map<String, String> headers = DataUtil.readAllHeaders(is)
if (!headers.containsKey("Range"))
throw new IOException("Range header not found")
@ -55,12 +55,18 @@ class Request {
def encoded = headers["X-Have"].trim()
have = DataUtil.decodeXHave(encoded).size()
}
boolean browse = headers.containsKey("Browse") && Boolean.parseBoolean(headers['Browse'])
boolean feed = headers.containsKey("Feed") && Boolean.parseBoolean(headers['Feed'])
boolean chat = headers.containsKey("Chat") && Boolean.parseBoolean(headers['Chat'])
new ContentRequest( infoHash : infoHash, range : new Range(start, end),
headers : headers, downloader : downloader, have : have)
headers : headers, downloader : downloader, have : have,
browse : browse, feed : feed, chat : chat)
}
static Request parseHashListRequest(InfoHash infoHash, InputStream is) throws IOException {
Map<String,String> headers = parseHeaders(is)
Map<String,String> headers = DataUtil.readAllHeaders(is)
Persona downloader = null
if (headers.containsKey("X-Persona")) {
def encoded = headers["X-Persona"].trim()
@ -69,55 +75,4 @@ class Request {
}
new HashListRequest(infoHash : infoHash, headers : headers, downloader : downloader)
}
private static Map<String, String> parseHeaders(InputStream is) {
Map<String,String> headers = new HashMap<>()
byte [] tmp = new byte[Constants.MAX_HEADER_SIZE]
while(headers.size() < Constants.MAX_HEADERS) {
boolean r = false
boolean n = false
int idx = 0
while (true) {
byte read = is.read()
if (read == -1)
throw new IOException("Stream closed")
if (!r && read == N)
throw new IOException("Received N before R")
if (read == R) {
if (r)
throw new IOException("double R")
r = true
continue
}
if (r && !n) {
if (read != N)
throw new IOException("R not followed by N")
n = true
break
}
if (idx == 0x1 << 14)
throw new IOException("Header too long")
tmp[idx++] = read
}
if (idx == 0)
break
String header = new String(tmp, 0, idx, StandardCharsets.US_ASCII)
log.fine("Read header $header")
int keyIdx = header.indexOf(":")
if (keyIdx < 1)
throw new IOException("Header key not found")
if (keyIdx == header.length())
throw new IOException("Header value not found")
String key = header.substring(0, keyIdx)
String value = header.substring(keyIdx + 1)
headers.put(key, value)
}
headers
}
}

View File

@ -11,7 +11,9 @@ import com.muwire.core.connection.Endpoint
import com.muwire.core.download.DownloadManager
import com.muwire.core.download.Downloader
import com.muwire.core.download.SourceDiscoveredEvent
import com.muwire.core.download.SourceVerifiedEvent
import com.muwire.core.files.FileManager
import com.muwire.core.files.PersisterFolderService
import com.muwire.core.mesh.Mesh
import com.muwire.core.mesh.MeshManager
@ -22,6 +24,7 @@ import net.i2p.data.Base64
public class UploadManager {
private final EventBus eventBus
private final FileManager fileManager
private final PersisterFolderService persisterService
private final MeshManager meshManager
private final DownloadManager downloadManager
private final MuWireSettings props
@ -34,9 +37,11 @@ public class UploadManager {
public UploadManager(EventBus eventBus, FileManager fileManager,
MeshManager meshManager, DownloadManager downloadManager,
PersisterFolderService persisterService,
MuWireSettings props) {
this.eventBus = eventBus
this.fileManager = fileManager
this.persisterService = persisterService
this.meshManager = meshManager
this.downloadManager = downloadManager
this.props = props
@ -119,6 +124,7 @@ public class UploadManager {
eventBus.publish(new UploadEvent(uploader : uploader))
try {
uploader.respond()
eventBus.publish(new SourceVerifiedEvent(infoHash : request.infoHash, source : request.downloader.destination))
} finally {
decrementUploads(request.downloader)
eventBus.publish(new UploadFinishedEvent(uploader : uploader))
@ -162,7 +168,7 @@ public class UploadManager {
InfoHash fullInfoHash
if (downloader == null) {
fullInfoHash = sharedFiles.iterator().next().infoHash
fullInfoHash = persisterService.loadInfoHash(sharedFiles.iterator().next())
} else {
byte [] hashList = downloader.getInfoHash().getHashList()
if (hashList != null && hashList.length > 0)
@ -255,6 +261,7 @@ public class UploadManager {
eventBus.publish(new UploadEvent(uploader : uploader))
try {
uploader.respond()
eventBus.publish(new SourceVerifiedEvent(infoHash : request.infoHash, source : request.downloader.destination))
} finally {
eventBus.publish(new UploadFinishedEvent(uploader : uploader))
}

View File

@ -6,6 +6,7 @@ import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.nio.file.StandardOpenOption
import com.muwire.core.Persona
import com.muwire.core.connection.Endpoint
abstract class Uploader {
@ -31,6 +32,7 @@ abstract class Uploader {
}
abstract String getName();
/**
* @return an integer between 0 and 100
@ -38,6 +40,7 @@ abstract class Uploader {
abstract int getProgress();
abstract String getDownloader();
abstract Persona getDownloaderPersona();
abstract int getDonePieces();
@ -45,11 +48,15 @@ abstract class Uploader {
abstract long getTotalSize();
abstract boolean isBrowseEnabled();
abstract boolean isFeedEnabled();
abstract boolean isChatEnabled();
synchronized int speed() {
final long now = System.currentTimeMillis()
long interval = Math.max(1000, now - lastSpeedRead)
lastSpeedRead = now;
int currSpeed = (int) (dataSinceLastRead * 1000.0 / interval)
int currSpeed = (int) (dataSinceLastRead * 1000.0d / interval)
dataSinceLastRead = 0
// normalize to speedArr.size

View File

@ -4,6 +4,8 @@ import net.i2p.crypto.SigType;
public class Constants {
public static final byte PERSONA_VERSION = (byte)1;
public static final String INVALID_NICKNAME_CHARS = "'\"();<>=@$%";
public static final int MAX_NICKNAME_LENGTH = 30;
public static final byte FILE_CERT_VERSION = (byte)2;
public static final int CHAT_VERSION = 1;
@ -17,5 +19,8 @@ public class Constants {
public static final int MAX_COMMENT_LENGTH = 0x1 << 15;
public static final long MAX_QUERY_AGE = 5 * 60 * 1000L;
public static final int UPDATE_PORT = 2;
public static final int TRACKER_PORT = 3;
}

View File

@ -10,9 +10,9 @@ public class DownloadedFile extends SharedFile {
private final Set<Destination> sources;
public DownloadedFile(File file, InfoHash infoHash, int pieceSize, Set<Destination> sources)
public DownloadedFile(File file, byte[] root, int pieceSize, Set<Destination> sources)
throws IOException {
super(file, infoHash, pieceSize);
super(file, root, pieceSize);
this.sources = sources;
}

View File

@ -0,0 +1,25 @@
package com.muwire.core;
public class InvalidNicknameException extends Exception {
public InvalidNicknameException() {
}
public InvalidNicknameException(String message) {
super(message);
}
public InvalidNicknameException(Throwable cause) {
super(cause);
}
public InvalidNicknameException(String message, Throwable cause) {
super(message, cause);
}
public InvalidNicknameException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}

View File

@ -7,6 +7,8 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import com.muwire.core.util.DataUtil;
import net.i2p.crypto.DSAEngine;
import net.i2p.data.Base64;
import net.i2p.data.DataFormatException;
@ -25,12 +27,15 @@ public class Persona {
private volatile String base64;
private volatile byte[] payload;
public Persona(InputStream personaStream) throws IOException, DataFormatException, InvalidSignatureException {
public Persona(InputStream personaStream) throws IOException, DataFormatException, InvalidSignatureException, InvalidNicknameException {
version = (byte) (personaStream.read() & 0xFF);
if (version != Constants.PERSONA_VERSION)
throw new IOException("Unknown version "+version);
name = new Name(personaStream);
if (!DataUtil.isValidName(name.name))
throw new InvalidNicknameException(name.name + " is not a valid nickname");
destination = Destination.create(personaStream);
sig = new byte[SIG_LEN];
DataInputStream dis = new DataInputStream(personaStream);
@ -38,7 +43,7 @@ public class Persona {
if (!verify(version, name, destination, sig))
throw new InvalidSignatureException(getHumanReadableName() + " didn't verify");
}
private static boolean verify(byte version, Name name, Destination destination, byte [] sig)
throws IOException, DataFormatException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
@ -47,7 +52,7 @@ public class Persona {
destination.writeBytes(baos);
byte[] payload = baos.toByteArray();
SigningPublicKey spk = destination.getSigningPublicKey();
Signature signature = new Signature(Constants.SIG_TYPE, sig);
Signature signature = new Signature(spk.getType(), sig);
return DSAEngine.getInstance().verifySignature(signature, payload, spk);
}

View File

@ -2,7 +2,10 @@ package com.muwire.core;
import java.io.File;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
@ -16,44 +19,49 @@ import net.i2p.data.Base64;
public class SharedFile {
private final File file;
private final InfoHash infoHash;
private final byte[] root;
private final int pieceSize;
private final String cachedPath;
private final long cachedLength;
private String b64PathHash;
private final String b64EncodedFileName;
private final String b64EncodedHashRoot;
private final List<String> b64EncodedHashList;
private volatile String comment;
private final Set<String> downloaders = Collections.synchronizedSet(new HashSet<>());
private final Set<SearchEntry> searches = Collections.synchronizedSet(new HashSet<>());
private volatile boolean published;
private volatile long publishedTimestamp;
public SharedFile(File file, InfoHash infoHash, int pieceSize) throws IOException {
public SharedFile(File file, byte[] root, int pieceSize) throws IOException {
this.file = file;
this.infoHash = infoHash;
this.root = root;
this.pieceSize = pieceSize;
this.cachedPath = file.getAbsolutePath();
this.cachedLength = file.length();
this.b64EncodedFileName = Base64.encode(DataUtil.encodei18nString(file.toString()));
this.b64EncodedHashRoot = Base64.encode(infoHash.getRoot());
List<String> b64List = new ArrayList<String>();
byte[] tmp = new byte[32];
for (int i = 0; i < infoHash.getHashList().length / 32; i++) {
System.arraycopy(infoHash.getHashList(), i * 32, tmp, 0, 32);
b64List.add(Base64.encode(tmp));
}
this.b64EncodedHashList = b64List;
}
public File getFile() {
return file;
}
public InfoHash getInfoHash() {
return infoHash;
public byte[] getPathHash() throws NoSuchAlgorithmException {
MessageDigest digester = MessageDigest.getInstance("SHA-256");
digester.update(file.getAbsolutePath().getBytes());
return digester.digest();
}
public String getB64PathHash() throws NoSuchAlgorithmException {
if(b64PathHash == null){
b64PathHash = Base64.encode(getPathHash());
}
return b64PathHash;
}
public byte[] getRoot() {
return root;
}
public int getPieceSize() {
@ -73,14 +81,6 @@ public class SharedFile {
return b64EncodedFileName;
}
public String getB64EncodedHashRoot() {
return b64EncodedHashRoot;
}
public List<String> getB64EncodedHashList() {
return b64EncodedHashList;
}
public String getCachedPath() {
return cachedPath;
}
@ -116,10 +116,28 @@ public class SharedFile {
public void addDownloader(String name) {
downloaders.add(name);
}
public void publish(long timestamp) {
published = true;
publishedTimestamp = timestamp;
}
public void unpublish() {
published = false;
publishedTimestamp = 0;
}
public boolean isPublished() {
return published;
}
public long getPublishedTimestamp() {
return publishedTimestamp;
}
@Override
public int hashCode() {
return file.hashCode() ^ infoHash.hashCode();
return file.hashCode() ^ Arrays.hashCode(root);
}
@Override
@ -127,7 +145,7 @@ public class SharedFile {
if (!(o instanceof SharedFile))
return false;
SharedFile other = (SharedFile)o;
return file.equals(other.file) && infoHash.equals(other.infoHash);
return file.equals(other.file) && Arrays.equals(root, other.root);
}
public static class SearchEntry {
@ -141,6 +159,18 @@ public class SharedFile {
this.query = query;
}
public Persona getSearcher() {
return searcher;
}
public long getTimestamp() {
return timestamp;
}
public String getQuery() {
return query;
}
public int hashCode() {
return Objects.hash(searcher) ^ Objects.hash(timestamp) ^ query.hashCode();
}

View File

@ -0,0 +1,81 @@
package com.muwire.core.filefeeds;
import com.muwire.core.Persona;
public class Feed {
private final Persona publisher;
private long updateInterval;
private long lastUpdated;
private volatile long lastUpdateAttempt;
private int itemsToKeep;
private boolean autoDownload;
private boolean sequential;
private FeedFetchStatus status;
public Feed(Persona publisher) {
this.publisher = publisher;
this.status = FeedFetchStatus.IDLE;
}
public long getUpdateInterval() {
return updateInterval;
}
public void setUpdateInterval(long updateInterval) {
this.updateInterval = updateInterval;
}
public long getLastUpdated() {
return lastUpdated;
}
public void setLastUpdated(long lastUpdated) {
this.lastUpdated = lastUpdated;
}
public int getItemsToKeep() {
return itemsToKeep;
}
public void setItemsToKeep(int itemsToKeep) {
this.itemsToKeep = itemsToKeep;
}
public boolean isAutoDownload() {
return autoDownload;
}
public void setAutoDownload(boolean autoDownload) {
this.autoDownload = autoDownload;
}
public Persona getPublisher() {
return publisher;
}
public void setStatus(FeedFetchStatus status) {
this.status = status;
}
public FeedFetchStatus getStatus() {
return status;
}
public void setSequential(boolean sequential) {
this.sequential = sequential;
}
public boolean isSequential() {
return sequential;
}
public void setLastUpdateAttempt(long lastUpdateAttempt) {
this.lastUpdateAttempt = lastUpdateAttempt;
}
public long getLastUpdateAttempt() {
return lastUpdateAttempt;
}
}

View File

@ -0,0 +1,19 @@
package com.muwire.core.filefeeds;
public enum FeedFetchStatus {
IDLE(false),
CONNECTING(true),
FETCHING(true),
FINISHED(false),
FAILED(false);
private final boolean active;
FeedFetchStatus(boolean active) {
this.active = active;
}
public boolean isActive() {
return active;
}
}

View File

@ -0,0 +1,79 @@
package com.muwire.core.filefeeds;
import java.util.Objects;
import com.muwire.core.InfoHash;
import com.muwire.core.Persona;
public class FeedItem {
private final Persona publisher;
private final long timestamp;
private final String name;
private final long size;
private final int pieceSize;
private final InfoHash infoHash;
private final int certificates;
private final String comment;
public FeedItem(Persona publisher, long timestamp, String name, long size, int pieceSize, InfoHash infoHash,
int certificates, String comment) {
super();
this.publisher = publisher;
this.timestamp = timestamp;
this.name = name;
this.size = size;
this.pieceSize = pieceSize;
this.infoHash = infoHash;
this.certificates = certificates;
this.comment = comment;
}
public Persona getPublisher() {
return publisher;
}
public long getTimestamp() {
return timestamp;
}
public String getName() {
return name;
}
public long getSize() {
return size;
}
public int getPieceSize() {
return pieceSize;
}
public InfoHash getInfoHash() {
return infoHash;
}
public int getCertificates() {
return certificates;
}
public String getComment() {
return comment;
}
@Override
public int hashCode() {
return Objects.hash(publisher, timestamp, name, infoHash);
}
@Override
public boolean equals(Object o) {
if (!(o instanceof FeedItem))
return false;
FeedItem other = (FeedItem)o;
return Objects.equals(publisher, other.publisher) &&
timestamp == other.timestamp &&
Objects.equals(name, other.name) &&
Objects.equals(infoHash, other.infoHash);
}
}

View File

@ -0,0 +1,30 @@
package com.muwire.core.filefeeds;
public class InvalidFeedItemException extends Exception {
public InvalidFeedItemException() {
super();
}
public InvalidFeedItemException(String message, Throwable cause, boolean enableSuppression,
boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public InvalidFeedItemException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public InvalidFeedItemException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public InvalidFeedItemException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}

View File

@ -58,9 +58,9 @@ public class DataUtil {
if (header.length != 3)
throw new IllegalArgumentException("header length $header.length");
return (((int)(header[0] & 0x7F)) << 16) |
(((int)(header[1] & 0xFF) << 8)) |
((int)header[2] & 0xFF);
return ((header[0] & 0x7F) << 16) |
((header[1] & 0xFF) << 8) |
(header[2] & 0xFF);
}
public static String readi18nString(byte [] encoded) {
@ -174,7 +174,7 @@ public class DataUtil {
clean.setAccessible(true);
clean.invoke(cleaner.invoke(cb));
} else {
Class unsafeClass;
Class<?> unsafeClass;
try {
unsafeClass = Class.forName("sun.misc.Unsafe");
} catch(Exception ex) {
@ -216,4 +216,13 @@ public class DataUtil {
Signature sig = DSAEngine.getInstance().sign(payload, spk);
return sig.getData();
}
public static boolean isValidName(String name) {
if (name.length() > Constants.MAX_NICKNAME_LENGTH)
return false;
for (int i = 0; i < Constants.INVALID_NICKNAME_CHARS.length(); i++)
if (name.indexOf(Constants.INVALID_NICKNAME_CHARS.charAt(i)) >= 0)
return false;
return true;
}
}

View File

@ -0,0 +1,35 @@
package com.muwire.core.util;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashSet;
import java.util.Set;
public class FixedSizeFIFOSet<T> {
private final int capacity;
private final Set<T> set = new HashSet<>();
private final Deque<T> fifo = new ArrayDeque<>();
public FixedSizeFIFOSet(final int capacity) {
this.capacity = capacity;
}
public boolean contains(T element) {
return set.contains(element);
}
public void add(T element) {
if (!set.contains(element)) {
if (set.size() == capacity) {
T toRemove = fifo.removeLast();
set.remove(toRemove);
}
fifo.addFirst(element);
set.add(element);
} else {
fifo.remove(element);
fifo.addFirst(element);
}
}
}

View File

@ -46,7 +46,7 @@ class DownloadSessionTest {
eventBus = new EventBus()
}
private void initSession(int size, def claimedPieces = []) {
private void initSession(int size, def claimedPieces = [], boolean browse = false, boolean feed = false, boolean chat = false) {
Random r = new Random()
byte [] content = new byte[size]
r.nextBytes(content)
@ -78,7 +78,8 @@ class DownloadSessionTest {
toUploader = new PipedOutputStream(fromDownloader)
endpoint = new Endpoint(null, fromUploader, toUploader, null)
session = new DownloadSession(eventBus, "",pieces, infoHash, endpoint, target, pieceSize, size, available, new AtomicLong())
session = new DownloadSession(eventBus, "",pieces, infoHash, endpoint, target, pieceSize, size, available, new AtomicLong(),
browse, feed, chat)
downloadThread = new Thread( { perform() } as Runnable)
downloadThread.setDaemon(true)
downloadThread.start()

View File

@ -39,13 +39,13 @@ class FileManagerTest {
@Test
void testHash1Result() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih, 0)
byte [] root = new byte[32]
SharedFile sf = new SharedFile(f,root, 0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
UUID uuid = UUID.randomUUID()
SearchEvent se = new SearchEvent(searchHash: ih.getRoot(), uuid: uuid)
SearchEvent se = new SearchEvent(searchHash: root, uuid: uuid)
manager.onSearchEvent(se)
Thread.sleep(20)
@ -58,14 +58,14 @@ class FileManagerTest {
@Test
void testHash2Results() {
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(new File("a b.c"), ih, 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih, 0)
byte [] root = new byte[32]
SharedFile sf1 = new SharedFile(new File("a b.c"), root, 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), root, 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf1)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf2)
UUID uuid = UUID.randomUUID()
SearchEvent se = new SearchEvent(searchHash: ih.getRoot(), uuid: uuid)
SearchEvent se = new SearchEvent(searchHash: root, uuid: uuid)
manager.onSearchEvent(se)
Thread.sleep(20)
@ -81,7 +81,7 @@ class FileManagerTest {
void testHash0Results() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih, 0)
SharedFile sf = new SharedFile(f,ih.getRoot(), 0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -95,7 +95,7 @@ class FileManagerTest {
void testKeyword1Result() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih,0)
SharedFile sf = new SharedFile(f,ih.getRoot(),0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -113,12 +113,12 @@ class FileManagerTest {
void testKeyword2Results() {
File f1 = new File("a b.c")
InfoHash ih1 = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(f1, ih1, 0)
SharedFile sf1 = new SharedFile(f1, ih1.getRoot(), 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf1)
File f2 = new File("c d.e")
InfoHash ih2 = InfoHash.fromHashList(new byte[64])
SharedFile sf2 = new SharedFile(f2, ih2, 0)
SharedFile sf2 = new SharedFile(f2, ih2.getRoot(), 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf2)
UUID uuid = UUID.randomUUID()
@ -136,7 +136,7 @@ class FileManagerTest {
void testKeyword0Results() {
File f = new File("a b.c")
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf = new SharedFile(f,ih,0)
SharedFile sf = new SharedFile(f,ih.getRoot(),0)
FileHashedEvent fhe = new FileHashedEvent(sharedFile: sf)
manager.onFileHashedEvent(fhe)
@ -149,8 +149,8 @@ class FileManagerTest {
@Test
void testRemoveFileExistingHash() {
InfoHash ih = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(new File("a b.c"), ih, 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih, 0)
SharedFile sf1 = new SharedFile(new File("a b.c"), ih.getRoot(), 0)
SharedFile sf2 = new SharedFile(new File("d e.f"), ih.getRoot(), 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf1)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile : sf2)
@ -167,12 +167,12 @@ class FileManagerTest {
void testRemoveFile() {
File f1 = new File("a b.c")
InfoHash ih1 = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(f1, ih1, 0)
SharedFile sf1 = new SharedFile(f1, ih1.getRoot(), 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf1)
File f2 = new File("c d.e")
InfoHash ih2 = InfoHash.fromHashList(new byte[64])
SharedFile sf2 = new SharedFile(f2, ih2, 0)
SharedFile sf2 = new SharedFile(f2, ih2.getRoot(), 0)
manager.onFileLoadedEvent new FileLoadedEvent(loadedFile: sf2)
manager.onFileUnsharedEvent new FileUnsharedEvent(deleted : true, unsharedFile: sf2)
@ -198,7 +198,7 @@ class FileManagerTest {
comment = Base64.encode(DataUtil.encodei18nString(comment))
File f1 = new File("MuWire-0.5.10.AppImage")
InfoHash ih1 = InfoHash.fromHashList(new byte[32])
SharedFile sf1 = new SharedFile(f1, ih1, 0)
SharedFile sf1 = new SharedFile(f1, ih1.getRoot(), 0)
sf1.setComment(comment)
manager.onFileLoadedEvent(new FileLoadedEvent(loadedFile : sf1))
@ -206,7 +206,7 @@ class FileManagerTest {
File f2 = new File("MuWire-0.6.0.AppImage")
InfoHash ih2 = InfoHash.fromHashList(new byte[64])
SharedFile sf2 = new SharedFile(f2, ih2, 0)
SharedFile sf2 = new SharedFile(f2, ih2.getRoot(), 0)
sf2.setComment(comment)
manager.onFileLoadedEvent(new FileLoadedEvent(loadedFile : sf2))

View File

@ -45,7 +45,7 @@ class HasherServiceTest {
def hashed = listener.poll()
assert hashed instanceof FileHashedEvent
assert hashed.sharedFile.file == f.getCanonicalFile()
assert hashed.sharedFile.infoHash != null
assert hashed.sharedFile.root != null
assert listener.isEmpty()
}

View File

@ -85,7 +85,7 @@ class PersisterServiceLoadingTest {
def loadedFile = listener.publishedFiles[0]
assert loadedFile != null
assert loadedFile.file == sharedFile1.getCanonicalFile()
assert loadedFile.infoHash == ih1
assert loadedFile.root == ih1.getRoot()
}
private static String getSharedFileJsonName(File sharedFile) {
@ -128,7 +128,7 @@ class PersisterServiceLoadingTest {
def loadedFile = listener.publishedFiles[0]
assert loadedFile != null
assert loadedFile.file == sharedFile1.getCanonicalFile()
assert loadedFile.infoHash == ih1
assert loadedFile.root == ih1.getRoot()
}
@Test
@ -169,10 +169,10 @@ class PersisterServiceLoadingTest {
assert listener.publishedFiles.size() == 2
def loadedFile1 = listener.publishedFiles[0]
assert loadedFile1.file == sharedFile1.getCanonicalFile()
assert loadedFile1.infoHash == ih1
assert loadedFile1.root == ih1.getRoot()
def loadedFile2 = listener.publishedFiles[1]
assert loadedFile2.file == sharedFile2.getCanonicalFile()
assert loadedFile2.infoHash == ih2
assert loadedFile2.root == ih2.getRoot()
}
@Test

View File

@ -2,6 +2,7 @@ package com.muwire.core.files
import org.junit.After
import org.junit.Before
import org.junit.Ignore
import org.junit.Test
import com.muwire.core.Destinations
@ -16,6 +17,7 @@ import groovy.json.JsonSlurper
import net.i2p.data.Base32
import net.i2p.data.Base64
@Ignore
class PersisterServiceSavingTest {
File f

View File

@ -75,6 +75,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
@ -97,6 +98,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
@ -114,6 +116,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
@ -136,6 +139,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -160,6 +164,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 100 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -182,6 +187,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -211,6 +217,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -246,6 +253,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -266,6 +274,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
cache.onHostDiscoveredEvent(new HostDiscoveredEvent(destination: destinations.dest1))
@ -301,6 +310,7 @@ class HostCacheTest {
settingsMock.ignore.getHostClearInterval { 0 }
settingsMock.ignore.getHostHopelessInterval { 0 }
settingsMock.ignore.getHostRejectInterval { 0 }
settingsMock.ignore.getHostHopelessPurgeInterval { 0 }
initMocks()
def rv = cache.getHosts(5)

View File

@ -0,0 +1,49 @@
package com.muwire.core.util
import org.junit.Test
class FixedSizeFIFOSetTest {
@Test
public void testFifo() {
FixedSizeFIFOSet<String> fifoSet = new FixedSizeFIFOSet(3);
fifoSet.add("a")
assert fifoSet.contains("a")
fifoSet.add("b")
assert fifoSet.contains("a")
assert fifoSet.contains("b")
fifoSet.add("c")
assert fifoSet.contains("a")
assert fifoSet.contains("b")
assert fifoSet.contains("c")
fifoSet.add("d")
assert !fifoSet.contains("a")
assert fifoSet.contains("b")
assert fifoSet.contains("c")
assert fifoSet.contains("d")
}
@Test
public void testDuplicateElement() {
FixedSizeFIFOSet<String> fifoSet = new FixedSizeFIFOSet(3);
fifoSet.add("a")
fifoSet.add("b")
fifoSet.add("c")
fifoSet.add("a")
assert fifoSet.contains("a")
assert fifoSet.contains("b")
assert fifoSet.contains("c")
fifoSet.add("d")
assert fifoSet.contains("a")
assert !fifoSet.contains("b")
assert fifoSet.contains("c")
assert fifoSet.contains("d")
}
}

49
doc/collections.md Normal file
View File

@ -0,0 +1,49 @@
# MuWire Collections
Status: Draft, Proposal, Unimplemented
MuWire collections are files containing meta-information about a grouping of files. They serve a similar purpose to .torrent files, but the internal format is rather different in order to account for MuWire identity management.
A user wishing to create a collection of files needs to have shared all the files that are going to be part of the collection. Their full MuWire ID will be stored in the collection, so anyone wishing to download any of the files in the collection will try to download from them first.
The collection will be signed, so anyone can verify that the embedded full MuWire ID authored the collection.
### File Format
Header:
```
byte 0: Collection version, currently fixed at "1".
bytes 1,2 : unsigned 16-bit value of the number of files in the collection. Empty files or directories are not allowed.
bytes 3-N: Full MuWire ID of the publisher of the collection, in Persona format.
bytes N+1 to N+8: Timestamp of the collection, in milliseconds since epoch, UTC
bytes N+9 to M: Free-form description of the collection (comment). Format is UTF-8, maximum size is 32kb.
```
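For illustration, here is a minimal sketch of how such a header could be serialized. The class and parameter names are hypothetical, and it assumes the timestamp is written as an 8-byte big-endian long and the comment carries an unsigned 16-bit length prefix; the draft above does not yet fix either encoding.

```
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

class CollectionHeaderWriter {
    // publisherPersona is the raw byte payload of the publisher's full MuWire ID
    static byte[] write(byte[] publisherPersona, int fileCount, long timestamp,
                        String comment) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeByte(1);                     // collection version, currently fixed at 1
        dos.writeShort(fileCount);            // unsigned 16-bit number of files
        dos.write(publisherPersona);          // full MuWire ID in Persona format
        dos.writeLong(timestamp);             // milliseconds since epoch, UTC (assumed 8 bytes)
        byte[] commentBytes = comment.getBytes(StandardCharsets.UTF_8);
        if (commentBytes.length > 0x8000)
            throw new IOException("comment exceeds 32kb");
        dos.writeShort(commentBytes.length);  // assumed unsigned 16-bit length prefix
        dos.write(commentBytes);
        dos.flush();
        return baos.toByteArray();
    }
}
```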
The header is followed by a file entry for each file in the collection. The format is as follows:
```
byte 0: File entry version, currently fixed at "1".
byte 1-33: hash of the file
byte 34: Unsigned 8-bit number of path elements from root to where the file will ultimately be placed upon download.
bytes 35-N: UTF-8 encoded length-prefixed path elements. Each element can be at most 32kb long. The last element is the name of the file.
bytes N+1 to M: free-form description of the file (comment). Format is UTF-8, maximum size is 32kb.
```
The file entries are followed by a footer, which is simply a signature over the byte payload of the header and the file entries.
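A corresponding sketch for reading a single file entry is below; again it assumes unsigned 16-bit length prefixes on the variable-length UTF-8 fields and a 32-byte file hash, which the draft leaves open. The footer signature could then be verified the same way Persona signatures are, via `DSAEngine.getInstance().verifySignature()` over the header and file-entry bytes.

```
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

class CollectionFileEntry {
    byte[] fileHash = new byte[32];   // assumed 32-byte hash of the file
    String[] pathElements;            // last element is the name of the file
    String comment;

    static CollectionFileEntry read(InputStream in) throws IOException {
        DataInputStream dis = new DataInputStream(in);
        CollectionFileEntry entry = new CollectionFileEntry();
        int version = dis.readUnsignedByte();            // file entry version, currently fixed at 1
        if (version != 1)
            throw new IOException("unknown file entry version " + version);
        dis.readFully(entry.fileHash);
        int pathCount = dis.readUnsignedByte();          // number of path elements from root
        entry.pathElements = new String[pathCount];
        for (int i = 0; i < pathCount; i++)
            entry.pathElements[i] = readPrefixedString(dis);
        entry.comment = readPrefixedString(dis);         // free-form description of the file
        return entry;
    }

    private static String readPrefixedString(DataInputStream dis) throws IOException {
        int length = dis.readUnsignedShort();            // assumed unsigned 16-bit length prefix
        byte[] raw = new byte[length];
        dis.readFully(raw);
        return new String(raw, StandardCharsets.UTF_8);
    }
}
```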
### Downloading
Since the collection is created from individual shared files, every file within the collection is searchable. It is possible to extend the shared file data structure to contain references to any collections the file belongs to - TBD.
When a user searches for a keyword or hash, they can find either the collection metafile itself or a file which is a member of one or more collections. In the latter case, the user is given the option to download the collection metafile.
If the user chooses to download the collection metafile, they will be presented with a dialog showing the meta-information contained in the collection descriptor. They will be able to see the directory structure of the collection and to choose individual files to download.
TBD - what happens when some of the files are already downloaded but are not in the final directory location?
Finally, when starting the download, the downloader always queries the persona in the collection first, regardless of who returned the search result.
### Sharing
When downloading the collection descriptor, the user makes the descriptor available for indexing. This way collection descriptors can propagate on the network.
TBD - do they also index the comments and file names in the descriptor, even if they haven't downloaded the files?

View File

@ -1,15 +1,17 @@
group = com.muwire
version = 0.6.8
i2pVersion = 0.9.44
groovyVersion = 2.4.15
version = 0.7.4
i2pVersion = 0.9.47
groovyVersion = 3.0.4
slf4jVersion = 1.7.25
spockVersion = 1.1-groovy-2.4
grailsVersion=4.0.0
gorm.version=7.0.2.RELEASE
griffonEnv=prod
# javac properties
sourceCompatibility=1.8
targetCompatibility=1.8
compilerArgs=-Xlint:unchecked,cast,path,divzero,empty,path,finally,overrides
# plugin properties
author = zab@mail.i2p
@ -18,4 +20,4 @@ keystorePassword=changeit
websiteURL=http://muwire.i2p
updateURLsu3=http://muwire.i2p/MuWire-update.su3
pack200=true
pack200=false

View File

@ -42,10 +42,25 @@ griffon {
application {
mainClassName = 'com.muwire.gui.Launcher'
applicationDefaultJvmArgs = ['-Djava.util.logging.config.file=logging.properties','-Xmx256M']
applicationName = 'MuWire'
}
run {
applicationDefaultJvmArgs=[]
}
startScripts.doFirst {
application.applicationDefaultJvmArgs = ["-Djava.util.logging.config.file=logging.properties",
"-Xmx256M",
"--add-opens", "java.base/java.lang=ALL-UNNAMED",
"--add-opens", "java.base/sun.nio.fs=ALL-UNNAMED",
"--add-opens", "java.base/java.nio=ALL-UNNAMED",
"--add-opens", "java.desktop/java.awt=ALL-UNNAMED",
"--add-opens", "java.desktop/javax.swing=ALL-UNNAMED",
"--add-opens", "java.desktop/javax.swing.plaf.basic=ALL-UNNAMED"]
}
apply from: 'gradle/publishing.gradle'
// apply from: 'gradle/code-coverage.gradle'
// apply from: 'gradle/code-quality.gradle'
@ -57,9 +72,27 @@ apply plugin: 'org.kordamp.gradle.stats'
apply plugin: 'com.github.ben-manes.versions'
apply plugin: 'com.github.kt3k.coveralls'
configurations.all {
exclude group:'org.codehaus.groovy', module:'groovy-test'
exclude group:'org.codehaus.groovy', module:'groovy-testng'
exclude group:'org.codehaus.groovy', module:'groovy-test-junit5'
exclude group:'org.codehaus.groovy', module:'groovy-ant'
exclude group:'org.codehaus.groovy', module:'groovy-sql'
exclude group:'org.codehaus.groovy', module:'groovy-nio'
exclude group:'org.codehaus.groovy', module:'groovy-servlet'
exclude group:'org.codehaus.groovy', module:'groovy-jmx'
exclude group:'org.codehaus.groovy', module:'groovy-groovydoc'
exclude group:'org.codehaus.groovy', module:'groovy-groovysh'
exclude group:'org.codehaus.groovy', module:'groovy-xml'
exclude group:'org.codehaus.groovy', module:'groovy-docgenerator'
// TODO: add more as discovered
}
dependencies {
compile project(":core")
compile "org.codehaus.griffon:griffon-guice:${griffon.version}"
compile "org.codehaus.groovy:groovy-all:${groovyVersion}"
// runtime "org.slf4j:slf4j-simple:${slf4jVersion}"
@ -68,6 +101,9 @@ dependencies {
runtime group: 'org.slf4j', name: 'jul-to-slf4j', version: "${slf4jVersion}"
runtime "javax.annotation:javax.annotation-api:1.3.2"
// because java 14 doesn't come with it
runtime 'mrj:MRJToolkitStubs:1.0'
testCompile "org.codehaus.griffon:griffon-fest-test:${griffon.version}"
testCompile "org.spockframework:spock-core:${spockVersion}"
testCompile('org.awaitility:awaitility-groovy:3.1.0') {

Some files were not shown because too many files have changed in this diff.