Compare commits

..

4 Commits

SHA1 Message Date
bc9172be83 fix date in changelog 2007-12-01 18:31:05 +00:00
5ebeb3ef4f tweak full hd warning - disable it in libtorrent 2007-12-01 06:45:43 +00:00
ab420e4fed update changelogs and up trunk version 2007-12-01 04:20:47 +00:00
14eaf79371 tag fix release 2007-12-01 04:12:54 +00:00
2831 changed files with 270155 additions and 673614 deletions

.gitattributes

@@ -1,5 +0,0 @@
.gitattributes export-ignore
.gitmodules export-ignore
.gitignore export-ignore
*.py diff=python
ext-all.js diff=minjs


@@ -1,116 +0,0 @@
name: CI
on:
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  test-linux:
    runs-on: ubuntu-20.04
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.8"
      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: ~/.cache/pip
          # Look to see if there is a cache hit for the corresponding requirements file
          key: ${{ runner.os }}-pip-${{ hashFiles('tox.ini', 'setup.py', 'requirements*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-
      - name: Add libtorrent deb repository
        uses: myci-actions/add-deb-repo@8
        with:
          repo: deb http://ppa.launchpad.net/libtorrent.org/1.2-daily/ubuntu focal main
          repo-name: libtorrent
          keys: 58E5430D9667FAEFFCA0B93F32309D6B9E009EDB
          key-server: keyserver.ubuntu.com
          install: python3-libtorrent-dbg
      - name: Sets env var for security
        if: (github.event_name == 'pull_request' && contains(github.event.pull_request.body, 'security_test')) || (github.event_name == 'push' && contains(github.event.head_commit.message, 'security_test'))
        run: echo "SECURITY_TESTS=True" >> $GITHUB_ENV
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel
          pip install -r requirements.txt -r requirements-tests.txt
          pip install -e .
      - name: Install security dependencies
        if: contains(env.SECURITY_TESTS, 'True')
        run: |
          wget -O- $TESTSSL_URL$TESTSSL_VER | tar xz
          mv -t deluge/tests/data testssl.sh-$TESTSSL_VER/testssl.sh testssl.sh-$TESTSSL_VER/etc/;
        env:
          TESTSSL_VER: 3.0.6
          TESTSSL_URL: https://codeload.github.com/drwetter/testssl.sh/tar.gz/refs/tags/v
      - name: Setup core dump directory
        run: |
          sudo mkdir /cores/ && sudo chmod 777 /cores/
          echo "/cores/%E.%p" | sudo tee /proc/sys/kernel/core_pattern
      - name: Test with pytest
        run: |
          ulimit -c unlimited # Enable core dumps to be captured
          cp /usr/lib/python3/dist-packages/libtorrent*.so $GITHUB_WORKSPACE/deluge
          python -c 'from deluge._libtorrent import lt; print(lt.__version__)';
          catchsegv python -X dev -m pytest -v -m "not (todo or gtkui)" deluge
      - uses: actions/upload-artifact@v2
        # capture all crashes as build artifacts
        if: failure()
        with:
          name: crashes
          path: /cores
  test-windows:
    runs-on: windows-latest
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: "3.7"
      - name: Cache pip
        uses: actions/cache@v2
        with:
          path: '%LOCALAPPDATA%\pip\Cache'
          # Look to see if there is a cache hit for the corresponding requirements file
          key: ${{ runner.os }}-pip-${{ hashFiles('tox.ini', 'setup.py', 'requirements*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip wheel
          python -m pip install libtorrent==1.2.*
          pip install -r requirements.txt -r requirements-tests.txt
          pip install -e .
      - name: Test with pytest
        run: |
          python -c 'import libtorrent as lt; print(lt.__version__)';
          pytest -m "not (todo or gtkui or security)" deluge
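Both jobs above deselect tests with the marker expression `-m "not (todo or gtkui)"`. A minimal, hypothetical sketch of how such pytest markers are declared and applied; the marker names come from the workflow, while the file and function names here are made up for illustration:

```python
# conftest.py (hypothetical): register the markers so `pytest -m` can filter on them
def pytest_configure(config):
    config.addinivalue_line('markers', 'todo: test not yet expected to pass')
    config.addinivalue_line('markers', 'gtkui: test requires the GTK UI')


# test_example.py (hypothetical)
import pytest


@pytest.mark.gtkui
def test_main_window_opens():
    """Deselected by the workflow's marker expression."""


def test_config_roundtrip():
    """Unmarked, so it is collected and run by the pytest step."""
```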


@@ -1,45 +0,0 @@
name: Docs
# Controls when the action will run.
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v2
        with:
          python-version: "3.8"
      - name: Cache pip
        uses: actions/cache@v2
        with:
          # This path is specific to Ubuntu
          path: ~/.cache/pip
          # Look to see if there is a cache hit for the corresponding requirements file
          key: ${{ runner.os }}-pip-${{ hashFiles('requirements*.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
            ${{ runner.os }}-
      - name: Install dependencies
        run: |
          pip install --upgrade pip wheel
          pip install tox
          sudo apt-get install enchant
      - name: Test with tox
        env:
          TOX_ENV: docs
        run: |
          tox -e $TOX_ENV


@@ -1,17 +0,0 @@
name: Linting
on:
  push:
  pull_request:
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
      - name: Run pre-commit linting
        uses: pre-commit/action@v2.0.2

.gitignore

@@ -1,26 +0,0 @@
*~
build
.cache
dist
docs/source/modules/deluge*.rst
*.egg-info/
*.dist-info/
*.egg
*.log
__pycache__/
*.py[cod]
*.tar.*
_trial_temp
.tox/
deluge/i18n/*/
deluge.pot
deluge/ui/web/js/*.js
deluge/ui/web/js/extjs/ext-extensions*.js
*.desktop
*.appdata.xml
.build_data*
osx/app
RELEASE-VERSION
.venv*
# used by setuptools to cache downloaded eggs
/.eggs


@@ -1,42 +0,0 @@
default_language_version:
  python: python3
exclude: >
  (?x)^(
      deluge/ui/web/docs/template/.*|
  )$
repos:
  - repo: https://github.com/ambv/black
    rev: 20.8b1
    hooks:
      - id: black
        name: Fmt Black
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.2.1
    hooks:
      - id: prettier
        name: Fmt Prettier
        # Workaround to list modified files only.
        args: [--list-different]
  - repo: https://gitlab.com/pycqa/flake8
    # v3.7.9 due to E402 issue: https://gitlab.com/pycqa/flake8/-/issues/638
    rev: 3.7.9
    hooks:
      - id: flake8
        name: Chk Flake8
        additional_dependencies:
          - flake8-isort==4.0.0
          - pep8-naming==0.11.1
        args: [--isort-show-traceback]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.4.0
    hooks:
      - id: double-quote-string-fixer
        name: Fix Double-quotes
      - id: end-of-file-fixer
        name: Fix End-of-files
        exclude_types: [javascript, css]
      - id: mixed-line-ending
        name: Fix Line endings
        args: [--fix=auto]
      - id: trailing-whitespace
        name: Fix Trailing whitespace
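For context, a small hypothetical before/after of the double-quote-string-fixer hook listed above: it rewrites plain double-quoted Python strings to single quotes and leaves strings that contain a single quote untouched.

```python
# Before `pre-commit run double-quote-string-fixer` (hypothetical input)
greeting = "hello"
contraction = "it's fine"  # kept as-is: converting would require escaping

# After the hook rewrites the file
greeting = 'hello'
contraction = "it's fine"
```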


@@ -1,6 +0,0 @@
deluge/ui/web/css/ext-*.css
deluge/ui/web/js/extjs/ext-*.js
deluge/ui/web/docs/
deluge/ui/web/themes/images/
*.py*
*.html


@@ -1,13 +0,0 @@
trailingComma: "es5"
tabWidth: 4
singleQuote: true
overrides:
  - files:
      - "*.yaml"
      - ".*.yaml"
      - "*.yml"
      - ".*.yml"
      - "*.md"
    options:
      tabWidth: 2
      singleQuote: false

.pylintrc

@@ -1,420 +0,0 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=2
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality. This option is deprecated
# and it will be removed in Pylint 2.0.
optimize-ast=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
#
# Arranged by category and use symbolic names instead of ids.
disable=
    # Convention
    missing-docstring, invalid-name, bad-continuation,
    # Error
    no-member, no-name-in-module,
    # Information
    locally-disabled,
    # Refactor
    no-self-use, too-many-arguments, too-many-branches, too-many-instance-attributes,
    too-many-locals, too-few-public-methods, too-many-public-methods, too-many-statements,
    # Refactor msgs that should eventually be enabled:
    redefined-variable-type, too-many-ancestors,
    too-many-nested-blocks, too-many-return-statements,
    # Warning
    unused-argument, protected-access, import-error, unused-variable,
    attribute-defined-outside-init,
    # Warning msgs that should eventually be enabled:
    arguments-differ, global-statement, fixme, broad-except
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=parseable
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]". This option is deprecated
# and it will be removed in Pylint 2.0.
files-output=no
# Tells whether to display a full report or only the messages
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=d,i,j,k,ex,Run,_,log
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,40}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=__.*__
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=120
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1550
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=LF
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=_,_n,__request__,WindowsError
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=SQLObject,twisted.internet.reactor
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method
max-args=7
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
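To illustrate the [BASIC] naming rules above, here is a standalone sketch (not part of the config) that checks candidate names against the function-rgx pattern exactly as written in this file; the sample names are made up:

```python
import re

# function-rgx from the [BASIC] section: lowercase start, 3-31 characters total
FUNCTION_RGX = re.compile(r'[a-z_][a-z0-9_]{2,30}$')

for name in ('add_torrent', 'x', 'AddTorrent', 'get_magnet_uri'):
    verdict = 'ok' if FUNCTION_RGX.match(name) else 'invalid-name'
    print(name, '->', verdict)
```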


@@ -1,22 +0,0 @@
# .readthedocs.yml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/source/conf.py
# Optionally build your docs in additional formats such as PDF and ePub
formats: all
# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
  install:
    - requirements: requirements.txt
    - requirements: docs/requirements.txt
    - method: setuptools
      path: .

AUTHORS

@@ -1,790 +0,0 @@
Authors:
* Andrew Resch ('andar') <andrewresch@gmail.com>
* Damien Churchill ('damoxc') <damoxc@gmail.com>
Main Developers:
* Andrew Resch
* Damien Churchill
* John Garland ('johnnyg') <johnnybg+deluge@gmail.com>
* Calum Lind ('cas') <calumlind+deluge@gmail.com>
libtorrent (http://www.libtorrent.org):
* Arvid Norberg
Contributors (and Past Developers):
* Zach Tibbitts <zach@collegegeek.org>
* Alon Zakai ('Kripken') <kripkensteiner@gmail.com>
* Marcos Mobley ('markybob') <markybob@gmail.com>
* Alex Dedul
* Sadrul Habib Chowdhury
* Ido Abramovich <ido.deluge@gmail.com>
* Martijn Voncken <mvoncken@gmail.com>
* Mark Stahler ('kramed') <markstahler@gmail.com>
* Pedro Algarvio ('s0undt3ch') <ufs@ufsoft.org>
* Cristian Greco ('cgreco') <cristian@regolo.cc>
* Chase Sterling ('gazpachoKing') <chase.sterling@gmail.com>
Plugin Developers:
* Autoadd : Chase Sterling
* Blocklist : John Garland
* Execute : Damien Churchill
* Extractor : Andrew Resch
* Label : Martijn Voncken
* Notifications : Pedro Algarvio
* Scheduler : Andrew Resch
* Webui : Damien Churchill
Images Authors:
* files: deluge/ui/data/pixmaps/*.svg, *.png
deluge/ui/web/icons/active.png, alert.png, all.png, checking.png, dht.png,
downloading.png, inactive.png, queued.png, seeding.png, traffic.png
deluge/ui/web/images/deluge*.png
deluge/ui/web/icons/apple-pre-*.png, deluge*.png
copyright: Calum Lind
license: GPLv3
* files: deluge/plugins/blocklist/blocklist/data/*.png
deluge/ui/data/pixmaps/tracker_warning16.png, tracker_all16.png, lock48.png
copyright: Gnome Icon Theme
license: GPLv2
url: http://ftp.acc.umu.se/pub/GNOME/sources/gnome-icon-theme
* files: deluge/ui/data/pixmaps/magnet*.svg, *.png
copyright: Matias Wilkman
license:
* files: deluge/ui/data/pixmaps/flags/*.png
copyright: Mark James <mjames@gmail.com>
license: Public Domain
url: http://famfamfam.com/lab/icons/flags/
* files: deluge/ui/web/icons/*.png
exceptions: apple-pre-*.png, active.png, alert.png, all.png, deluge.png, dht.png,
downloading.png, inactive.png, queued.png, seeding.png, traffic.png
copyright: Yusuke Kamiyamane <p@yusukekamiyamane.com>
license: Creative Commons Attribution 3.0 License
url: http://p.yusukekamiyamane.com/
* files: deluge/ui/web/images/spinner.gif, spinner-split.gif
copyright: Steven Chim
license: BSD license
url: http://members.upc.nl/j.chim/ext/spinner2/ext-spinner.html
Translation Contributors:
* files: deluge/i18n/*.po
Aaron Wang Shi
abbigss
ABCdatos
Abcx
Actam
Adam
adaminikisi
adi_oporanu
Adrian Goll
afby
Ahmades
Ahmad Farghal
Ahmad Gharbeia أحمد غربية
akira
Aki Sivula
Alan Pepelko
Alberto
Alberto Ferrer
alcatr4z
AlckO
Aleksej Korgenkov
Alessio Treglia
Alexander Ilyashov
Alexander Matveev
Alexander Saltykov
Alexander Taubenkorb
Alexander Telenga
Alexander Yurtsev
Alexandre Martani
Alexandre Rosenfeld
Alexandre Sapata Carbonell
Alexey Osipov
Alin Claudiu Radut
allah
AlSim
Alvaro Carrillanca P.
A.Matveev
Andras Hipsag
András Kárász
Andrea Ratto
Andreas Johansson
Andreas Str
André F. Oliveira
AndreiF
andrewh
Angel Guzman Maeso
Aníbal Deboni Neto
animarval
Antonio Cono
antoniojreyes
Anton Shestakov
Anton Yakutovich
antou
Arkadiusz Kalinowski
Artin
artir
Astur
Athanasios Lefteris
Athmane MOKRAOUI (ButterflyOfFire)
Augusta Carla Klug
Avoledo Marco
axaard
AxelRafn
Axezium
Ayont
b3rx
Bae Taegil
Bajusz Tamás
Balaam's Miracle
Ballestein
Bent Ole Fosse
berto89
bigx
Bjorn Inge Berg
blackbird
Blackeyed
blackmx
BlueSky
Blutheo
bmhm
bob00work
boenki
Bogdan Bădic-Spătariu
bonpu
Boone
boss01
Branislav Jovanović
bronze
brownie
Brus46
bumper
butely
BXCracer
c0nfidencal
Can Kaya
Carlos Alexandro Becker
cassianoleal
Cédric.h
César Rubén
chaoswizard
Chen Tao
chicha
Chien Cheng Wei
Christian Kopac
Christian Widell
Christoffer Brodd-Reijer
christooss
CityAceE
Clopy
Clusty
cnu
Commandant
Constantinos Koniaris
Coolmax
cosmix
Costin Chirvasuta
CoVaLiDiTy
cow_2001
Crispin Kirchner
crom
Cruster
Cybolic
Dan Bishop
Danek
Dani
Daniel Demarco
Daniel Ferreira
Daniel Frank
Daniel Holm
Daniel Høyer Iversen
Daniel Marynicz
Daniel Nylander
Daniel Patriche
Daniel Schildt
Daniil Sorokin
Dante Díaz
Daria Michalska
DarkenCZ
Darren
Daspah
David Eurenius
davidhjelm
David Machakhelidze
Dawid Dziurdzia
Daya Adianto
dcruz
Deady
Dereck Wonnacott
Devgru
Devid Antonio Filoni
DevilDogTG
di0rz`
Dialecti Valsamou
Diego Medeiros
Dkzoffy
Dmitrij D. Czarkoff
Dmitriy Geels
Dmitry Olyenyov
Dominik Kozaczko
Dominik Lübben
doomster
Dorota Król
Doyen Philippe
Dread Knight
DreamSonic
duan
Duong Thanh An
DvoglavaZver
dwori
dylansmrjones
Ebuntor
Edgar Alejandro Jarquin Flores
Eetu
ekerazha
Elias Julkunen
elparia
Emberke
Emiliano Goday Caneda
EndelWar
eng.essam
enubuntu
ercangun
Erdal Ronahi
ergin üresin
Eric
Éric Lassauge
Erlend Finvåg
Errdil
ethan shalev
Evgeni Spasov
ezekielnin
Fabian Ordelmans
Fabio Mazanatti
Fábio Nogueira
FaCuZ
Felipe Lerena
Fernando Pereira
fjetland
Florian Schäfer
FoBoS
Folke
Force
fosk
fragarray
freddeg
Frédéric Perrin
Fredrik Kilegran
FreeAtMind
Fulvio Ciucci
Gabor Kelemen
Galatsanos Panagiotis
Gaussian
gdevitis
Georg Brzyk
George Dumitrescu
Georgi Arabadjiev
Georg Sieber
Gerd Radecke
Germán Heusdens
Gianni Vialetto
Gigih Aji Ibrahim
Giorgio Wicklein
Giovanni Rapagnani
Giuseppe
gl
glen
granjerox
Green Fish
greentea
Greyhound
G. U.
Guillaume BENOIT
Guillaume Pelletier
Gustavo Henrique Klug
gutocarvalho
Guybrush88
Hans Rødtang
HardDisk
Hargas Gábor
Heitor Thury Barreiros Barbosa
helios91940
helix84
Helton Rodrigues
Hendrik Luup
Henrique Ferreiro
Henry Goury-Laffont
Hezy Amiel
hidro
hoball
hokten
Holmsss
hristo.num
Hubert Życiński
Hyo
Iarwain
ibe
ibear
Id2ndR
Igor Zubarev
IKON (Ion)
imen
Ionuț Jula
Isabelle STEVANT
István Nyitrai
Ivan Petrovic
Ivan Prignano
IvaSerge
jackmc
Jacks0nxD
Jack Shen
Jacky Yeung
Jacques Stadler
Janek Thomaschewski
Jan Kaláb
Jan Niklas Hasse
Jasper Groenewegen
Javi Rodríguez
Jayasimha (ಜಯಸಿಂಹ)
jeannich
Jeff Bailes
Jesse Zilstorff
Joan Duran
João Santos
Joar Bagge
Joe Anderson
Joel Calado
Johan Linde
John Garland
Jojan
jollyr0ger
Jonas Bo Grimsgaard
Jonas Granqvist
Jonas Slivka
Jonathan Zeppettini
Jørgen
Jørgen Tellnes
josé
José Geraldo Gouvêa
José Iván León Islas
José Lou C.
Jose Sun
Jr.
Jukka Kauppinen
Julián Alarcón
julietgolf
Jusic
Justzupi
Kaarel
Kai Thomsen
Kalman Tarnay
Kamil Páral
Kane_F
kaotiks@gmail.com
Kateikyoushii
kaxhinaz
Kazuhiro NISHIYAMA
Kerberos
Keresztes Ákos
kevintyk
kiersie
Kimbo^
Kim Lübbe
kitzOgen
Kjetil Rydland
kluon
kmikz
Knedlyk
koleoptero
Kőrösi Krisztián
Kouta
Krakatos
Krešo Kunjas
kripken
Kristaps
Kristian Øllegaard
Kristoffer Egil Bonarjee
Krzysztof Janowski
Krzysztof Zawada
Larry Wei Liu
laughterwym
Laur Mõtus
lazka
leandrud
lê bình
Le Coz Florent
Leo
liorda
LKRaider
LoLo_SaG
Long Tran
Lorenz
Low Kian Seong
Luca Andrea Rossi
Luca Ferretti
Lucky LIX
Luis Gomes
Luis Reis
Łukasz Wyszyński
luojie-dune
maaark
Maciej Chojnacki
Maciej Meller
Mads Peter Rommedahl
Major Kong
Malaki
malde
Malte Lenz
Mantas Kriaučiūnas
Mara Sorella
Marcin
Marcin Falkiewicz
marcobra
Marco da Silva
Marco de Moulin
Marco Rodrigues
Marcos
Marcos Escalier
Marcos Mobley
Marcus Ekstrom
Marek Dębowski
Mário Buči
Mario Munda
Marius Andersen
Marius Hudea
Marius Mihai
Mariusz Cielecki
Mark Krapivner
marko-markovic
Markus Brummer
Markus Sutter
Martin
Martin Dybdal
Martin Iglesias
Martin Lettner
Martin Pihl
Masoud Kalali
mat02
Matej Urbančič
Mathias-K
Mathieu Arès
Mathieu D. (MatToufoutu)
Mathijs
Matrik
Matteo Renzulli
Matteo Settenvini
Matthew Gadd
Matthias Benkard
Matthias Mailänder
Mattias Ohlsson
Mauro de Carvalho
Max Molchanov
Me
MercuryCC
Mert Bozkurt
Mert Dirik
MFX
mhietar
mibtha
Michael Budde
Michael Kaliszka
Michalis Makaronides
Michał Tokarczyk
Miguel Pires da Rosa
Mihai Capotă
Miika Metsälä
Mikael Fernblad
Mike Sierra
mikhalek
Milan Prvulović
Milo Casagrande
Mindaugas
Miroslav Matejaš
misel
mithras
Mitja Pagon
M.Kitchen
Mohamed Magdy
moonkey
MrBlonde
muczy
Münir Ekinci
Mustafa Temizel
mvoncken
Mytonn
NagyMarton
neaion
Neil Lin
Nemo
Nerijus Arlauskas
Nicklas Larsson
Nicolaj Wyke
Nicola Piovesan
Nicolas Sabatier
Nicolas Velin
Nightfall
NiKoB
Nikolai M. Riabov
Niko_Thien
niska
Nithir
noisemonkey
nomemohes
nosense
null
Nuno Estêvão
Nuno Santos
nxxs
nyo
obo
Ojan
Olav Andreas Lindekleiv
oldbeggar
Olivier FAURAX
orphe
osantana
Osman Tosun
OssiR
otypoks
ounn
Oz123
Özgür BASKIN
Pablo Carmona A.
Pablo Ledesma
Pablo Navarro Castillo
Paco Molinero
Pål-Eivind Johnsen
pano
Paolo Naldini
Paracelsus
Patryk13_03
Patryk Skorupa
PattogoTehen
Paul Lange
Pavcio
Paweł Wysocki
Pedro Brites Moita
Pedro Clemente Pereira Neto
Pekka "PEXI" Niemistö
Penegal
Penzo
perdido
Peter Kotrcka
Peter Skov
Peter Van den Bosch
Petter Eklund
Petter Viklund
phatsphere
Phenomen
Philipi
Philippides Homer
phoenix
pidi
Pierre Quillery
Pierre Rudloff
Pierre Slamich
Pietrao
Piotr Strębski
Piotr Wicijowski
Pittmann Tamás
Playmolas
Prescott
Prescott_SK
pronull
Przemysław Kulczycki
Pumy
pushpika
PY
qubicllj
r21vo
Rafał Barański
rainofchaos
Rajbir
ras0ir
Rat
rd1381
Renato
Rene Hennig
Rene Pärts
Ricardo Duarte
Richard
Robert Hrovat
Roberth Sjonøy
Robert Lundmark
Robin Jakobsson
Robin Kåveland
Rodrigo Donado
Roel Groeneveld
rohmaru
Rolf Christensen
Rolf Leggewie
Roni Kantis
Ronmi
Rostislav Raykov
royto
RuiAmaro
Rui Araújo
Rui Moura
Rune Svendsen
Rusna
Rytis
Sabirov Mikhail
salseeg
Sami Koskinen
Samir van de Sand
Samuel Arroyo Acuña
Samuel R. C. Vale
Sanel
Santi
Santi Martínez Cantelli
Sardan
Sargate Kanogan
Sarmad Jari
Saša Bodiroža
sat0shi
Saulius Pranckevičius
Savvas Radevic
Sebastian Krauß
Sebastián Porta
Sedir
Sefa Denizoğlu
sekolands
Selim Suerkan
semsomi
Sergii Golovatiuk
setarcos
Sheki
Shironeko
Shlomil
silfiriel
Simone Tolotti
Simone Vendemia
sirkubador
Sławomir Więch
slip
slyon
smoke
Sonja
spectral
spin_555
spitf1r3
Spiziuz
Spyros Theodoritsis
SqUe
Squigly
srtck
Stefan Horning
Stefano Maggiolo
Stefano Roberto Soleti
steinberger
Stéphane Travostino
Stephan Klein
Steven De Winter
Stevie
Stian24
stylius
Sukarn Maini
Sunjae Park
Susana Pereira
szymon siglowy
takercena
TAS
Taygeto
temy4
texxxxxx
thamood
Thanos Chatziathanassiou
Tharawut Paripaiboon
Theodoor
Théophane Anestis
Thor Marius K. Høgås
Tiago Silva
Tiago Sousa
Tikkel
tim__b
Tim Bordemann
Tim Fuchs
Tim Kornhammar
Timo
Timo Jyrinki
Timothy Babych
TitkosRejtozo
Tom
Tomas Gustavsson
Tomas Valentukevičius
Tomasz Dominikowski
Tomislav Plavčić
Tom Mannerhagen
Tommy Mikkelsen
Tom Verdaat
Tony Manco
Tor Erling H. Opsahl
Toudi
tqm_z
Trapanator
Tribaal
Triton
TuniX12
Tuomo Sipola
turbojugend_gr
Turtle.net
twilight
tymmej
Ulrik
Umarzuki Mochlis
unikob
Vadim Gusev
Vagi
Valentin Bora
Valmantas Palikša
VASKITTU
Vassilis Skoullis
vetal17
vicedo
viki
villads hamann
Vincent Garibal
Vincent Ortalda
vinchi007
Vinícius de Figueiredo Silva
Vinzenz Vietzke
virtoo
virtual_spirit
Vitor Caike
Vitor Lamas Gatti
Vladimir Lazic
Vladimir Sharshov
Wanderlust
Wander Nauta
Ward De Ridder
WebCrusader
webdr
Wentao Tang
wilana
Wilfredo Ernesto Guerrero Campos
Wim Champagne
World Sucks
Xabi Ezpeleta
Xavi de Moner
XavierToo
XChesser
Xiaodong Xu
xyb
Yaron
Yasen Pramatarov
YesPoX
Yuren Ju
Yves MATHIEU
zekopeko
zhuqin
Zissan
Γιάννης Κατσαμπίρης
Артём Попов
Миша
Шаймарданов Максим
蔡查理


@@ -1,181 +0,0 @@
# Changelog
## 2.1.0 (WIP)
- Removed Python 2 support.
## 2.0.5 (2021-12-15)
### WebUI
- Fix js minifying error resulting in WebUI blank screen.
- Silence erroneous missing translations warning.
## 2.0.4 (2021-12-12)
### Packaging
- Fix python optional setup.py requirements
### Gtk UI
- Add detection of torrent URL on GTK UI focus
- Fix piecesbar crashing when enabled
- Remove num_blocks_cache_hits in stats
- Fix unhandled error with empty clipboard
- Add torrentdetails tabs position menu (#3441)
- Hide pygame community banner in console
- Fix cmp function for None types (#3309)
- Fix loading config with double-quotes in string
- Fix Status tab download speed and uploaded
### Web UI
- Handle torrent add failures
- Add menu option to copy magnet URI
- Fix md5sums in torrent files breaking file listing (#3388)
- Add country flag alt/title for accessibility
### Console UI
- Fix allowing use of windows-curses on Windows
- Fix hostlist status lookup errors
- Fix AttributeError setting config values
- Fix setting 'Skip' priority
### Core
- Add workaround libtorrent 2.0 file_progress error
- Fix allow enabling any plugin Python version
- Export torrent get_magnet_uri method
- Fix loading magnet with resume_data and no metadata (#3478)
- Fix httpdownloader reencoding torrent file downloads (#3440)
- Fix lt listen_interfaces not comma-separated (#3337)
- Fix unable to remove magnet with delete_copies enabled (#3325)
- Fix Python 3.8 compatibility
- Fix loading config with double-quotes in string
- Fix pickle loading non-ascii state error (#3298)
- Fix creation of pidfile via command option
- Fix for peer.client UnicodeDecodeError
- Fix show_file unhandled dbus error
### Documentation
- Add How-to guides about services.
### Stats plugin
- Fix constant session status key warnings
- Fix cairo error
### Notifications plugin
- Fix email KeyError with status name
- Fix unhandled TypeErrors on Python 3
### Autoadd plugin
- Fix magnet missing applied labels
### Execute plugin
- Fix failing to run on Windows (#3439)
## 2.0.3 (2019-06-12)
### Gtk UI
- Fix errors running on Wayland (#3265).
- Fix Peers Tab tooltip and context menu errors (#3266).
### Web UI
- Fix TypeError in Peers Tab setting country flag.
- Fix reverse proxy header TypeError (#3260).
- Fix request.base 'idna' codec error (#3261).
- Fix unable to change password (#3262).
### Extractor plugin
- Fix potential error starting plugin.
### Documentation
- Fix macOS install typo.
- Fix Windows install instructions.
## 2.0.2 (2019-06-08)
### Packaging
- Add systemd deluged and deluge-web service files to package tarball (#2034)
### Core
- Fix Python 2 compatibility issue with SimpleNamespace.
## 2.0.1 (2019-06-07)
### Packaging
- Fix `setup.py` build error without git installed.
## 2.0.0 (2019-06-06)
### Codebase
- Ported to Python 3
### Core
- Improved Logging
- Removed the AutoAdd feature on the core. It's now handled with the AutoAdd
plugin, which is also shipped with Deluge, and it does a better job and
now, it even supports multiple users perfectly.
- Authentication/Permission exceptions are now sent to clients and recreated
there to allow acting upon them.
- Updated SSL/TLS Protocol parameters for better security.
- Make the distinction between adding to the session new unmanaged torrents
and torrents loaded from state. This will break backwards compatibility.
- Pass a copy of an event instead of passing the event arguments to the
event handlers. This will break backwards compatibility.
- Allow changing ownership of torrents.
- File modifications on the auth file are now detected and when they happen,
the file is reloaded. Upon finding an old auth file with an old format, an
upgrade to the new format is made, file saved, and reloaded.
- Authentication no longer requires a username/password. If one or both of
these is missing, an authentication error will be sent to the client
which should then ask the username/password to the user.
- Implemented sequential downloads.
- Provide information about a torrent's pieces states
- Add Option To Specify Outgoing Connection Interface.
- Fix potential for host_id collision when creating hostlist entries.
### Gtk UI
- Ported to GTK3 (3rd-party plugins will need updated).
- Allow changing ownership of torrents.
- Host entries in the Connection Manager UI are now editable.
- Implemented sequential downloads UI handling.
- Add optional pieces bar instead of a regular progress bar in torrent status tab.
- Make torrent opening compatible with all Unicode paths.
- Fix magnet association button on Windows.
- Add keyboard shortcuts for changing queue position:
- Up: `Ctrl+Alt+Up`
- Down: `Ctrl+Alt+Down`
- Top: `Ctrl+Alt+Shift+Up`
- Bottom: `Ctrl+Alt+Shift+Down`
### Web UI
- Server (deluge-web) now daemonizes by default, use '-d' or '--do-not-daemonize' to disable.
- Fixed the '--base' option to work for regular use, not just with reverse proxies.
### Blocklist Plugin
- Implemented whitelist support to both core and GTK UI.
- Implemented IP filter cleaning before each update. Restarting the deluge
daemon is no longer needed.
- If "check_after_days" is 0(zero), the timer is not started anymore. It
would keep updating one call after the other. If the value changed, the
timer is now stopped and restarted using the new value.

ChangeLog

@@ -0,0 +1,167 @@
Deluge 0.5.7.1 (01 December 2007)
* Tweak full hd warning so that it only displays itself once per torrent that
it has to pause.
* Fixed crash and corruption of persistent.state while adding a duplicate
torrent. Also caused yet another invalid handle error.
* Increase tracker timeout
Deluge 0.5.7 (26 November 2007)
* Scrape support
* Manual force-recheck
* Add local peer discovery (aka local service discovery)
* Blocklist plugin will now display errors, instead of just crashing on a bad
list or wrong type
* Add torrent in paused state option
* Add advanced progress bar
* Fix bug in merging trackers
* Various updates to WebUI, including https support and advanced template by vonck7
* Add maximum connection attempts per second preference
* Fix bug where loaded plugins were forgotten if Deluge crashed
* Fix ratio bugs (hopefully for the last time)
* Add preference to only show file selection popup if torrent has multiple files
* Fix pause all and resume all bugs
* Fix client not loading if our website goes down (new version check failing)
* Allow torrent creation with no trackers
* Scheduler plugin revamp by Ben Klein
* Fix ETA from going backwards
* UI warning on full HD - no longer just silently pauses torrents
* Replace SimpleRSS with FlexRSS
* Add preference for the location of torrent files
* Add autoload folder
* Copy translator credits from Launchpad to our about->credits
* Differentiate between queued and paused torrents. Able to pause queued
torrents - patch by yobbobandana
* Show error when writing/permission problems occur
Deluge 0.5.6.2 (31 October 2007)
* Set default piece size to 256-KiB in TorrentCreator plugin and add 2048KiB
as a size option.
* Fix a bug in Debian package that caused the UI to completely freeze when a
torrent finished
* Find and fix another shutdown bug that mostly Gutsy users were encountering
* Fix a couple of WebUI bugs, including the "index" page erroring out
Deluge 0.5.6.1 (28 October 2007)
* Fix invalid handle error
* Fix shutdown hang
Deluge 0.5.6 (24 October 2007)
* Web Interface Plugin
* Hopefully fix "losing data" and having to re-download parts (for real this time :p)
* Use new full allocation method which does not create files until one of its
pieces is downloaded
* Tray lock password is no longer stored in plain text
* Update the Scheduler plugin and fix a bunch of bugs on it
* Double-clicking on a torrent opens up its containing folder
* Fix SpeedLimiter plugin when setting upload limits
* Fix MoveTorrent plugin when moving actively downloading torrents
* Pause torrents while importing blocklist and resume them when finished
* Remove TorrentPieces and disable its use
* A whole bunch of stuff for Win32
* Add private flag to TorrentCreator plugin
* Use SVG for internal logo usage (except on Win32)
* Use theme for tray icon instead of hard-coded
* Properly release port on shutdown
* TorrentFiles plugin now has progress bars
* Removing torrent files no longer deletes files that weren't part of the torrent
* New max half-open connections setting to deal with cheap/broken routers
* Inherit UPnP fixes from libtorrent
* Use threading for everything, instead of spawning
Deluge 0.5.5 (09 September 2007)
* Editing a torrent's tracker list is now persistent between sessions
* Persistence between sessions for Speed Limiter, Web Seed and Desired Ratio
plugins
* New wizard to aid first-time users with configuration
* Reorderable tabs and remember order (with exception of details tab)
* Fix losing data and having to re-download parts
* Fix password lock showing when main window is not hidden
* Get rid of the plugin manager and integrate it into preferences
* New Move torrent plugin - takes over for "move completed downloads" feature
and provides a "Move Torrent" option when right-clicking on a torrent
* Save column widths
* Queue order after restart fixes.
* Use payload instead of including protocol overhead to ease user confusion
of seeding torrents "downloading"
* New Web Seed plugin for adding URLs to torrents for http seeding
* Add FAST-extension (http://www.bittorrent.org/fast_extensions.html)
Deluge 0.5.4.1 (10 August 2007)
* Add "Open containing folder" and "Open File" to the torrent and file
menu, respectively
* Load Blocklist plugin last and have it not lock up the interface during
import
* Add full allocation to preferences for clarification
* Catch SIGINT, SIGHUP, SIGTERM and Gnome logoff to quit properly
* Add send local info to developers
* Fix up pieces, peers and files plugins
* UPnP fixes
* Add ExtraStats plugin
* FreeBSD full allocation fix
* Added per torrent max upload slots and max connections preferences
* A lot of other less visible improvements
Deluge 0.5.4 (06 August 2007)
* Tray message includes session totals
* Ticket #198 - Display peers countries in the Peers tab.
* Ticket #474 - Multiple password prompts displayed
* Pause all/resume all in tray menu
* Peers and Files tabs are now plugins
* New Location plugin
* Option to use a random port every time
* Proxy system redone - you can now specify different information for each
type of proxy (DHT, peer, tracker, web-seed)
* TorrentPieces plugin to view piece updates and show pieces table per file
* EventLogger plugin to view/log every activity
* SpeedLimiter plugin, which allows you to set speed limits on a per-torrent
basis
* New release alerts - Client will inform user if their version of deluge is
outdated
Deluge 0.5.3 (25 July 2007)
* Added ChangeLog
* Ticket #53 - Added files priorities within torrent
* Ticket #111 - Remember directory of last added torrent
* Ticket #232 - Added Move completed downloads to feature
* Ticket #245 - Added ability to select torrent files before starting
* Ticket #368 - Added ability to prioritize first and last pieces of files
in torrents
* Ticket #371 - Proper full storage allocation of files on reiser4 and
ntfs-3g filesystems
* Ticket #420 - Show size of torrent minus size of unselected files as Total
Size
* Ticket #405 - Properly start in tray when run deluge --tray
* Ticket #437 - Gracefully upgrade from old versions
* Picking a file to not download now checks for compact_mode status to prevent
all sorts of problems such as downloading pieces into the wrong file
* RSS plugin inclusion
* Added ability to queue new torrents above completed ones when
seeds are set to queue at the bottom
* Added availability and piece size display in details, availability
column
* Added ability to automatically remove torrents when max share ratio is set
* Show text from clipboard in Add URL dialog only if it looks like a URL
* Added Torrent Notification plugin
* Added event handling callbacks for plugins
* Added ability to designate a torrent as private (in file selection dialog)
* Added merging trackers of duplicate torrents
* Details, Peers and Files tabs more responsive and their performance
greatly improved especially on torrents with many files and peers
* A lot of other less visible improvements
Deluge 0.5.2 (05 July 2007)
* ticket #6 - Torrent creation built into main client
* ticket #315 - Plugins implemented as modules
* ticket #310 - Configuration options for PEX and UPnP
* ticket #390 - Individual file progress shown in File tab
* The usual slew of improvements
Deluge 0.5.1 (11 June 2007)
* Peer Exchange
* ticket #254 - Encryption
* ticket #142 - UPnP + NATPMP
* Improved user interface
* Redesigned preferences dialog
* Proper startup and shutdown


@@ -1,98 +0,0 @@
# Deluge dependencies
The following are required to install and run Deluge. They are separated into
sections to distinguish the precise requirements for each module.
All modules will require the [common](#common) section dependencies.
## Prerequisite
- [Python] _>= 3.5_
## Build
- [setuptools]
- [intltool] - Optional: Desktop file translation for \*nix.
- [closure-compiler] - Minify javascript (alternative is [rjsmin])
## Common
- [Twisted] _>= 17.1_ - Use `TLS` extras for `service_identity` and `idna`.
- [OpenSSL] _>= 1.0.1_
- [pyOpenSSL]
- [rencode] _>= 1.0.2_ - Encoding library.
- [PyXDG] - Access freedesktop.org standards for \*nix.
- [xdg-utils] - Provides xdg-open for \*nix.
- [zope.interface]
- [chardet] - Optional: Encoding detection.
- [setproctitle] - Optional: Renaming processes.
- [Pillow] - Optional: Support for resizing tracker icons.
- [dbus-python] - Optional: Show item location in filemanager.
### Linux and BSD
- [distro] - Optional: OS platform information.
### Windows OS
- [pywin32]
- [certifi]
## Core (deluged daemon)
- [libtorrent] _>= 1.1.1_
- [GeoIP] - Optional: IP address location lookup. (_Debian: `python-geoip`_)
## GTK UI
- [GTK+] >= 3.10
- [PyGObject]
- [Pycairo]
- [librsvg] _>= 2_
- [libappindicator3] w/GIR - Optional: Ubuntu system tray icon.
### MacOS
- [GtkOSXApplication]
## Web UI
- [mako]
## Plugins
### Notifications
- [pygame] - Optional: Play sounds
- [libnotify] w/GIR - Optional: Desktop popups.
[python]: https://www.python.org/
[setuptools]: https://setuptools.readthedocs.io/en/latest/
[intltool]: https://freedesktop.org/wiki/Software/intltool/
[closure-compiler]: https://developers.google.com/closure/compiler/
[rjsmin]: https://pypi.org/project/rjsmin/
[openssl]: https://www.openssl.org/
[pyopenssl]: https://pyopenssl.org
[twisted]: https://twistedmatrix.com
[pillow]: https://pypi.org/project/Pillow/
[libtorrent]: https://libtorrent.org/
[zope.interface]: https://pypi.org/project/zope.interface/
[distro]: https://github.com/nir0s/distro
[pywin32]: https://github.com/mhammond/pywin32
[certifi]: https://pypi.org/project/certifi/
[dbus-python]: https://pypi.org/project/dbus-python/
[setproctitle]: https://pypi.org/project/setproctitle/
[gtkosxapplication]: https://github.com/jralls/gtk-mac-integration
[chardet]: https://chardet.github.io/
[rencode]: https://github.com/aresch/rencode
[pyxdg]: https://www.freedesktop.org/wiki/Software/pyxdg/
[xdg-utils]: https://www.freedesktop.org/wiki/Software/xdg-utils/
[gtk+]: https://www.gtk.org/
[pycairo]: https://cairographics.org/pycairo/
[pygobject]: https://pygobject.readthedocs.io/en/latest/
[geoip]: https://pypi.org/project/GeoIP/
[mako]: https://www.makotemplates.org/
[pygame]: https://www.pygame.org/
[libnotify]: https://developer.gnome.org/libnotify/
[python-appindicator]: https://packages.ubuntu.com/xenial/python-appindicator
[librsvg]: https://wiki.gnome.org/action/show/Projects/LibRsvg
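Several entries above are optional at runtime. A hedged sketch (not Deluge's actual code) of the usual pattern for degrading gracefully when an optional dependency such as setproctitle from the list above is missing; the wrapper function is hypothetical:

```python
# Hypothetical illustration of treating setproctitle as an optional dependency.
try:
    from setproctitle import setproctitle
except ImportError:
    def setproctitle(title):
        """No-op fallback when the optional package is not installed."""


def rename_process(title='deluged'):
    # Safe to call whether or not setproctitle is available.
    setproctitle(title)
```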

HACKING

@@ -0,0 +1,40 @@
# Copyright (c) 2006 Marcos Pinto ('markybob') <markybob@gmail.com>
This is pretty much taken straight out of PEP 8, the "Style Guide for Python
Code" (http://www.python.org/dev/peps/pep-0008/)
More or less, if you try to submit a patch that doesn't follow this guide, odds
are your patch will be denied...unless it does some incredibly magnificent
things, in which case I *might* edit it. Don't bet on it, though.
Here are the highlights:
Indents are FOUR (4) spaces. Not 8, not 5 or 2 and definitely NOT tab.
Limit all lines to a maximum of 80 characters.
Use UTF-8 encoding
Every single import should be on its own line
Avoid extraneous whitespace in the following situations:
Yes: spam(ham[1], {eggs: 2})
No: spam( ham[ 1 ], { eggs: 2 } )
Yes: spam(1)
No: spam (1)
Yes: if x == 4: print x, y; x, y = y, x
No: if x == 4 : print x , y ; x , y = y , x
Yes: dict['key'] = list[index]
No: dict ['key'] = list [index]
Yes:
x = 1
y = 2
long_variable = 3
No:
x             = 1
y             = 2
long_variable = 3
Some more recommendations:
* "Don't repeat yourself (DRY). Every distinct concept and/or piece of
data should live in one, and only one, place. Redundancy is bad.
Normalization is good." (taken straight from django's Design philosophies)
* Try to use iterators/generators where applicable. The simplest change from
range to xrange is also good.
* In UI and deluge code for consistency we use notion of speed not rate.
Libtorrent mixes this usage and so do we on deluge-libtorrent boundary,
but all deluge only code should use speed.
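Pulling the rules above together, a short hypothetical snippet written the way this guide asks: four-space indents, one import per line, no extraneous whitespace, lines under 80 characters.

```python
import os.path
import shutil


def move_completed(source, destination):
    """Move a finished download; spacing follows the Yes examples above."""
    target = os.path.join(destination, os.path.basename(source))
    shutil.move(source, target)
    return target
```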

LICENSE

@@ -1,634 +1,350 @@
Deluge is licensed under the GNU General Public License version 3 with the
addition of the following special exception:
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
In addition, as a special exception, the copyright holders give
permission to link the code of portions of this program with the OpenSSL
library.
You must obey the GNU General Public License in all respects for all of
the code used other than OpenSSL. If you modify file(s) with this
exception, you may extend this exception to your version of the file(s),
but you are not obligated to do so. If you do not wish to do so, delete
this exception statement from your version. If you delete this exception
statement from all source files in the program, then also delete it here.
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Copyright (C) 1989, 1991 Free Software Foundation, Inc.
59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Library General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
13. Use with the GNU Affero General Public License.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
14. Revised Versions of this License.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
NO WARRANTY
15. Disclaimer of Warranty.
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
16. Limitation of Liability.
END OF TERMS AND CONDITIONS
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
How to Apply These Terms to Your New Programs
17. Interpretation of Sections 15 and 16.
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
END OF TERMS AND CONDITIONS
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Library General
Public License instead of this License.
In addition, as a special exception, the copyright holders give
permission to link the code of portions of this program with the OpenSSL
library.
You must obey the GNU General Public License in all respects for all of
the code used other than OpenSSL. If you modify file(s) with this
exception, you may extend this exception to your version of the file(s),
but you are not obligated to do so. If you do not wish to do so, delete
this exception statement from your version. If you delete this exception
statement from all source files in the program, then also delete it here.

View File

@ -1,36 +1,12 @@
include *.md
include AUTHORS
include LICENSE
include RELEASE-VERSION
include README
include README.Packagers
include Makefile
include deluge.desktop
include deluge.png
include msgfmt.py
include minify_web_js.py
include version.py
include gen_web_gettext.py
graft docs/man
graft packaging/systemd
include deluge/i18n/*.po
recursive-exclude deluge/i18n *.mo
graft deluge/plugins
recursive-exclude deluge/plugins create_dev_link.sh *.pyc *.egg
prune deluge/plugins/*/build
prune deluge/plugins/*/*.egg-info
graft deluge/tests/
recursive-exclude deluge/tests *.pyc
graft deluge/ui/data
recursive-exclude deluge/ui/data *.desktop *.xml
graft deluge/ui/gtk3/glade
include deluge/ui/web/index.html
include deluge/ui/web/css/*.css
include deluge/ui/web/js/*.js
graft deluge/ui/web/js/deluge-all/
graft deluge/ui/web/js/extjs/
graft deluge/ui/web/themes
graft deluge/ui/web/render
graft deluge/ui/web/icons
graft deluge/ui/web/images
recursive-include libtorrent/ *
recursive-include glade/ *.glade
recursive-include pixmaps/ *.png *.svg
recursive-include plugins/ *
recursive-include po/ *

31
Makefile Normal file
View File

@ -0,0 +1,31 @@
#
# Makefile for Deluge
#
PYVER=`python -c "import sys; print sys.version[:3]"`
PREFIX ?= /usr
DESTDIR ?= ./
all:
	python setup.py build

tarball:
	python setup.py sdist
	mv dist/deluge-*.tar.gz $(DESTDIR)

install:
	python setup.py install --prefix=$(PREFIX)

clean:
	python setup.py clean
	rm -rf ./build
	rm msgfmt.pyc
	find . -name *.pyc -exec rm {} \;

uninstall:
	-rm $(PREFIX)/bin/deluge
	-rm -r $(PREFIX)/lib/python${PYVER}/site-packages/deluge
	-rm -r $(PREFIX)/lib/python${PYVER}/site-packages/deluge-*.egg-info
	-rm -r $(PREFIX)/share/deluge
	-find ${PREFIX}/share/locale -name deluge.mo -exec rm {} \;
	-rm $(PREFIX)/share/applications/deluge.desktop
	-rm $(PREFIX)/share/pixmaps/deluge.png

10
PKG-INFO Normal file
View File

@ -0,0 +1,10 @@
Metadata-Version: 1.0
Name: deluge
Version: 0.5.2
Summary: A bittorrent client written in PyGTK
Home-page: http://deluge-torrent.org
Author: Zach Tibbitts, Alon Zakai, Marcos Pinto, Alex Dedul, Andrew Resch
Author-email: zach@collegegeek.org, kripkensteiner@gmail.com, marcospinto@dipconsultants.com, rotmer@gmail.com, alonzakai@gmail.com
License: GPLv2
Description: UNKNOWN
Platform: UNKNOWN

102
README Normal file
View File

@ -0,0 +1,102 @@
==========================
Deluge BitTorrent Client
==========================
Authors:
Zach Tibbitts, aka zachtib
Alon Zakai, aka kripkenstein
Marcos Pinto, aka markybob
Andrew Resch, aka andar
Alex Dedul, aka plisk
Homepage: http://deluge-torrent.org
==========================
Contact/Support:
==========================
We have two options available for support:
Our Forum, at http://forum.deluge-torrent.org
or
Our IRC Channel, at #deluge on Freenode
==========================
Installation Instructions:
==========================
First, make sure you have the proper build dependencies installed. On a normal
Debian or Ubuntu system, those dependencies are:
g++
make
python-all-dev
python-all version >= 2.4
python-dbus
python-gtk2 version >= 2.9
python-notify
python-pyopenssl
librsvg2-common
python-xdg
python-support
libboost-dev >= 1.33.1
libboost-thread-dev
libboost-date-time-dev
libboost-filesystem-dev
libboost-serialization-dev
libssl-dev
zlib1g-dev
But the names of the packages may vary depending on your OS / distro.
Once you have the needed libraries installed, build Deluge by running:
$ make
You shouldn't get any errors. Then run, as root (or by using sudo):
$ make install
and Deluge will be installed. By default, Deluge will be installed to the
prefix /usr. If you wish, you can install Deluge to a different prefix by
specifying it when you install it:
$ PREFIX=yourprefixhere make install
So, to install to /usr/local, run:
$ PREFIX=/usr/local make install
You can then run Deluge by executing:
$ deluge
==========================
Uninstallation/Upgrading:
==========================
If you wish to upgrade from an older Deluge version, please remove it first,
then install the latest version as per "Installation Instructions". If you
installed via the tarball, cd into the unpacked source tarball and then run,
as root (or by using sudo):
$ make uninstall
If you installed via the deb package, run as root (or by using sudo):
$ dpkg --purge deluge-torrent
Now install the latest version (and check out the additional notes).
==========================
Additional Notes:
==========================
1) On some distributions, the boost libraries are renamed to have "-mt" at the
end (boost_thread_mt instead of boost_thread, for example), the "mt" indicating
"multithreaded", and the distro may not provide symlinks connecting the two
names. The solution is either to add symlinks from the short names to the
"-mt" versions, or to alter setup.py to look for the "-mt" versions; see the
illustrative sketch after these notes.
2) After upgrading your Deluge installation, it may fail to start. If this
happens to you, you need to remove your ~/.config/deluge directory to allow
Deluge to rebuild its configuration file.
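Purely as an illustration (this sketch is not part of the original notes; the
library names and paths are assumptions and vary between distros), the symlink
workaround in note 1 could be scripted like this, run as root:

    # illustrative_boost_links.py -- hypothetical helper, adjust paths for your distro
    import os

    for name in ('thread', 'date_time', 'filesystem', 'serialization'):
        src = '/usr/lib/libboost_%s-mt.so' % name   # the "-mt" name the distro ships
        dst = '/usr/lib/libboost_%s.so' % name      # the plain name setup.py looks for
        if os.path.exists(src) and not os.path.exists(dst):
            os.symlink(src, dst)

Creating the equivalent links by hand with ln -s works just as well.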

8
README.Packagers Normal file
View File

@ -0,0 +1,8 @@
NOTE: Deluge 0.5.1 and newer uses an svn build of libtorrent. This build
differs from a clean libtorrent source checkout and has been hacked in order
to get it to work properly with Deluge. As a result, Deluge will likely not
build properly against a vanilla libtorrent 0.12 installation or a nightly
build of libtorrent 0.13. It is recommended that you build against our included
libtorrent, as our build will not conflict with any installed libtorrent.
- zachtib

View File

@ -1,70 +0,0 @@
# Deluge BitTorrent Client
[![build-status]][github-ci] [![docs-status]][rtd-deluge]
Deluge is a BitTorrent client that utilizes a daemon/client model.
It has various user interfaces available such as the GTK-UI, Web-UI and
Console-UI. It uses [libtorrent][lt] at its core to handle the BitTorrent
protocol.
## Install
From [PyPi](https://pypi.org/project/deluge):
pip install deluge
with all optional dependencies:
pip install deluge[all]
From source code:
pip install .
with all optional dependencies:
pip install .[all]
See [DEPENDS](DEPENDS.md) and [Installing/Source] for dependency details.
## Usage
The various user-interfaces and Deluge daemon can be started with the following commands.
Use the `--help` option for further command options.
### Gtk UI
`deluge` or `deluge-gtk`
### Console UI
`deluge-console`
### Web UI
`deluge-web`
Open http://localhost:8112 with default password `deluge`.
### Daemon
`deluged`
See the [Thinclient guide] to connect to the daemon from another computer.
## Contact
- [Homepage](https://deluge-torrent.org)
- [User guide][user guide]
- [Forum](https://forum.deluge-torrent.org)
- [IRC Libera.Chat #deluge](irc://irc.libera.chat/deluge)
[user guide]: https://dev.deluge-torrent.org/wiki/UserGuide
[thinclient guide]: https://dev.deluge-torrent.org/wiki/UserGuide/ThinClient
[installing/source]: https://dev.deluge-torrent.org/wiki/Installing/Source
[build-status]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml/badge.svg?branch=develop "CI"
[github-ci]: https://github.com/deluge-torrent/deluge/actions/workflows/ci.yml
[docs-status]: https://readthedocs.org/projects/deluge/badge/?version=latest
[rtd-deluge]: https://deluge.readthedocs.io/en/latest/?badge=latest "Documentation Status"
[lt]: https://libtorrent.org

3
TODO Normal file
View File

@ -0,0 +1,3 @@
for 0.5.7
* remap filenames
* copy over active torrents when user changes location of torrent files

5
createicons.sh Executable file
View File

@ -0,0 +1,5 @@
#!/bin/bash
for size in 16 22 24 32 36 48 64 72 96 128 192 256; do mkdir -p icons/hicolor/\
${size}x${size}/apps; rsvg-convert -w ${size} -h ${size} \
-o icons/hicolor/${size}x${size}/apps/deluge.png pixmaps/deluge.svg; mkdir -p \
icons/scalable/apps/; cp pixmaps/deluge.svg icons/scalable/apps/deluge.svg; done

13
deluge.desktop Normal file
View File

@ -0,0 +1,13 @@
[Desktop Entry]
Version=1.0
Encoding=UTF-8
Name=Deluge BitTorrent Client
GenericName=BitTorrent Client
Comment=A GTK BitTorrent client written in Python and C++
Exec=deluge
Icon=deluge.png
Terminal=false
Type=Application
Categories=Network;
StartupNotify=true
MimeType=application/x-bittorrent;

BIN
deluge.png Normal file

Binary file not shown.

View File

@ -1 +0,0 @@
"""Deluge"""

View File

@ -1,36 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""
This module is used to handle the importing of libtorrent and also controls
the minimum versions of libtorrent that this version of Deluge supports.
Example:
>>> from deluge._libtorrent import lt
"""
from deluge.common import VersionSplit, get_version
from deluge.error import LibtorrentImportError
try:
    import deluge.libtorrent as lt
except ImportError:
    try:
        import libtorrent as lt
    except ImportError as ex:
        raise LibtorrentImportError('No libtorrent library found: %s' % (ex))

REQUIRED_VERSION = '1.1.2.0'
LT_VERSION = lt.__version__

if VersionSplit(LT_VERSION) < VersionSplit(REQUIRED_VERSION):
    raise LibtorrentImportError(
        'Deluge %s requires libtorrent >= %s' % (get_version(), REQUIRED_VERSION)
    )
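As a quick, hypothetical illustration of how the module above is consumed (only
lt, LT_VERSION and REQUIRED_VERSION come from the module; the surrounding lines
are made up for this sketch):

    # illustrative only: import the vetted libtorrent binding and report versions
    from deluge._libtorrent import lt, LT_VERSION, REQUIRED_VERSION

    print('libtorrent %s loaded (Deluge requires >= %s)' % (LT_VERSION, REQUIRED_VERSION))
    session = lt.session()  # safe to use: the import above already enforced the minimum version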

View File

@ -1,385 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import argparse
import logging
import os
import platform
import sys
import textwrap
import deluge.log
from deluge import common
from deluge.configmanager import get_config_dir, set_config_dir
def find_subcommand(self, args=None, sys_argv=True):
    """Find if a subcommand has been supplied.

    Args:
        args (list, optional): The argument list to search through.
        sys_argv (bool): Use sys.argv[1:] if args is None.

    Returns:
        int: Index of the subcommand or '-1' if none found.

    """
    subcommand_found = -1
    if args is None:
        args = sys.argv[1:] if sys_argv else []

    for x in self._subparsers._actions:
        if not isinstance(x, argparse._SubParsersAction):
            continue
        for sp_name in x._name_parser_map:
            if sp_name in args:
                subcommand_found = args.index(sp_name)

    return subcommand_found


def set_default_subparser(self, name, abort_opts=None):
    """Sets the default argparse subparser.

    Args:
        name (str): The name of the default subparser.
        abort_opts (list): The arguments to test for in case no subcommand is found.
            If any of the values are found, the default subparser will
            not be inserted into sys.argv.

    Returns:
        list: The arguments found in sys.argv if no subcommand found, else None

    """
    found_abort_opts = []
    abort_opts = [] if abort_opts is None else abort_opts
    test_args = sys.argv[1:]
    subparser_found = self.find_subcommand(args=test_args)
    for i, arg in enumerate(test_args):
        if subparser_found == i:
            break
        if arg in abort_opts:
            found_abort_opts.append(arg)
    if subparser_found == -1:
        if found_abort_opts:
            # Found one or more of arguments in abort_opts
            return found_abort_opts
        # insert default in first position, this implies no
        # global options without a sub_parsers specified
        sys.argv.insert(1, name)
    return None


argparse.ArgumentParser.find_subcommand = find_subcommand
argparse.ArgumentParser.set_default_subparser = set_default_subparser
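# A minimal usage sketch of the two helpers patched onto ArgumentParser above.
# It is not part of this module (the subcommand names are hypothetical) and is
# kept commented out so nothing runs at import time:
#
#     parser = argparse.ArgumentParser(prog='example')
#     subparsers = parser.add_subparsers(dest='command')
#     subparsers.add_parser('start')
#     subparsers.add_parser('status')
#     # Inserts 'start' into sys.argv unless a subcommand or -h/-V was given.
#     aborts = parser.set_default_subparser('start', abort_opts=['-h', '-V'])
#     if aborts is None:
#         options = parser.parse_args()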
def _get_version_detail():
version_str = '%s\n' % (common.get_version())
try:
from deluge._libtorrent import LT_VERSION
version_str += 'libtorrent: %s\n' % LT_VERSION
except ImportError:
pass
version_str += 'Python: %s\n' % platform.python_version()
version_str += 'OS: %s %s\n' % (platform.system(), common.get_os_version())
return version_str
class DelugeTextHelpFormatter(argparse.RawDescriptionHelpFormatter):
"""Help message formatter which retains formatting of all help text."""
def _split_lines(self, text, width):
"""
Do not remove whitespaces in string but still wrap text to max width.
Instead of passing the entire text to textwrap.wrap, split and pass each
line instead. This way list formatting is not mangled by textwrap.wrap.
"""
wrapped_lines = []
for l in text.splitlines():
wrapped_lines.extend(textwrap.wrap(l, width, subsequent_indent=' '))
return wrapped_lines
def _format_action_invocation(self, action):
"""
Combines the options with comma and displays the argument
value only once instead of after both options.
Instead of: -s <arg>, --long-opt <arg>
Show : -s, --long-opt <arg>
"""
if not action.option_strings:
(metavar,) = self._metavar_formatter(action, action.dest)(1)
return metavar
else:
parts = []
# if the Optional doesn't take a value, format is:
# -s, --long
if action.nargs == 0:
parts.extend(action.option_strings)
# if the Optional takes a value, format is:
# -s, --long ARGS
else:
default = action.dest.upper()
args_string = self._format_args(action, default)
opt = ', '.join(action.option_strings)
parts.append('%s %s' % (opt, args_string))
return ', '.join(parts)
class HelpAction(argparse._HelpAction):
def __call__(self, parser, namespace, values, option_string=None):
if hasattr(parser, 'subparser'):
subparser = getattr(parser, 'subparser')
subparser.print_help()
else:
parser.print_help()
parser.exit()
class ArgParserBase(argparse.ArgumentParser):
def __init__(self, *args, **kwargs):
if 'formatter_class' not in kwargs:
kwargs['formatter_class'] = lambda prog: DelugeTextHelpFormatter(
prog, max_help_position=33, width=90
)
kwargs['add_help'] = kwargs.get('add_help', False)
common_help = kwargs.pop('common_help', True)
self.log_stream = sys.stdout
if 'log_stream' in kwargs:
self.log_stream = kwargs['log_stream']
del kwargs['log_stream']
super(ArgParserBase, self).__init__(*args, **kwargs)
self.common_setup = False
self.process_arg_group = False
self.group = self.add_argument_group(_('Common Options'))
if common_help:
self.group.add_argument(
'-h', '--help', action=HelpAction, help=_('Print this help message')
)
self.group.add_argument(
'-V',
'--version',
action='version',
version='%(prog)s ' + _get_version_detail(),
help=_('Print version information'),
)
self.group.add_argument(
'-v',
action='version',
version='%(prog)s ' + _get_version_detail(),
help=argparse.SUPPRESS,
) # Deprecated arg
self.group.add_argument(
'-c',
'--config',
metavar='<config>',
help=_('Set the config directory path'),
)
self.group.add_argument(
'-l',
'--logfile',
metavar='<logfile>',
help=_('Output to specified logfile instead of stdout'),
)
self.group.add_argument(
'-L',
'--loglevel',
choices=[l for k in deluge.log.levels for l in (k, k.upper())],
help=_('Set the log level (none, error, warning, info, debug)'),
metavar='<level>',
)
self.group.add_argument(
'--logrotate',
nargs='?',
const='2M',
metavar='<max-size>',
help=_(
'Enable logfile rotation, with optional maximum logfile size, '
'default: %(const)s (Logfile rotation count is 5)'
),
)
self.group.add_argument(
'-q',
'--quiet',
action='store_true',
help=_('Quieten logging output (Same as `--loglevel none`)'),
)
self.group.add_argument(
'--profile',
metavar='<profile-file>',
nargs='?',
default=False,
help=_(
'Profile %(prog)s with cProfile. Outputs to stdout '
'unless a filename is specified'
),
)
def parse_args(self, args=None):
"""Parse UI arguments and handle common and process group options.
Notes:
Unknown arguments result in the usage text being printed and a system exit.
Args:
args (list, optional): The arguments to parse.
Returns:
argparse.Namespace: The parsed arguments.
"""
options = super(ArgParserBase, self).parse_args(args=args)
return self._handle_ui_options(options)
def parse_known_ui_args(self, args, withhold=None):
"""Parse UI arguments and handle common and process group options without error.
Args:
args (list): The arguments to parse.
withhold (list): Values to ignore in the args list.
Returns:
argparse.Namespace: The parsed arguments.
"""
if withhold:
args = [a for a in args if a not in withhold]
options, remaining = super(ArgParserBase, self).parse_known_args(args=args)
options.remaining = remaining
# Handle common and process group options
return self._handle_ui_options(options)
def _handle_ui_options(self, options):
"""Handle UI common and process group options.
Args:
options (argparse.Namespace): The parsed options.
Returns:
argparse.Namespace: The parsed options.
"""
if not self.common_setup:
self.common_setup = True
# Setup the logger
if options.quiet:
options.loglevel = 'none'
if options.loglevel:
options.loglevel = options.loglevel.lower()
logfile_mode = 'w'
logrotate = options.logrotate
if options.logrotate:
logfile_mode = 'a'
logrotate = common.parse_human_size(options.logrotate)
# Setup the logger
deluge.log.setup_logger(
level=options.loglevel,
filename=options.logfile,
filemode=logfile_mode,
logrotate=logrotate,
output_stream=self.log_stream,
)
if options.config:
if not set_config_dir(options.config):
log = logging.getLogger(__name__)
log.error('There was an error setting the config dir! Exiting..')
sys.exit(1)
else:
if not os.path.exists(common.get_default_config_dir()):
os.makedirs(common.get_default_config_dir())
if self.process_arg_group:
self.process_arg_group = False
# If donotdaemonize is set, skip process forking.
if not (common.windows_check() or options.donotdaemonize):
if os.fork():
os._exit(0)
os.setsid()
# Do second fork
if os.fork():
os._exit(0)
# Ensure process doesn't keep any directory in use that may prevent a filesystem unmount.
os.chdir(get_config_dir())
# Write pid file before chuid
if options.pidfile:
with open(options.pidfile, 'w') as _file:
_file.write('%d\n' % os.getpid())
if not common.windows_check():
if options.group:
if not options.group.isdigit():
import grp
options.group = grp.getgrnam(options.group)[2]
os.setgid(options.group)
if options.user:
if not options.user.isdigit():
import pwd
options.user = pwd.getpwnam(options.user)[2]
os.setuid(options.user)
return options
def add_process_arg_group(self):
"""Adds a grouping of common process args to control a daemon to the parser"""
self.process_arg_group = True
self.group = self.add_argument_group(_('Process Control Options'))
self.group.add_argument(
'-P',
'--pidfile',
metavar='<pidfile>',
action='store',
help=_('Pidfile to store the process id'),
)
if not common.windows_check():
self.group.add_argument(
'-d',
'--do-not-daemonize',
dest='donotdaemonize',
action='store_true',
help=_('Do not daemonize (fork) this process'),
)
self.group.add_argument(
'-f',
'--fork',
dest='donotdaemonize',
action='store_false',
help=argparse.SUPPRESS,
) # Deprecated arg
self.group.add_argument(
'-U',
'--user',
metavar='<user>',
action='store',
help=_('Change to this user on startup (Requires root)'),
)
self.group.add_argument(
'-g',
'--group',
metavar='<group>',
action='store',
help=_('Change to this group on startup (Requires root)'),
)

View File

@ -1,148 +0,0 @@
# The contents of this file are subject to the Python Software Foundation
# License Version 2.3 (the License). You may not copy or use this file, in
# either source code or executable form, except in compliance with the License.
# You may obtain a copy of the License at http://www.python.org/license.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# Written by Petru Paler
# Updated by Calum Lind to support Python 3.
class BTFailure(Exception):
pass
DICT_DELIM = b'd'
END_DELIM = b'e'
INT_DELIM = b'i'
LIST_DELIM = b'l'
BYTE_SEP = b':'
def decode_int(x, f):
f += 1
newf = x.index(END_DELIM, f)
n = int(x[f:newf])
if x[f : f + 1] == b'-' and x[f + 1 : f + 2] == b'0':
raise ValueError
elif x[f : f + 1] == b'0' and newf != f + 1:
raise ValueError
return (n, newf + 1)
def decode_string(x, f):
colon = x.index(BYTE_SEP, f)
n = int(x[f:colon])
if x[f : f + 1] == b'0' and colon != f + 1:
raise ValueError
colon += 1
return (x[colon : colon + n], colon + n)
def decode_list(x, f):
r, f = [], f + 1
while x[f : f + 1] != END_DELIM:
v, f = decode_func[x[f : f + 1]](x, f)
r.append(v)
return (r, f + 1)
def decode_dict(x, f):
r, f = {}, f + 1
while x[f : f + 1] != END_DELIM:
k, f = decode_string(x, f)
r[k], f = decode_func[x[f : f + 1]](x, f)
return (r, f + 1)
decode_func = {}
decode_func[LIST_DELIM] = decode_list
decode_func[DICT_DELIM] = decode_dict
decode_func[INT_DELIM] = decode_int
decode_func[b'0'] = decode_string
decode_func[b'1'] = decode_string
decode_func[b'2'] = decode_string
decode_func[b'3'] = decode_string
decode_func[b'4'] = decode_string
decode_func[b'5'] = decode_string
decode_func[b'6'] = decode_string
decode_func[b'7'] = decode_string
decode_func[b'8'] = decode_string
decode_func[b'9'] = decode_string
def bdecode(x):
try:
r, __ = decode_func[x[0:1]](x, 0)
except (LookupError, TypeError, ValueError):
raise BTFailure('Not a valid bencoded string')
else:
return r
class Bencached(object):
__slots__ = ['bencoded']
def __init__(self, s):
self.bencoded = s
def encode_bencached(x, r):
r.append(x.bencoded)
def encode_int(x, r):
r.extend((INT_DELIM, str(x).encode('utf8'), END_DELIM))
def encode_bool(x, r):
encode_int(1 if x else 0, r)
def encode_string(x, r):
encode_bytes(x.encode('utf8'), r)
def encode_bytes(x, r):
r.extend((str(len(x)).encode('utf8'), BYTE_SEP, x))
def encode_list(x, r):
r.append(LIST_DELIM)
for i in x:
encode_func[type(i)](i, r)
r.append(END_DELIM)
def encode_dict(x, r):
r.append(DICT_DELIM)
for k, v in sorted(x.items()):
try:
k = k.encode('utf8')
except AttributeError:
pass
r.extend((str(len(k)).encode('utf8'), BYTE_SEP, k))
encode_func[type(v)](v, r)
r.append(END_DELIM)
encode_func = {}
encode_func[Bencached] = encode_bencached
encode_func[int] = encode_int
encode_func[list] = encode_list
encode_func[tuple] = encode_list
encode_func[dict] = encode_dict
encode_func[bool] = encode_bool
encode_func[str] = encode_string
encode_func[bytes] = encode_bytes
def bencode(x):
r = []
encode_func[type(x)](x, r)
return b''.join(r)
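# --- Illustrative usage sketch (not part of the original module) ---
# Round-trip a dict through bencode/bdecode. Note that bdecode returns
# byte-string keys and values, so lookups on the decoded dict use byte literals.
if __name__ == '__main__':
    assert bencode({'n': 1}) == b'd1:ni1ee'
    decoded = bdecode(bencode({'announce': 'http://tracker.example.com', 'length': 16384}))
    assert decoded == {b'announce': b'http://tracker.example.com', b'length': 16384}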

File diff suppressed because it is too large Load Diff

View File

@ -1,487 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2010 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import traceback
from collections import defaultdict
from twisted.internet import reactor
from twisted.internet.defer import DeferredList, fail, maybeDeferred, succeed
from twisted.internet.task import LoopingCall, deferLater
log = logging.getLogger(__name__)
class ComponentAlreadyRegistered(Exception):
pass
class ComponentException(Exception):
def __init__(self, message, tb):
super(ComponentException, self).__init__(message)
self.message = message
self.tb = tb
def __str__(self):
s = super(ComponentException, self).__str__()
return '%s\n%s' % (s, ''.join(self.tb))
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.message == other.message
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
class Component(object):
"""Component objects are singletons managed by the :class:`ComponentRegistry`.
When a new Component object is instantiated, it will be automatically
registered with the :class:`ComponentRegistry`.
The ComponentRegistry has the ability to start, stop, pause and shutdown the
components registered with it.
**Events:**
**start()** - This method is called when the client has connected to a
Deluge core.
**stop()** - This method is called when the client has disconnected from a
Deluge core.
**update()** - This method is called every 1 second by default while the
Component is in a *Started* state. The interval can be
specified during instantiation. The update() timer can be
paused by instructing the :class:`ComponentRegistry` to pause
this Component.
**shutdown()** - This method is called when the client is exiting. If the
Component is in a "Started" state when this is called, a
call to stop() will be issued prior to shutdown().
**States:**
A Component can be in one of these 5 states.
**Started** - The Component has been started by the :class:`ComponentRegistry`
and will have its update timer started.
**Starting** - The Component has had its start method called, but it hasn't
fully started yet.
**Stopped** - The Component has either been stopped or has yet to be started.
**Stopping** - The Component has had its stop method called, but it hasn't
fully stopped yet.
**Paused** - The Component has had its update timer stopped, but will
still be considered in a Started state.
"""
def __init__(self, name, interval=1, depend=None):
"""Initialize component.
Args:
name (str): Name of component.
interval (int, optional): The interval in seconds to call the update function.
depend (list, optional): The names of components this component depends on.
"""
self._component_name = name
self._component_interval = interval
self._component_depend = depend
self._component_state = 'Stopped'
self._component_timer = None
self._component_starting_deferred = None
self._component_stopping_deferred = None
_ComponentRegistry.register(self)
def __del__(self):
if _ComponentRegistry:
_ComponentRegistry.deregister(self)
def _component_start_timer(self):
if hasattr(self, 'update'):
self._component_timer = LoopingCall(self.update)
self._component_timer.start(self._component_interval)
def _component_start(self):
def on_start(result):
self._component_state = 'Started'
self._component_starting_deferred = None
self._component_start_timer()
return True
def on_start_fail(result):
self._component_state = 'Stopped'
self._component_starting_deferred = None
log.error(result)
return fail(result)
if self._component_state == 'Stopped':
if hasattr(self, 'start'):
self._component_state = 'Starting'
d = deferLater(reactor, 0, self.start)
d.addCallbacks(on_start, on_start_fail)
self._component_starting_deferred = d
else:
d = maybeDeferred(on_start, None)
elif self._component_state == 'Starting':
return self._component_starting_deferred
elif self._component_state == 'Started':
d = succeed(True)
else:
d = fail(
ComponentException(
'Trying to start component "%s" but it is '
'not in a stopped state. Current state: %s'
% (self._component_name, self._component_state),
traceback.format_stack(limit=4),
)
)
return d
def _component_stop(self):
def on_stop(result):
self._component_state = 'Stopped'
if self._component_timer and self._component_timer.running:
self._component_timer.stop()
return True
def on_stop_fail(result):
self._component_state = 'Started'
self._component_stopping_deferred = None
log.error(result)
return result
if self._component_state != 'Stopped' and self._component_state != 'Stopping':
if hasattr(self, 'stop'):
self._component_state = 'Stopping'
d = maybeDeferred(self.stop)
d.addCallback(on_stop)
d.addErrback(on_stop_fail)
self._component_stopping_deferred = d
else:
d = maybeDeferred(on_stop, None)
if self._component_state == 'Stopping':
return self._component_stopping_deferred
return succeed(None)
def _component_pause(self):
def on_pause(result):
self._component_state = 'Paused'
if self._component_state == 'Started':
if self._component_timer and self._component_timer.running:
d = maybeDeferred(self._component_timer.stop)
d.addCallback(on_pause)
else:
d = succeed(None)
elif self._component_state == 'Paused':
d = succeed(None)
else:
d = fail(
ComponentException(
'Trying to pause component "%s" but it is '
'not in a started state. Current state: %s'
% (self._component_name, self._component_state),
traceback.format_stack(limit=4),
)
)
return d
def _component_resume(self):
def on_resume(result):
self._component_state = 'Started'
if self._component_state == 'Paused':
d = maybeDeferred(self._component_start_timer)
d.addCallback(on_resume)
else:
d = fail(
ComponentException(
'Trying to resume component "%s" but it is '
'not in a paused state. Current state: %s'
% (self._component_name, self._component_state),
traceback.format_stack(limit=4),
)
)
return d
def _component_shutdown(self):
def on_stop(result):
if hasattr(self, 'shutdown'):
return maybeDeferred(self.shutdown)
return succeed(None)
d = self._component_stop()
d.addCallback(on_stop)
return d
def get_state(self):
return self._component_state
def start(self):
pass
def stop(self):
pass
def update(self):
pass
def shutdown(self):
pass
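# --- Illustrative usage sketch (not part of the original module) ---
# A minimal Component subclass. Instantiating it registers it automatically
# with the ComponentRegistry defined below; component.start() then calls
# start() and schedules update() on the given interval. The name
# 'ExampleTicker' is invented for illustration.
class ExampleTicker(Component):
    def __init__(self):
        super(ExampleTicker, self).__init__('ExampleTicker', interval=5)
        self.ticks = 0

    def start(self):
        log.debug('%s started', self._component_name)

    def update(self):
        # Called every `interval` seconds while in the Started state.
        self.ticks += 1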
class ComponentRegistry(object):
"""The ComponentRegistry holds a list of currently registered :class:`Component` objects.
It is used to manage the Components by starting, stopping, pausing and shutting them down.
"""
def __init__(self):
self.components = {}
# Stores all of the components that are dependent on a particular component
self.dependents = defaultdict(list)
def register(self, obj):
"""Register a component object with the registry.
Note:
This is done automatically when a Component object is instantiated.
Args:
obj (Component): A component object to register.
Raises:
ComponentAlreadyRegistered: If a component with the same name is already registered.
"""
name = obj._component_name
if name in self.components:
raise ComponentAlreadyRegistered(
'Component already registered with name %s' % name
)
self.components[obj._component_name] = obj
if obj._component_depend:
for depend in obj._component_depend:
self.dependents[depend].append(name)
def deregister(self, obj):
"""Deregister a component from the registry. A stop will be
issued to the component prior to deregistering it.
Args:
obj (Component): a component object to deregister
Returns:
Deferred: a deferred object that will fire once the Component has been
successfully deregistered
"""
if obj in self.components.values():
log.debug('Deregistering Component: %s', obj._component_name)
d = self.stop([obj._component_name])
def on_stop(result, name):
# Component may have been removed, so pop to ensure it doesn't fail
self.components.pop(name, None)
return d.addCallback(on_stop, obj._component_name)
else:
return succeed(None)
def start(self, names=None):
"""Start Components, and their dependencies, that are currently in a Stopped state.
Note:
If no names are specified then all registered components will be started.
Args:
names (list): A list of Components to start and their dependencies.
Returns:
Deferred: Fired once all Components have been successfully started.
"""
# Start all the components if names is empty
if not names:
names = list(self.components)
elif isinstance(names, str):
names = [names]
def on_depends_started(result, name):
return self.components[name]._component_start()
deferreds = []
for name in names:
if self.components[name]._component_depend:
# This component has depends, so we need to start them first.
d = self.start(self.components[name]._component_depend)
d.addCallback(on_depends_started, name)
deferreds.append(d)
else:
deferreds.append(self.components[name]._component_start())
return DeferredList(deferreds)
def stop(self, names=None):
"""Stop Components that are currently not in a Stopped state.
Note:
If no names are specified then all registered components will be stopped.
Args:
names (list): A list of Components to stop.
Returns:
Deferred: Fired once all Components have been successfully stopped.
"""
if not names:
names = list(self.components)
elif isinstance(names, str):
names = [names]
def on_dependents_stopped(result, name):
return self.components[name]._component_stop()
stopped_in_deferred = set()
deferreds = []
for name in names:
if name in stopped_in_deferred:
continue
if name in self.components:
if name in self.dependents:
# If other components depend on this component, stop them first
d = self.stop(self.dependents[name]).addCallback(
on_dependents_stopped, name
)
deferreds.append(d)
stopped_in_deferred.update(self.dependents[name])
else:
deferreds.append(self.components[name]._component_stop())
return DeferredList(deferreds)
def pause(self, names=None):
"""Pause Components that are currently in a Started state.
Note:
If no names are specified then all registered components will be paused.
Args:
names (list): A list of Components to pause.
Returns:
Deferred: Fired once all Components have been successfully paused.
"""
if not names:
names = list(self.components)
elif isinstance(names, str):
names = [names]
deferreds = []
for name in names:
if self.components[name]._component_state == 'Started':
deferreds.append(self.components[name]._component_pause())
return DeferredList(deferreds)
def resume(self, names=None):
"""Resume Components that are currently in a Paused state.
Note:
If no names are specified then all registered components will be resumed.
Args:
names (list): A list of Components to resume.
Returns:
Deferred: Fired once all Components have been successfully resumed.
"""
if not names:
names = list(self.components)
elif isinstance(names, str):
names = [names]
deferreds = []
for name in names:
if self.components[name]._component_state == 'Paused':
deferreds.append(self.components[name]._component_resume())
return DeferredList(deferreds)
def shutdown(self):
"""Shutdown all Components regardless of state.
This will call stop() on all the components prior to shutting down. This should be called
when the program is exiting to ensure all Components have a chance to properly shutdown.
Returns:
Deferred: Fired once all Components have been successfully shut down.
"""
def on_stopped(result):
return DeferredList(
[comp._component_shutdown() for comp in list(self.components.values())]
)
return self.stop(list(self.components)).addCallback(on_stopped)
def update(self):
"""Update all Components that are in a Started state."""
for component in self.components.values():
try:
component.update()
except BaseException as ex:
log.exception(ex)
_ComponentRegistry = ComponentRegistry()
deregister = _ComponentRegistry.deregister
start = _ComponentRegistry.start
stop = _ComponentRegistry.stop
pause = _ComponentRegistry.pause
resume = _ComponentRegistry.resume
update = _ComponentRegistry.update
shutdown = _ComponentRegistry.shutdown
def get(name):
"""Return a reference to a component.
Args:
name (str): The Component name to get.
Returns:
Component: The Component object.
Raises:
KeyError: If the Component does not exist.
"""
return _ComponentRegistry.components[name]

View File

@ -1,561 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""
Deluge Config Module
This module is used for loading and saving configuration files (or anything
really).
The format of the config file is two json encoded dicts:
<version dict>
<content dict>
The version dict contains two keys: file and format. The format version is
controlled by the Config class. It should only be changed when anything below
it is changed directly by the Config class. An example of this would be if we
changed the serializer for the content to something different.
The config file version is changed by the 'owner' of the config file. This is
to signify that there is a change in the naming of some config keys or something
similar along those lines.
The content is simply the dict to be saved and will be serialized before being
written.
Converting
Since the format of the config could change, there needs to be a way to have
the Config object convert to newer formats. To do this, you will need to
register conversion functions for various versions of the config file. Note that
this can only be done for the 'config file version' and not for the 'format'
version as this will be done internally.
"""
import json
import logging
import os
import pickle
import shutil
from codecs import getwriter
from io import open
from tempfile import NamedTemporaryFile
from deluge.common import JSON_FORMAT, get_default_config_dir
log = logging.getLogger(__name__)
callLater = None # noqa: N816 Necessary for the config tests
def prop(func):
"""Function decorator for defining property attributes
The decorated function is expected to return a dictionary
containing one or more of the following pairs:
fget - function for getting attribute value
fset - function for setting attribute value
fdel - function for deleting attribute
This can be conveniently constructed by the locals() builtin
function; see:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/205183
"""
return property(doc=func.__doc__, **func())
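# --- Illustrative usage sketch (not part of the original module) ---
# The decorated function returns a dict of accessor functions built from
# locals(); the class and attribute names here are invented for illustration.
class _PropExample(object):
    def __init__(self):
        self._value = 0

    @prop
    def value():  # pylint: disable=no-method-argument
        """An example property backed by self._value."""
        def fget(self):
            return self._value

        def fset(self, value):
            self._value = value

        return locals()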
def find_json_objects(text, decoder=json.JSONDecoder()):
"""Find json objects in text.
Args:
text (str): The text to find json objects within.
Returns:
list: A list of tuples containing start and end locations of json
objects in the text. e.g. [(start, end), ...]
"""
objects = []
offset = 0
while True:
try:
start = text.index('{', offset)
except ValueError:
break
try:
__, index = decoder.raw_decode(text[start:])
except json.decoder.JSONDecodeError:
offset = start + 1
else:
offset = start + index
objects.append((start, offset))
return objects
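# --- Illustrative usage sketch (not part of the original module) ---
# Splitting the two JSON objects of a config file, as described in the module
# docstring; the text below is a made-up example.
def _example_find_json_objects():
    text = '{"file": 1, "format": 1}{"max_connections": 200}'
    objects = find_json_objects(text)
    assert len(objects) == 2
    start, end = objects[0]
    assert json.loads(text[start:end]) == {'file': 1, 'format': 1}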
class Config(object):
"""This class is used to access/create/modify config files.
Args:
filename (str): The config filename.
defaults (dict): The default config values to insert before loading the config file.
config_dir (str): the path to the config directory.
file_version (int): The file format for the default config values when creating
a fresh config. This value should be increased whenever a new migration function is
setup to convert old config files. (default: 1)
"""
def __init__(self, filename, defaults=None, config_dir=None, file_version=1):
self.__config = {}
self.__set_functions = {}
self.__change_callbacks = []
# These hold the version numbers and they will be set when loaded
self.__version = {'format': 1, 'file': file_version}
# This will get set with a reactor.callLater whenever a config option
# is set.
self._save_timer = None
if defaults:
for key, value in defaults.items():
self.set_item(key, value)
# Load the config from file in the config_dir
if config_dir:
self.__config_file = os.path.join(config_dir, filename)
else:
self.__config_file = get_default_config_dir(filename)
self.load()
def __contains__(self, item):
return item in self.__config
def __setitem__(self, key, value):
"""See set_item"""
return self.set_item(key, value)
def set_item(self, key, value):
"""Sets item 'key' to 'value' in the config dictionary.
Does not allow changing the item's type unless it is None.
If the types do not match, it will attempt to convert it to the
set type before raising a ValueError.
Args:
key (str): The item to change.
value (any): The value to change the item to; must be the same type as what is
currently in the config.
Raises:
ValueError: Raised when the type of value is not the same as what is
currently in the config and it could not convert the value.
Examples:
>>> config = Config('test.conf')
>>> config['test'] = 5
>>> config['test']
5
"""
if key not in self.__config:
self.__config[key] = value
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
return
if self.__config[key] == value:
return
# Change the value type if it is not None and does not match.
type_match = isinstance(self.__config[key], (type(None), type(value)))
if value is not None and not type_match:
try:
oldtype = type(self.__config[key])
# Don't convert to bytes as requires encoding and value will
# be decoded anyway.
if oldtype is not bytes:
value = oldtype(value)
except ValueError:
log.warning('Value Type "%s" invalid for key: %s', type(value), key)
raise
if isinstance(value, bytes):
value = value.decode('utf8')
log.debug('Setting key "%s" to: %s (of type: %s)', key, value, type(value))
self.__config[key] = value
global callLater
if callLater is None:
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
from twisted.internet.reactor import ( # pylint: disable=redefined-outer-name
callLater,
)
# Run the set_function for this key if any
try:
for func in self.__set_functions[key]:
callLater(0, func, key, value)
except KeyError:
pass
try:
def do_change_callbacks(key, value):
for func in self.__change_callbacks:
func(key, value)
callLater(0, do_change_callbacks, key, value)
except Exception:
pass
# We set the save_timer for 5 seconds if not already set
if not self._save_timer or not self._save_timer.active():
self._save_timer = callLater(5, self.save)
def __getitem__(self, key):
"""See get_item """
return self.get_item(key)
def get_item(self, key):
"""Gets the value of item 'key'.
Args:
key (str): The item for which you want its value.
Returns:
any: The value of item 'key'.
Raises:
ValueError: If 'key' is not in the config dictionary.
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> config['test']
5
"""
return self.__config[key]
def get(self, key, default=None):
"""Gets the value of item 'key' if key is in the config, else default.
If default is not given, it defaults to None, so that this method
never raises a KeyError.
Args:
key (str): the item for which you want its value
default (any): the default value if key is missing
Returns:
any: The value of item 'key' or default.
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> config.get('test', 10)
5
>>> config.get('bad_key', 10)
10
"""
try:
return self.get_item(key)
except KeyError:
return default
def __delitem__(self, key):
"""
See
:meth:`del_item`
"""
self.del_item(key)
def del_item(self, key):
"""Deletes item with a specific key from the configuration.
Args:
key (str): The item which you wish to delete.
Raises:
ValueError: If 'key' is not in the config dictionary.
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> del config['test']
"""
del self.__config[key]
global callLater
if callLater is None:
# Must import here and not at the top or it will throw ReactorAlreadyInstalledError
from twisted.internet.reactor import ( # pylint: disable=redefined-outer-name
callLater,
)
# We set the save_timer for 5 seconds if not already set
if not self._save_timer or not self._save_timer.active():
self._save_timer = callLater(5, self.save)
def register_change_callback(self, callback):
"""Registers a callback function for any changed value.
Will be called when any value is changed in the config dictionary.
Args:
callback (func): The function to call with parameters: f(key, value).
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> def cb(key, value):
... print key, value
...
>>> config.register_change_callback(cb)
"""
self.__change_callbacks.append(callback)
def register_set_function(self, key, function, apply_now=True):
"""Register a function to be called when a config value changes.
Args:
key (str): The item to monitor for change.
function (func): The function to call when the value changes, f(key, value).
apply_now (bool): If True, the function will be called immediately after it's registered.
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> def cb(key, value):
... print key, value
...
>>> config.register_set_function('test', cb, apply_now=True)
test 5
"""
log.debug('Registering function for %s key..', key)
if key not in self.__set_functions:
self.__set_functions[key] = []
self.__set_functions[key].append(function)
# Run the function now if apply_now is set
if apply_now:
function(key, self.__config[key])
return
def apply_all(self):
"""Calls all set functions.
Examples:
>>> config = Config('test.conf', defaults={'test': 5})
>>> def cb(key, value):
... print key, value
...
>>> config.register_set_function('test', cb, apply_now=False)
>>> config.apply_all()
test 5
"""
log.debug('Calling all set functions..')
for key, value in self.__set_functions.items():
for func in value:
func(key, self.__config[key])
def apply_set_functions(self, key):
"""Calls set functions for `:param:key`.
Args:
key (str): the config key
"""
log.debug('Calling set functions for key %s..', key)
if key in self.__set_functions:
for func in self.__set_functions[key]:
func(key, self.__config[key])
def load(self, filename=None):
"""Load a config file.
Args:
filename (str): If None, uses filename set in object initialization
"""
if not filename:
filename = self.__config_file
try:
with open(filename, 'r', encoding='utf8') as _file:
data = _file.read()
except IOError as ex:
log.warning('Unable to open config file %s: %s', filename, ex)
return
objects = find_json_objects(data)
if not len(objects):
# No json objects found, try depickling it
try:
self.__config.update(pickle.loads(data))
except Exception as ex:
log.exception(ex)
log.warning('Unable to load config file: %s', filename)
elif len(objects) == 1:
start, end = objects[0]
try:
self.__config.update(json.loads(data[start:end]))
except Exception as ex:
log.exception(ex)
log.warning('Unable to load config file: %s', filename)
elif len(objects) == 2:
try:
start, end = objects[0]
self.__version.update(json.loads(data[start:end]))
start, end = objects[1]
self.__config.update(json.loads(data[start:end]))
except Exception as ex:
log.exception(ex)
log.warning('Unable to load config file: %s', filename)
log.debug(
'Config %s version: %s.%s loaded: %s',
filename,
self.__version['format'],
self.__version['file'],
self.__config,
)
def save(self, filename=None):
"""Save configuration to disk.
Args:
filename (str): If None, uses filename set in object initialization
Returns:
bool: Whether or not the save succeeded.
"""
if not filename:
filename = self.__config_file
# Check to see if the current config differs from the one on disk
# We will only write a new config file if there is a difference
try:
with open(filename, 'r', encoding='utf8') as _file:
data = _file.read()
objects = find_json_objects(data)
start, end = objects[0]
version = json.loads(data[start:end])
start, end = objects[1]
loaded_data = json.loads(data[start:end])
if self.__config == loaded_data and self.__version == version:
# The config has not changed so lets just return
if self._save_timer and self._save_timer.active():
self._save_timer.cancel()
return True
except (IOError, IndexError) as ex:
log.warning('Unable to open config file: %s because: %s', filename, ex)
# Save the new config and make sure it's written to disk
try:
with NamedTemporaryFile(
prefix=os.path.basename(filename) + '.', delete=False
) as _file:
filename_tmp = _file.name
log.debug('Saving new config file %s', filename_tmp)
json.dump(self.__version, getwriter('utf8')(_file), **JSON_FORMAT)
json.dump(self.__config, getwriter('utf8')(_file), **JSON_FORMAT)
_file.flush()
os.fsync(_file.fileno())
except IOError as ex:
log.error('Error writing new config file: %s', ex)
return False
# Resolve symlinked config files before backing up and saving.
filename = os.path.realpath(filename)
# Make a backup of the old config
try:
log.debug('Backing up old config file to %s.bak', filename)
shutil.move(filename, filename + '.bak')
except IOError as ex:
log.warning('Unable to backup old config: %s', ex)
# The new config file has been written successfully, so let's move it over
# the existing one.
try:
log.debug('Moving new config file %s to %s', filename_tmp, filename)
shutil.move(filename_tmp, filename)
except IOError as ex:
log.error('Error moving new config file: %s', ex)
return False
else:
return True
finally:
if self._save_timer and self._save_timer.active():
self._save_timer.cancel()
def run_converter(self, input_range, output_version, func):
"""Runs a function that will convert file versions.
Args:
input_range (tuple): (int, int) The range of input versions this function will accept.
output_version (int): The version this function will convert to.
func (func): The function that will do the conversion, it will take the config
dict as an argument and return the augmented dict.
Raises:
ValueError: If output_version is less than the input_range.
"""
if output_version in input_range or output_version <= max(input_range):
raise ValueError('output_version needs to be greater than input_range')
if self.__version['file'] not in input_range:
log.debug(
'File version %s is not in input_range %s, ignoring converter function..',
self.__version['file'],
input_range,
)
return
try:
self.__config = func(self.__config)
except Exception as ex:
log.exception(ex)
log.error(
'There was an exception trying to convert config file %s %s to %s',
self.__config_file,
self.__version['file'],
output_version,
)
raise ex
else:
self.__version['file'] = output_version
self.save()
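# --- Illustrative usage sketch (not part of the original module) ---
# Registering a migration that renames a key when loading an old config file.
# The filename, key names, and version numbers here are invented.
def _example_run_converter():
    def _convert_1_to_2(config):
        # Rename 'download_dir' to 'download_location'.
        config['download_location'] = config.pop('download_dir', '')
        return config

    config = Config('example.conf', defaults={'download_location': ''}, file_version=2)
    # Accepts files at version 1 and bumps them to version 2 on success.
    config.run_converter(input_range=(1,), output_version=2, func=_convert_1_to_2)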
@property
def config_file(self):
return self.__config_file
@prop
def config(): # pylint: disable=no-method-argument
"""The config dictionary"""
def fget(self):
return self.__config
def fdel(self):
return self.save()
return locals()

View File

@ -1,128 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import os
import deluge.common
import deluge.log
from deluge.config import Config
log = logging.getLogger(__name__)
class _ConfigManager(object):
def __init__(self):
log.debug('ConfigManager started..')
self.config_files = {}
self.__config_directory = None
@property
def config_directory(self):
if self.__config_directory is None:
self.__config_directory = deluge.common.get_default_config_dir()
return self.__config_directory
def __del__(self):
del self.config_files
def set_config_dir(self, directory):
"""
Sets the config directory.
:param directory: str, the directory where the config info should be
:returns bool: True if successfully changed directory, False if not
"""
if not directory:
return False
# Ensure absolute dirpath
directory = os.path.abspath(directory)
log.info('Setting config directory to: %s', directory)
if not os.path.exists(directory):
# Try to create the config folder if it doesn't exist
try:
os.makedirs(directory)
except OSError as ex:
log.error('Unable to make config directory: %s', ex)
return False
elif not os.path.isdir(directory):
log.error('Config directory needs to be a directory!')
return False
self.__config_directory = directory
# Reset the config_files so we don't get config from old config folder
# XXX: Probably should have it go through the config_files dict and try
# to reload based on the new config directory
self.save()
self.config_files = {}
deluge.log.tweak_logging_levels()
return True
def get_config_dir(self):
return self.config_directory
def close(self, config):
"""Closes a config file."""
try:
del self.config_files[config]
except KeyError:
pass
def save(self):
"""Saves all the configs to disk."""
for value in self.config_files.values():
value.save()
# We need to return True to keep the timer active
return True
def get_config(self, config_file, defaults=None, file_version=1):
"""Get a reference to the Config object for this filename"""
log.debug('Getting config: %s', config_file)
# Create the config object if not already created
if config_file not in self.config_files:
self.config_files[config_file] = Config(
config_file,
defaults,
config_dir=self.config_directory,
file_version=file_version,
)
return self.config_files[config_file]
# Singleton functions
_configmanager = _ConfigManager()
def ConfigManager(config, defaults=None, file_version=1): # NOQA: N802
return _configmanager.get_config(
config, defaults=defaults, file_version=file_version
)
def set_config_dir(directory):
"""Sets the config directory, else just uses default"""
return _configmanager.set_config_dir(deluge.common.decode_bytes(directory))
def get_config_dir(filename=None):
if filename is not None:
return os.path.join(_configmanager.get_config_dir(), filename)
else:
return _configmanager.get_config_dir()
def close(config):
return _configmanager.close(config)

View File

@ -1,142 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""
The AlertManager handles all the libtorrent alerts.
This should typically only be used by the Core. Plugins should utilize the
`:mod:EventManager` for similar functionality.
"""
import logging
from types import SimpleNamespace
from twisted.internet import reactor
import deluge.component as component
from deluge._libtorrent import lt
from deluge.common import decode_bytes
log = logging.getLogger(__name__)
class AlertManager(component.Component):
"""AlertManager fetches and processes libtorrent alerts"""
def __init__(self):
log.debug('AlertManager init...')
component.Component.__init__(self, 'AlertManager', interval=0.3)
self.session = component.get('Core').session
# Increase the alert queue size so that alerts don't get lost.
self.alert_queue_size = 10000
self.set_alert_queue_size(self.alert_queue_size)
alert_mask = (
lt.alert.category_t.error_notification
| lt.alert.category_t.port_mapping_notification
| lt.alert.category_t.storage_notification
| lt.alert.category_t.tracker_notification
| lt.alert.category_t.status_notification
| lt.alert.category_t.ip_block_notification
| lt.alert.category_t.performance_warning
)
self.session.apply_settings({'alert_mask': alert_mask})
# handlers is a dictionary of lists {"alert_type": [handler1,h2,..]}
self.handlers = {}
self.delayed_calls = []
def update(self):
self.delayed_calls = [dc for dc in self.delayed_calls if dc.active()]
self.handle_alerts()
def stop(self):
for delayed_call in self.delayed_calls:
if delayed_call.active():
delayed_call.cancel()
self.delayed_calls = []
def register_handler(self, alert_type, handler):
"""
Registers a function that will be called when 'alert_type' is popped
in handle_alerts. The handler function should look like: handler(alert)
Where 'alert' is the actual alert object from libtorrent.
:param alert_type: str, this is string representation of the alert name
:param handler: func(alert), the function to be called when the alert is raised
"""
if alert_type not in self.handlers:
# There is no entry for this alert type yet, so lets make it with an
# empty list.
self.handlers[alert_type] = []
# Append the handler to the list in the handlers dictionary
self.handlers[alert_type].append(handler)
log.debug('Registered handler for alert %s', alert_type)
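# --- Illustrative usage sketch (not part of the original module) ---
# Core code typically registers handlers by the libtorrent alert class name;
# the handler receives a copy of the alert's attributes. The handler body
# below is invented for illustration.
def _example_register_handler(alert_manager):
    def on_torrent_finished(alert):
        log.info('Finished: %s', decode_bytes(alert.message()))

    alert_manager.register_handler('torrent_finished_alert', on_torrent_finished)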
def deregister_handler(self, handler):
"""
De-registers the `:param:handler` function from all alert types.
:param handler: func, the handler function to deregister
"""
# Iterate through all handlers and remove 'handler' where found
for (dummy_key, value) in self.handlers.items():
if handler in value:
# Handler is in this alert type list
value.remove(handler)
def handle_alerts(self):
"""
Pops all libtorrent alerts in the session queue and handles them appropriately.
"""
alerts = self.session.pop_alerts()
if not alerts:
return
num_alerts = len(alerts)
if log.isEnabledFor(logging.DEBUG):
log.debug('Alerts queued: %s', num_alerts)
if num_alerts > 0.9 * self.alert_queue_size:
log.warning(
'Total alerts queued, %s, exceeds 90%% of the queue size.',
num_alerts,
)
# Loop through all alerts in the queue
for alert in alerts:
alert_type = type(alert).__name__
# Display the alert message
if log.isEnabledFor(logging.DEBUG):
log.debug('%s: %s', alert_type, decode_bytes(alert.message()))
# Call any handlers for this alert type
if alert_type in self.handlers:
for handler in self.handlers[alert_type]:
if log.isEnabledFor(logging.DEBUG):
log.debug('Handling alert: %s', alert_type)
# Copy alert attributes
alert_copy = SimpleNamespace(
**{
attr: getattr(alert, attr)
for attr in dir(alert)
if not attr.startswith('__')
}
)
self.delayed_calls.append(reactor.callLater(0, handler, alert_copy))
def set_alert_queue_size(self, queue_size):
"""Sets the maximum size of the libtorrent alert queue"""
log.info('Alert Queue Size set to %s', queue_size)
self.alert_queue_size = queue_size
component.get('Core').apply_session_setting(
'alert_queue_size', self.alert_queue_size
)

View File

@ -1,287 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import os
import shutil
from io import open
import deluge.component as component
import deluge.configmanager as configmanager
from deluge.common import (
AUTH_LEVEL_ADMIN,
AUTH_LEVEL_DEFAULT,
AUTH_LEVEL_NONE,
AUTH_LEVEL_NORMAL,
AUTH_LEVEL_READONLY,
create_localclient_account,
)
from deluge.error import AuthenticationRequired, AuthManagerError, BadLoginError
log = logging.getLogger(__name__)
AUTH_LEVELS_MAPPING = {
'NONE': AUTH_LEVEL_NONE,
'READONLY': AUTH_LEVEL_READONLY,
'DEFAULT': AUTH_LEVEL_NORMAL,
'NORMAL': AUTH_LEVEL_DEFAULT,
'ADMIN': AUTH_LEVEL_ADMIN,
}
AUTH_LEVELS_MAPPING_REVERSE = {v: k for k, v in AUTH_LEVELS_MAPPING.items()}
class Account(object):
__slots__ = ('username', 'password', 'authlevel')
def __init__(self, username, password, authlevel):
self.username = username
self.password = password
self.authlevel = authlevel
def data(self):
return {
'username': self.username,
'password': self.password,
'authlevel': AUTH_LEVELS_MAPPING_REVERSE[self.authlevel],
'authlevel_int': self.authlevel,
}
def __repr__(self):
return '<Account username="%(username)s" authlevel=%(authlevel)s>' % {
'username': self.username,
'authlevel': self.authlevel,
}
class AuthManager(component.Component):
def __init__(self):
component.Component.__init__(self, 'AuthManager', interval=10)
self.__auth = {}
self.__auth_modification_time = None
def start(self):
self.__load_auth_file()
def stop(self):
self.__auth = {}
def shutdown(self):
pass
def update(self):
auth_file = configmanager.get_config_dir('auth')
# Check for auth file and create if necessary
if not os.path.isfile(auth_file):
log.info('Authfile not found, recreating it.')
self.__load_auth_file()
return
auth_file_modification_time = os.stat(auth_file).st_mtime
if self.__auth_modification_time != auth_file_modification_time:
log.info('Auth file changed, reloading it!')
self.__load_auth_file()
def authorize(self, username, password):
"""Authorizes users based on username and password.
Args:
username (str): Username
password (str): Password
Returns:
int: The auth level for this user.
Raises:
AuthenticationRequired: If additional details are required to authenticate.
BadLoginError: If the username does not exist or password does not match.
"""
if not username:
raise AuthenticationRequired(
'Username and Password are required.', username
)
if username not in self.__auth:
# Let's try to re-load the file.. Maybe it's been updated
self.__load_auth_file()
if username not in self.__auth:
raise BadLoginError('Username does not exist', username)
if self.__auth[username].password == password:
# Return the users auth level
return self.__auth[username].authlevel
elif not password and self.__auth[username].password:
raise AuthenticationRequired('Password is required', username)
else:
raise BadLoginError('Password does not match', username)
def has_account(self, username):
return username in self.__auth
def get_known_accounts(self):
"""Returns a list of known deluge usernames."""
self.__load_auth_file()
return [account.data() for account in self.__auth.values()]
def create_account(self, username, password, authlevel):
if username in self.__auth:
raise AuthManagerError('Username in use.', username)
if authlevel not in AUTH_LEVELS_MAPPING:
raise AuthManagerError('Invalid auth level: %s' % authlevel)
try:
self.__auth[username] = Account(
username, password, AUTH_LEVELS_MAPPING[authlevel]
)
self.write_auth_file()
return True
except Exception as ex:
log.exception(ex)
raise ex
def update_account(self, username, password, authlevel):
if username not in self.__auth:
raise AuthManagerError('Username not known', username)
if authlevel not in AUTH_LEVELS_MAPPING:
raise AuthManagerError('Invalid auth level: %s' % authlevel)
try:
self.__auth[username].username = username
self.__auth[username].password = password
self.__auth[username].authlevel = AUTH_LEVELS_MAPPING[authlevel]
self.write_auth_file()
return True
except Exception as ex:
log.exception(ex)
raise ex
def remove_account(self, username):
if username not in self.__auth:
raise AuthManagerError('Username not known', username)
elif username == component.get('RPCServer').get_session_user():
raise AuthManagerError(
'You cannot delete your own account while logged in!', username
)
del self.__auth[username]
self.write_auth_file()
return True
def write_auth_file(self):
filename = 'auth'
filepath = os.path.join(configmanager.get_config_dir(), filename)
filepath_bak = filepath + '.bak'
filepath_tmp = filepath + '.tmp'
try:
if os.path.isfile(filepath):
log.debug('Creating backup of %s at: %s', filename, filepath_bak)
shutil.copy2(filepath, filepath_bak)
except IOError as ex:
log.error('Unable to backup %s to %s: %s', filepath, filepath_bak, ex)
else:
log.info('Saving the %s at: %s', filename, filepath)
try:
with open(filepath_tmp, 'w', encoding='utf8') as _file:
for account in self.__auth.values():
_file.write(
'%(username)s:%(password)s:%(authlevel_int)s\n'
% account.data()
)
_file.flush()
os.fsync(_file.fileno())
shutil.move(filepath_tmp, filepath)
except IOError as ex:
log.error('Unable to save %s: %s', filename, ex)
if os.path.isfile(filepath_bak):
log.info('Restoring backup of %s from: %s', filename, filepath_bak)
shutil.move(filepath_bak, filepath)
self.__load_auth_file()
def __load_auth_file(self):
save_and_reload = False
filename = 'auth'
auth_file = configmanager.get_config_dir(filename)
auth_file_bak = auth_file + '.bak'
# Check for auth file and create if necessary
if not os.path.isfile(auth_file):
create_localclient_account()
return self.__load_auth_file()
auth_file_modification_time = os.stat(auth_file).st_mtime
if self.__auth_modification_time is None:
self.__auth_modification_time = auth_file_modification_time
elif self.__auth_modification_time == auth_file_modification_time:
# File didn't change, no need for re-parsing
return
for _filepath in (auth_file, auth_file_bak):
log.info('Opening %s for load: %s', filename, _filepath)
try:
with open(_filepath, 'r', encoding='utf8') as _file:
file_data = _file.readlines()
except IOError as ex:
log.warning('Unable to load %s: %s', _filepath, ex)
file_data = []
else:
log.info('Successfully loaded %s: %s', filename, _filepath)
break
# Load the auth file into a dictionary: {username: Account(...)}
for line in file_data:
line = line.strip()
if line.startswith('#') or not line:
# This line is a comment or empty
continue
lsplit = line.split(':')
if len(lsplit) == 2:
username, password = lsplit
log.warning(
'Your auth entry for %s contains no auth level, '
'using AUTH_LEVEL_DEFAULT(%s)..',
username,
AUTH_LEVEL_DEFAULT,
)
if username == 'localclient':
authlevel = AUTH_LEVEL_ADMIN
else:
authlevel = AUTH_LEVEL_DEFAULT
# This is probably an old auth file
save_and_reload = True
elif len(lsplit) == 3:
username, password, authlevel = lsplit
else:
log.error('Your auth file is malformed: Incorrect number of fields!')
continue
username = username.strip()
password = password.strip()
try:
authlevel = int(authlevel)
except ValueError:
try:
authlevel = AUTH_LEVELS_MAPPING[authlevel]
except KeyError:
log.error(
'Your auth file is malformed: %r is not a valid auth level',
authlevel,
)
continue
self.__auth[username] = Account(username, password, authlevel)
if 'localclient' not in self.__auth:
create_localclient_account(True)
return self.__load_auth_file()
if save_and_reload:
log.info('Re-writing auth file (upgrade)')
self.write_auth_file()
self.__auth_modification_time = auth_file_modification_time
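# --- Illustrative example (not part of the original module) ---
# Each non-comment line of the auth file has the form
# 'username:password:authlevel', e.g.:
#
#   localclient:c0a2d4f1...:10
#   alice:secret:5
#
# The values shown are invented; level 10 corresponds to AUTH_LEVEL_ADMIN and
# 5 to AUTH_LEVEL_NORMAL as defined in deluge.common.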

File diff suppressed because it is too large Load Diff

View File

@ -1,204 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""The Deluge daemon"""
import logging
import os
import socket
from twisted.internet import reactor
import deluge.component as component
from deluge.common import get_version, is_ip, is_process_running, windows_check
from deluge.configmanager import get_config_dir
from deluge.core.core import Core
from deluge.core.rpcserver import RPCServer, export
from deluge.error import DaemonRunningError
if windows_check():
from win32api import SetConsoleCtrlHandler
from win32con import CTRL_CLOSE_EVENT, CTRL_SHUTDOWN_EVENT
log = logging.getLogger(__name__)
def is_daemon_running(pid_file):
"""
Check for another running instance of the daemon using the same pid file.
Args:
pid_file: The location of the file with pid, port values.
Returns:
bool: True if the daemon is running, False otherwise.
"""
try:
with open(pid_file) as _file:
pid, port = [int(x) for x in _file.readline().strip().split(';')]
except (EnvironmentError, ValueError):
return False
if is_process_running(pid):
# Ensure it's a deluged process by trying to open a socket to its port.
_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
_socket.connect(('127.0.0.1', port))
except socket.error:
# Can't connect, so pid is not a deluged process.
return False
else:
# This is a deluged process!
_socket.close()
return True
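# --- Illustrative example (not part of the original module) ---
# The pid file written by Daemon.start() below holds a single 'pid;port' line,
# e.g. '12345;58846', which is exactly what the parsing above expects.
def _example_is_daemon_running():
    # Assumes the default config dir holds a deluged.pid from a running daemon.
    return is_daemon_running(get_config_dir('deluged.pid'))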
class Daemon(object):
"""The Deluge Daemon class"""
def __init__(
self,
listen_interface=None,
outgoing_interface=None,
interface=None,
port=None,
standalone=False,
read_only_config_keys=None,
):
"""
Args:
listen_interface (str, optional): The IP address to listen to
BitTorrent connections on.
outgoing_interface (str, optional): The network interface name or
IP address to open outgoing BitTorrent connections on.
interface (str, optional): The IP address the daemon will
listen for UI connections on.
port (int, optional): The port the daemon will listen for UI
connections on.
standalone (bool, optional): If True the client is in Standalone
mode; otherwise, if False, start the daemon as a separate process.
read_only_config_keys (list of str, optional): A list of config
keys that will not be altered by core.set_config() RPC method.
"""
self.standalone = standalone
self.pid_file = get_config_dir('deluged.pid')
log.info('Deluge daemon %s', get_version())
if is_daemon_running(self.pid_file):
raise DaemonRunningError(
'Deluge daemon already running with this config directory!'
)
# Twisted catches signals to terminate, so just have it call the shutdown method.
reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
# Catch some Windows specific signals
if windows_check():
def win_handler(ctrl_type):
"""Handle the Windows shutdown or close events."""
log.debug('windows handler ctrl_type: %s', ctrl_type)
if ctrl_type == CTRL_CLOSE_EVENT or ctrl_type == CTRL_SHUTDOWN_EVENT:
self._shutdown()
return 1
SetConsoleCtrlHandler(win_handler)
# Start the core as a thread and join it until it's done
self.core = Core(
listen_interface=listen_interface,
outgoing_interface=outgoing_interface,
read_only_config_keys=read_only_config_keys,
)
if port is None:
port = self.core.config['daemon_port']
self.port = port
if interface and not is_ip(interface):
log.error('Invalid UI interface (must be IP Address): %s', interface)
interface = None
self.rpcserver = RPCServer(
port=port,
allow_remote=self.core.config['allow_remote'],
listen=not standalone,
interface=interface,
)
log.debug(
'Listening to UI on: %s:%s and bittorrent on: %s Making connections out on: %s',
interface,
port,
listen_interface,
outgoing_interface,
)
def start(self):
# Register the daemon and the core RPCs
self.rpcserver.register_object(self.core)
self.rpcserver.register_object(self)
# Make sure we start the PreferencesManager first
component.start('PreferencesManager')
if not self.standalone:
log.info('Deluge daemon starting...')
# Create pid file to track if deluged is running, also includes the port number.
pid = os.getpid()
log.debug('Storing pid %s & port %s in: %s', pid, self.port, self.pid_file)
with open(self.pid_file, 'w') as _file:
_file.write('%s;%s\n' % (pid, self.port))
component.start()
try:
reactor.run()
finally:
log.debug('Remove pid file: %s', self.pid_file)
os.remove(self.pid_file)
log.info('Deluge daemon shutdown successfully')
@export()
def shutdown(self, *args, **kwargs):
log.debug('Deluge daemon shutdown requested...')
reactor.callLater(0, reactor.stop)
def _shutdown(self, *args, **kwargs):
log.info('Deluge daemon shutting down, waiting for components to shutdown...')
if not self.standalone:
return component.shutdown()
@export()
def get_method_list(self):
"""Returns a list of the exported methods."""
return self.rpcserver.get_method_list()
@export()
def get_version(self):
"""Returns the daemon version"""
return get_version()
@export(1)
def authorized_call(self, rpc):
"""Determines if session auth_level is authorized to call RPC.
Args:
rpc (str): An RPC, e.g. core.get_torrents_status
Returns:
bool: True if authorized to call RPC, otherwise False.
"""
if rpc not in self.get_method_list():
return False
return (
self.rpcserver.get_session_auth_level()
>= self.rpcserver.get_rpc_auth_level(rpc)
)

View File

@ -1,141 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2010 Pedro Algarvio <pedro@algarvio.me>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import os
import sys
from logging import DEBUG, FileHandler, getLogger
from twisted.internet.error import CannotListenError
from deluge.argparserbase import ArgParserBase
from deluge.common import run_profiled
from deluge.configmanager import get_config_dir
from deluge.i18n import setup_mock_translation
def add_daemon_options(parser):
group = parser.add_argument_group(_('Daemon Options'))
group.add_argument(
'-u',
'--ui-interface',
metavar='<ip-addr>',
action='store',
help=_('IP address to listen for UI connections'),
)
group.add_argument(
'-p',
'--port',
metavar='<port>',
action='store',
type=int,
help=_('Port to listen for UI connections on'),
)
group.add_argument(
'-i',
'--interface',
metavar='<ip-addr>',
dest='listen_interface',
action='store',
help=_('IP address to listen for BitTorrent connections'),
)
group.add_argument(
'-o',
'--outgoing-interface',
metavar='<interface>',
dest='outgoing_interface',
action='store',
help=_(
'The network interface name or IP address for outgoing BitTorrent connections.'
),
)
group.add_argument(
'--read-only-config-keys',
metavar='<comma-separated-keys>',
action='store',
help=_('Config keys to be unmodified by `set_config` RPC'),
type=str,
default='',
)
parser.add_process_arg_group()
def start_daemon(skip_start=False):
"""
Entry point for daemon script
Args:
skip_start (bool): If starting daemon should be skipped.
Returns:
deluge.core.daemon.Daemon: A new daemon object
"""
setup_mock_translation()
# Setup the argument parser
parser = ArgParserBase()
add_daemon_options(parser)
options = parser.parse_args()
# Check for any daemons running with this same config
from deluge.core.daemon import is_daemon_running
pid_file = get_config_dir('deluged.pid')
if is_daemon_running(pid_file):
print(
'Cannot run multiple daemons with same config directory.\n'
'If you believe this is an error, force starting by deleting: %s' % pid_file
)
sys.exit(1)
log = getLogger(__name__)
# If no logfile specified add logging to default location (as well as stdout)
if not options.logfile:
options.logfile = get_config_dir('deluged.log')
file_handler = FileHandler(options.logfile)
log.addHandler(file_handler)
def run_daemon(options):
try:
from deluge.core.daemon import Daemon
daemon = Daemon(
listen_interface=options.listen_interface,
outgoing_interface=options.outgoing_interface,
interface=options.ui_interface,
port=options.port,
read_only_config_keys=options.read_only_config_keys.split(','),
)
if skip_start:
return daemon
else:
daemon.start()
except CannotListenError as ex:
log.error(
'Cannot start deluged, listen port in use.\n'
' Check for other running daemons or services using this port: %s:%s',
ex.interface,
ex.port,
)
sys.exit(1)
except Exception as ex:
log.error('Unable to start deluged: %s', ex)
if log.isEnabledFor(DEBUG):
log.exception(ex)
sys.exit(1)
finally:
log.info('Exiting...')
if options.pidfile:
os.remove(options.pidfile)
return run_profiled(
run_daemon, options, output_file=options.profile, do_profile=options.profile
)

View File

@ -1,67 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import deluge.component as component
log = logging.getLogger(__name__)
class EventManager(component.Component):
def __init__(self):
component.Component.__init__(self, 'EventManager')
self.handlers = {}
def emit(self, event):
"""
Emits the event to interested clients.
:param event: DelugeEvent
"""
# Emit the event to the interested clients
component.get('RPCServer').emit_event(event)
# Call any handlers for the event
if event.name in self.handlers:
for handler in self.handlers[event.name]:
# log.debug('Running handler %s for event %s with args: %s', event.name, handler, event.args)
try:
handler(*event.args)
except Exception as ex:
log.error(
'Event handler %s failed in %s with exception %s',
                        handler,
                        event.name,
ex,
)
def register_event_handler(self, event, handler):
"""
Registers a function to be called when a `:param:event` is emitted.
:param event: str, the event name
:param handler: function, to be called when `:param:event` is emitted
"""
if event not in self.handlers:
self.handlers[event] = []
if handler not in self.handlers[event]:
self.handlers[event].append(handler)
def deregister_event_handler(self, event, handler):
"""
Deregisters an event handler function.
:param event: str, the event name
:param handler: function, currently registered to handle `:param:event`
"""
if event in self.handlers and handler in self.handlers[event]:
self.handlers[event].remove(handler)
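# Editor's note: minimal illustrative sketch, not part of the original module.
# The handler name is hypothetical; TorrentAddedEvent is defined in
# deluge/event.py with (torrent_id, from_state) as its arguments.
def _example_register_handler():
    def on_torrent_added(torrent_id, from_state):
        log.info('Torrent %s added (from_state=%s)', torrent_id, from_state)
    # Any component (e.g. a core plugin) can subscribe to daemon events this way.
    component.get('EventManager').register_event_handler(
        'TorrentAddedEvent', on_torrent_added
    )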

View File

@ -1,275 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import deluge.component as component
from deluge.common import TORRENT_STATE
log = logging.getLogger(__name__)
STATE_SORT = ['All', 'Active'] + TORRENT_STATE
# Special purpose filters:
def filter_keywords(torrent_ids, values):
# Cleanup
keywords = ','.join([v.lower() for v in values])
keywords = keywords.split(',')
for keyword in keywords:
torrent_ids = filter_one_keyword(torrent_ids, keyword)
return torrent_ids
def filter_one_keyword(torrent_ids, keyword):
"""
    Search torrents for a single keyword.
    Searches name, state, tracker status, tracker URL, torrent id and file paths.
"""
all_torrents = component.get('TorrentManager').torrents
for torrent_id in torrent_ids:
torrent = all_torrents[torrent_id]
if keyword in torrent.filename.lower():
yield torrent_id
elif keyword in torrent.state.lower():
yield torrent_id
elif torrent.trackers and keyword in torrent.trackers[0]['url']:
yield torrent_id
elif keyword in torrent_id:
yield torrent_id
# Want to find broken torrents (search on "error", or "unregistered")
elif keyword in torrent.tracker_status.lower():
yield torrent_id
else:
for t_file in torrent.get_files():
if keyword in t_file['path'].lower():
yield torrent_id
break
def filter_by_name(torrent_ids, search_string):
all_torrents = component.get('TorrentManager').torrents
try:
search_string, match_case = search_string[0].split('::match')
except ValueError:
search_string = search_string[0]
match_case = False
if match_case is False:
search_string = search_string.lower()
for torrent_id in torrent_ids:
        torrent_name = all_torrents[torrent_id].get_name()
        if match_case is False:
            torrent_name = torrent_name.lower()
if search_string in torrent_name:
yield torrent_id
def tracker_error_filter(torrent_ids, values):
filtered_torrent_ids = []
tm = component.get('TorrentManager')
# If this is a tracker_host, then we need to filter on it
if values[0] != 'Error':
for torrent_id in torrent_ids:
if values[0] == tm[torrent_id].get_status(['tracker_host'])['tracker_host']:
filtered_torrent_ids.append(torrent_id)
return filtered_torrent_ids
# Check torrent's tracker_status for 'Error:' and return those torrent_ids
for torrent_id in torrent_ids:
if 'Error:' in tm[torrent_id].get_status(['tracker_status'])['tracker_status']:
filtered_torrent_ids.append(torrent_id)
return filtered_torrent_ids
class FilterManager(component.Component):
"""FilterManager"""
def __init__(self, core):
component.Component.__init__(self, 'FilterManager')
log.debug('FilterManager init..')
self.core = core
self.torrents = core.torrentmanager
self.registered_filters = {}
self.register_filter('keyword', filter_keywords)
self.register_filter('name', filter_by_name)
self.tree_fields = {}
self.register_tree_field('state', self._init_state_tree)
def _init_tracker_tree():
return {'Error': 0}
self.register_tree_field('tracker_host', _init_tracker_tree)
self.register_filter('tracker_host', tracker_error_filter)
def _init_users_tree():
return {'': 0}
self.register_tree_field('owner', _init_users_tree)
def filter_torrent_ids(self, filter_dict):
"""
returns a list of torrent_id's matching filter_dict.
core filter method
"""
if not filter_dict:
return self.torrents.get_torrent_list()
# Sanitize input: filter-value must be a list of strings
for key, value in filter_dict.items():
if isinstance(value, str):
filter_dict[key] = [value]
# Optimized filter for id
if 'id' in filter_dict:
torrent_ids = list(filter_dict['id'])
del filter_dict['id']
else:
torrent_ids = self.torrents.get_torrent_list()
# Return if there's nothing more to filter
if not filter_dict:
return torrent_ids
# Special purpose, state=Active.
if 'state' in filter_dict:
# We need to make sure this is a list for the logic below
filter_dict['state'] = list(filter_dict['state'])
if 'state' in filter_dict and 'Active' in filter_dict['state']:
filter_dict['state'].remove('Active')
if not filter_dict['state']:
del filter_dict['state']
torrent_ids = self.filter_state_active(torrent_ids)
if not filter_dict:
return torrent_ids
# Registered filters
for field, values in list(filter_dict.items()):
if field in self.registered_filters:
# Filters out doubles
torrent_ids = list(
set(self.registered_filters[field](torrent_ids, values))
)
del filter_dict[field]
if not filter_dict:
return torrent_ids
torrent_keys, plugin_keys = self.torrents.separate_keys(
list(filter_dict), torrent_ids
)
# Leftover filter arguments, default filter on status fields.
for torrent_id in list(torrent_ids):
status = self.core.create_torrent_status(
torrent_id, torrent_keys, plugin_keys
)
for field, values in filter_dict.items():
if field in status and status[field] in values:
continue
elif torrent_id in torrent_ids:
torrent_ids.remove(torrent_id)
return torrent_ids
def get_filter_tree(self, show_zero_hits=True, hide_cat=None):
"""
returns {field: [(value,count)] }
for use in sidebar.
"""
torrent_ids = self.torrents.get_torrent_list()
tree_keys = list(self.tree_fields)
if hide_cat:
for cat in hide_cat:
tree_keys.remove(cat)
torrent_keys, plugin_keys = self.torrents.separate_keys(tree_keys, torrent_ids)
items = {field: self.tree_fields[field]() for field in tree_keys}
for torrent_id in list(torrent_ids):
status = self.core.create_torrent_status(
torrent_id, torrent_keys, plugin_keys
) # status={key:value}
for field in tree_keys:
value = status[field]
items[field][value] = items[field].get(value, 0) + 1
if 'tracker_host' in items:
items['tracker_host']['All'] = len(torrent_ids)
items['tracker_host']['Error'] = len(
tracker_error_filter(torrent_ids, ('Error',))
)
if not show_zero_hits:
for cat in ['state', 'owner', 'tracker_host']:
if cat in tree_keys:
self._hide_state_items(items[cat])
# Return a dict of tuples:
sorted_items = {field: sorted(items[field].items()) for field in tree_keys}
if 'state' in tree_keys:
sorted_items['state'].sort(key=self._sort_state_item)
return sorted_items
def _init_state_tree(self):
init_state = {}
init_state['All'] = len(self.torrents.get_torrent_list())
for state in TORRENT_STATE:
init_state[state] = 0
init_state['Active'] = len(
self.filter_state_active(self.torrents.get_torrent_list())
)
return init_state
def register_filter(self, filter_id, filter_func, filter_value=None):
self.registered_filters[filter_id] = filter_func
def deregister_filter(self, filter_id):
del self.registered_filters[filter_id]
def register_tree_field(self, field, init_func=lambda: {}):
self.tree_fields[field] = init_func
def deregister_tree_field(self, field):
if field in self.tree_fields:
del self.tree_fields[field]
def filter_state_active(self, torrent_ids):
for torrent_id in list(torrent_ids):
status = self.torrents[torrent_id].get_status(
['download_payload_rate', 'upload_payload_rate']
)
if status['download_payload_rate'] or status['upload_payload_rate']:
pass
else:
torrent_ids.remove(torrent_id)
return torrent_ids
def _hide_state_items(self, state_items):
"""For hide(show)-zero hits"""
for value, count in list(state_items.items()):
if value != 'All' and count == 0:
del state_items[value]
def _sort_state_item(self, item):
try:
return STATE_SORT.index(item[0])
except ValueError:
return 99
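# Editor's note: minimal illustrative sketch, not part of the original module.
# The filter values below are hypothetical; 'state' is a tree field and 'name'
# is one of the special-purpose filters registered in FilterManager.__init__.
def _example_filter_usage():
    # Torrents that are both Downloading and Active, with 'ubuntu' in their name.
    filter_dict = {'state': ['Downloading', 'Active'], 'name': ['ubuntu']}
    return component.get('FilterManager').filter_torrent_ids(filter_dict)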

View File

@ -1,106 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""PluginManager for Core"""
import logging
from twisted.internet import defer
import deluge.component as component
import deluge.pluginmanagerbase
from deluge.event import PluginDisabledEvent, PluginEnabledEvent
log = logging.getLogger(__name__)
class PluginManager(deluge.pluginmanagerbase.PluginManagerBase, component.Component):
"""PluginManager handles the loading of plugins and provides plugins with
functions to access parts of the core."""
def __init__(self, core):
component.Component.__init__(self, 'CorePluginManager')
self.status_fields = {}
# Call the PluginManagerBase constructor
deluge.pluginmanagerbase.PluginManagerBase.__init__(
self, 'core.conf', 'deluge.plugin.core'
)
def start(self):
# Enable plugins that are enabled in the config
self.enable_plugins()
def stop(self):
# Disable all enabled plugins
self.disable_plugins()
def shutdown(self):
self.stop()
def update_plugins(self):
for plugin in self.plugins:
if hasattr(self.plugins[plugin], 'update'):
try:
self.plugins[plugin].update()
except Exception as ex:
log.exception(ex)
def enable_plugin(self, name):
d = defer.succeed(True)
if name not in self.plugins:
d = deluge.pluginmanagerbase.PluginManagerBase.enable_plugin(self, name)
def on_enable_plugin(result):
if result is True and name in self.plugins:
component.get('EventManager').emit(PluginEnabledEvent(name))
return result
d.addBoth(on_enable_plugin)
return d
def disable_plugin(self, name):
d = defer.succeed(True)
if name in self.plugins:
d = deluge.pluginmanagerbase.PluginManagerBase.disable_plugin(self, name)
def on_disable_plugin(result):
if name not in self.plugins:
component.get('EventManager').emit(PluginDisabledEvent(name))
return result
d.addBoth(on_disable_plugin)
return d
def get_status(self, torrent_id, fields):
"""Return the value of status fields for the selected torrent_id."""
status = {}
if len(fields) == 0:
fields = list(self.status_fields)
for field in fields:
try:
status[field] = self.status_fields[field](torrent_id)
except KeyError:
pass
return status
def register_status_field(self, field, function):
"""Register a new status field. This can be used in the same way the
client requests other status information from core."""
log.debug('Registering status field %s with PluginManager', field)
self.status_fields[field] = function
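        # Editor's note (illustrative, hypothetical plugin code): a Label-style
        # plugin could expose a per-torrent status field with
        #     plugin_manager.register_status_field(
        #         'label', lambda torrent_id: labels.get(torrent_id, '')
        #     )
        # after which clients can request 'label' like any core status key.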
def deregister_status_field(self, field):
"""Deregisters a status field"""
log.debug('Deregistering status field %s with PluginManager', field)
try:
del self.status_fields[field]
except Exception:
log.warning('Unable to deregister status field %s', field)

View File

@ -1,475 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2010 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import logging
import os
import platform
import random
import threading
from urllib.parse import quote_plus
from urllib.request import urlopen
from twisted.internet.task import LoopingCall
import deluge.common
import deluge.component as component
import deluge.configmanager
from deluge._libtorrent import lt
from deluge.event import ConfigValueChangedEvent
try:
import GeoIP
except ImportError:
GeoIP = None
log = logging.getLogger(__name__)
DEFAULT_PREFS = {
'send_info': False,
'info_sent': 0.0,
'daemon_port': 58846,
'allow_remote': False,
'pre_allocate_storage': False,
'download_location': deluge.common.get_default_download_dir(),
'listen_ports': [6881, 6891],
'listen_interface': '',
'outgoing_interface': '',
'random_port': True,
'listen_random_port': None,
'listen_use_sys_port': False,
'listen_reuse_port': True,
'outgoing_ports': [0, 0],
'random_outgoing_ports': True,
'copy_torrent_file': False,
'del_copy_torrent_file': False,
'torrentfiles_location': deluge.common.get_default_download_dir(),
'plugins_location': os.path.join(deluge.configmanager.get_config_dir(), 'plugins'),
'prioritize_first_last_pieces': False,
'sequential_download': False,
'dht': True,
'upnp': True,
'natpmp': True,
'utpex': True,
'lsd': True,
'enc_in_policy': 1,
'enc_out_policy': 1,
'enc_level': 2,
'max_connections_global': 200,
'max_upload_speed': -1.0,
'max_download_speed': -1.0,
'max_upload_slots_global': 4,
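    # Evaluates to 4 on Windows Vista, 8 on other Windows versions, 50 elsewhere.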
'max_half_open_connections': (
lambda: deluge.common.windows_check()
and (lambda: deluge.common.vista_check() and 4 or 8)()
or 50
)(),
'max_connections_per_second': 20,
'ignore_limits_on_local_network': True,
'max_connections_per_torrent': -1,
'max_upload_slots_per_torrent': -1,
'max_upload_speed_per_torrent': -1,
'max_download_speed_per_torrent': -1,
'enabled_plugins': [],
'add_paused': False,
'max_active_seeding': 5,
'max_active_downloading': 3,
'max_active_limit': 8,
'dont_count_slow_torrents': False,
'queue_new_to_top': False,
'stop_seed_at_ratio': False,
'remove_seed_at_ratio': False,
'stop_seed_ratio': 2.00,
'share_ratio_limit': 2.00,
'seed_time_ratio_limit': 7.00,
'seed_time_limit': 180,
'auto_managed': True,
'move_completed': False,
'move_completed_path': deluge.common.get_default_download_dir(),
'move_completed_paths_list': [],
'download_location_paths_list': [],
'path_chooser_show_chooser_button_on_localhost': True,
'path_chooser_auto_complete_enabled': True,
'path_chooser_accelerator_string': 'Tab',
'path_chooser_max_popup_rows': 20,
'path_chooser_show_hidden_files': False,
'new_release_check': True,
'proxy': {
'type': 0,
'hostname': '',
'username': '',
'password': '',
'port': 8080,
'proxy_hostnames': True,
'proxy_peer_connections': True,
'proxy_tracker_connections': True,
'force_proxy': False,
'anonymous_mode': False,
},
'peer_tos': '0x00',
'rate_limit_ip_overhead': True,
'geoip_db_location': '/usr/share/GeoIP/GeoIP.dat',
'cache_size': 512,
'cache_expiry': 60,
'auto_manage_prefer_seeds': False,
'shared': False,
'super_seeding': False,
}
class PreferencesManager(component.Component):
def __init__(self):
component.Component.__init__(self, 'PreferencesManager')
self.config = deluge.configmanager.ConfigManager('core.conf', DEFAULT_PREFS)
if 'proxies' in self.config:
log.warning(
'Updating config file for proxy, using "peer" values to fill new "proxy" setting'
)
self.config['proxy'].update(self.config['proxies']['peer'])
log.warning('New proxy config is: %s', self.config['proxy'])
del self.config['proxies']
if 'i2p_proxy' in self.config and self.config['i2p_proxy']['hostname']:
self.config['proxy'].update(self.config['i2p_proxy'])
self.config['proxy']['type'] = 6
del self.config['i2p_proxy']
if 'anonymous_mode' in self.config:
self.config['proxy']['anonymous_mode'] = self.config['anonymous_mode']
del self.config['anonymous_mode']
if 'proxy' in self.config:
for key in DEFAULT_PREFS['proxy']:
if key not in self.config['proxy']:
self.config['proxy'][key] = DEFAULT_PREFS['proxy'][key]
self.core = component.get('Core')
self.new_release_timer = None
def start(self):
# Set the initial preferences on start-up
for key in DEFAULT_PREFS:
self.do_config_set_func(key, self.config[key])
self.config.register_change_callback(self._on_config_value_change)
def stop(self):
if self.new_release_timer and self.new_release_timer.running:
self.new_release_timer.stop()
# Config set functions
def do_config_set_func(self, key, value):
on_set_func = getattr(self, '_on_set_' + key, None)
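        # e.g. the key 'dht' is routed to self._on_set_dht() defined below.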
if on_set_func:
if log.isEnabledFor(logging.DEBUG):
log.debug('Config key: %s set to %s..', key, value)
on_set_func(key, value)
def _on_config_value_change(self, key, value):
if self.get_state() == 'Started':
self.do_config_set_func(key, value)
component.get('EventManager').emit(ConfigValueChangedEvent(key, value))
def _on_set_torrentfiles_location(self, key, value):
if self.config['copy_torrent_file']:
try:
os.makedirs(value)
except OSError as ex:
log.debug('Unable to make directory: %s', ex)
def _on_set_listen_ports(self, key, value):
self.__set_listen_on()
def _on_set_listen_interface(self, key, value):
self.__set_listen_on()
def _on_set_outgoing_interface(self, key, value):
"""Set interface name or IP address for outgoing BitTorrent connections."""
value = value.strip() if value else ''
self.core.apply_session_settings({'outgoing_interfaces': value})
def _on_set_random_port(self, key, value):
self.__set_listen_on()
def __set_listen_on(self):
""" Set the ports and interface address to listen for incoming connections on."""
if self.config['random_port']:
if not self.config['listen_random_port']:
self.config['listen_random_port'] = random.randrange(49152, 65525)
listen_ports = [
self.config['listen_random_port']
] * 2 # use single port range
else:
self.config['listen_random_port'] = None
listen_ports = self.config['listen_ports']
if self.config['listen_interface']:
interface = self.config['listen_interface'].strip()
else:
interface = '0.0.0.0'
log.debug(
'Listen Interface: %s, Ports: %s with use_sys_port: %s',
interface,
listen_ports,
self.config['listen_use_sys_port'],
)
interfaces = [
'%s:%s' % (interface, port)
for port in range(listen_ports[0], listen_ports[1] + 1)
]
self.core.apply_session_settings(
{
'listen_system_port_fallback': self.config['listen_use_sys_port'],
'listen_interfaces': ','.join(interfaces),
}
)
def _on_set_outgoing_ports(self, key, value):
self.__set_outgoing_ports()
def _on_set_random_outgoing_ports(self, key, value):
self.__set_outgoing_ports()
def __set_outgoing_ports(self):
port = (
0
if self.config['random_outgoing_ports']
else self.config['outgoing_ports'][0]
)
if port:
num_ports = (
self.config['outgoing_ports'][1] - self.config['outgoing_ports'][0]
)
num_ports = num_ports if num_ports > 1 else 5
else:
num_ports = 0
log.debug('Outgoing port set to %s with range: %s', port, num_ports)
self.core.apply_session_settings(
{'outgoing_port': port, 'num_outgoing_ports': num_ports}
)
def _on_set_peer_tos(self, key, value):
try:
self.core.apply_session_setting('peer_tos', int(value, 16))
except ValueError as ex:
log.error('Invalid tos byte: %s', ex)
def _on_set_dht(self, key, value):
lt_bootstraps = self.core.session.get_settings()['dht_bootstrap_nodes']
# Update list of lt bootstraps, using set to remove duplicates.
dht_bootstraps = set(
lt_bootstraps.split(',')
+ [
'router.bittorrent.com:6881',
'router.utorrent.com:6881',
'router.bitcomet.com:6881',
'dht.transmissionbt.com:6881',
'dht.aelitis.com:6881',
]
)
self.core.apply_session_settings(
{'dht_bootstrap_nodes': ','.join(dht_bootstraps), 'enable_dht': value}
)
def _on_set_upnp(self, key, value):
self.core.apply_session_setting('enable_upnp', value)
def _on_set_natpmp(self, key, value):
self.core.apply_session_setting('enable_natpmp', value)
def _on_set_lsd(self, key, value):
self.core.apply_session_setting('enable_lsd', value)
def _on_set_utpex(self, key, value):
if value:
self.core.session.add_extension('ut_pex')
def _on_set_enc_in_policy(self, key, value):
self._on_set_encryption(key, value)
def _on_set_enc_out_policy(self, key, value):
self._on_set_encryption(key, value)
def _on_set_enc_level(self, key, value):
self._on_set_encryption(key, value)
def _on_set_encryption(self, key, value):
# Convert Deluge enc_level values to libtorrent enc_level values.
pe_enc_level = {
0: lt.enc_level.plaintext,
1: lt.enc_level.rc4,
2: lt.enc_level.both,
}
self.core.apply_session_settings(
{
'out_enc_policy': lt.enc_policy(self.config['enc_out_policy']),
'in_enc_policy': lt.enc_policy(self.config['enc_in_policy']),
'allowed_enc_level': lt.enc_level(
pe_enc_level[self.config['enc_level']]
),
'prefer_rc4': True,
}
)
def _on_set_max_connections_global(self, key, value):
self.core.apply_session_setting('connections_limit', value)
def _on_set_max_upload_speed(self, key, value):
        # We need to convert KiB/s to B/s
value = -1 if value < 0 else int(value * 1024)
self.core.apply_session_setting('upload_rate_limit', value)
def _on_set_max_download_speed(self, key, value):
        # We need to convert KiB/s to B/s
value = -1 if value < 0 else int(value * 1024)
self.core.apply_session_setting('download_rate_limit', value)
def _on_set_max_upload_slots_global(self, key, value):
self.core.apply_session_setting('unchoke_slots_limit', value)
def _on_set_max_half_open_connections(self, key, value):
self.core.apply_session_setting('half_open_limit', value)
def _on_set_max_connections_per_second(self, key, value):
self.core.apply_session_setting('connection_speed', value)
def _on_set_ignore_limits_on_local_network(self, key, value):
self.core.apply_session_setting('ignore_limits_on_local_network', value)
def _on_set_share_ratio_limit(self, key, value):
# This value is a float percentage in deluge, but libtorrent needs int percentage.
self.core.apply_session_setting('share_ratio_limit', int(value * 100))
def _on_set_seed_time_ratio_limit(self, key, value):
# This value is a float percentage in deluge, but libtorrent needs int percentage.
self.core.apply_session_setting('seed_time_ratio_limit', int(value * 100))
def _on_set_seed_time_limit(self, key, value):
# This value is stored in minutes in deluge, but libtorrent wants seconds
self.core.apply_session_setting('seed_time_limit', int(value * 60))
def _on_set_max_active_downloading(self, key, value):
self.core.apply_session_setting('active_downloads', value)
def _on_set_max_active_seeding(self, key, value):
self.core.apply_session_setting('active_seeds', value)
def _on_set_max_active_limit(self, key, value):
self.core.apply_session_setting('active_limit', value)
def _on_set_dont_count_slow_torrents(self, key, value):
self.core.apply_session_setting('dont_count_slow_torrents', value)
def _on_set_send_info(self, key, value):
"""sends anonymous stats home"""
log.debug('Sending anonymous stats..')
class SendInfoThread(threading.Thread):
def __init__(self, config):
self.config = config
threading.Thread.__init__(self)
def run(self):
import time
now = time.time()
# check if we've done this within the last week or never
if (now - self.config['info_sent']) >= (60 * 60 * 24 * 7):
try:
url = (
'http://deluge-torrent.org/stats_get.php?processor='
+ platform.machine()
+ '&python='
+ platform.python_version()
+ '&deluge='
+ deluge.common.get_version()
+ '&os='
+ platform.system()
+ '&plugins='
+ quote_plus(':'.join(self.config['enabled_plugins']))
)
urlopen(url)
except IOError as ex:
log.debug('Network error while trying to send info: %s', ex)
else:
self.config['info_sent'] = now
if value:
SendInfoThread(self.config).start()
def _on_set_new_release_check(self, key, value):
if value:
log.debug('Checking for new release..')
threading.Thread(target=self.core.get_new_release).start()
if self.new_release_timer and self.new_release_timer.running:
self.new_release_timer.stop()
# Set a timer to check for a new release every 3 days
self.new_release_timer = LoopingCall(
self._on_set_new_release_check, 'new_release_check', True
)
self.new_release_timer.start(72 * 60 * 60, False)
else:
if self.new_release_timer and self.new_release_timer.running:
self.new_release_timer.stop()
def _on_set_proxy(self, key, value):
# Initialise with type none and blank hostnames.
proxy_settings = {
'proxy_type': lt.proxy_type_t.none,
'i2p_hostname': '',
'proxy_hostname': '',
'proxy_hostnames': value['proxy_hostnames'],
'proxy_peer_connections': value['proxy_peer_connections'],
'proxy_tracker_connections': value['proxy_tracker_connections'],
'force_proxy': value['force_proxy'],
'anonymous_mode': value['anonymous_mode'],
}
if value['type'] == lt.proxy_type_t.i2p_proxy:
proxy_settings.update(
{
'proxy_type': lt.proxy_type_t.i2p_proxy,
'i2p_hostname': value['hostname'],
'i2p_port': value['port'],
}
)
elif value['type'] != lt.proxy_type_t.none:
proxy_settings.update(
{
'proxy_type': value['type'],
'proxy_hostname': value['hostname'],
'proxy_port': value['port'],
'proxy_username': value['username'],
'proxy_password': value['password'],
}
)
self.core.apply_session_settings(proxy_settings)
def _on_set_rate_limit_ip_overhead(self, key, value):
self.core.apply_session_setting('rate_limit_ip_overhead', value)
def _on_set_geoip_db_location(self, key, geoipdb_path):
# Load the GeoIP DB for country look-ups if available
if os.path.exists(geoipdb_path):
try:
self.core.geoip_instance = GeoIP.open(
geoipdb_path, GeoIP.GEOIP_STANDARD
)
except AttributeError:
log.warning('GeoIP Unavailable')
else:
log.warning('Unable to find GeoIP database file: %s', geoipdb_path)
def _on_set_cache_size(self, key, value):
self.core.apply_session_setting('cache_size', value)
def _on_set_cache_expiry(self, key, value):
self.core.apply_session_setting('cache_expiry', value)
def _on_auto_manage_prefer_seeds(self, key, value):
self.core.apply_session_setting('auto_manage_prefer_seeds', value)

View File

@ -1,586 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008,2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""RPCServer Module"""
import logging
import os
import sys
import traceback
from collections import namedtuple
from types import FunctionType
from twisted.internet import defer, reactor
from twisted.internet.protocol import Factory, connectionDone
import deluge.component as component
import deluge.configmanager
from deluge.core.authmanager import (
AUTH_LEVEL_ADMIN,
AUTH_LEVEL_DEFAULT,
AUTH_LEVEL_NONE,
)
from deluge.crypto_utils import check_ssl_keys, get_context_factory
from deluge.error import (
DelugeError,
IncompatibleClient,
NotAuthorizedError,
WrappedException,
_ClientSideRecreateError,
)
from deluge.event import ClientDisconnectedEvent
from deluge.transfer import DelugeTransferProtocol
RPC_RESPONSE = 1
RPC_ERROR = 2
RPC_EVENT = 3
log = logging.getLogger(__name__)
def export(auth_level=AUTH_LEVEL_DEFAULT):
"""
Decorator function to register an object's method as an RPC. The object
will need to be registered with an :class:`RPCServer` to be effective.
:param func: the function to export
:type func: function
:param auth_level: the auth level required to call this method
:type auth_level: int
"""
def wrap(func, *args, **kwargs):
func._rpcserver_export = True
func._rpcserver_auth_level = auth_level
rpc_text = '**RPC exported method** (*Auth level: %s*)' % auth_level
# Append the RPC text while ensuring correct docstring formatting.
if func.__doc__:
if func.__doc__.endswith(' '):
indent = func.__doc__.split('\n')[-1]
func.__doc__ += '\n{}'.format(indent)
else:
func.__doc__ += '\n\n'
func.__doc__ += rpc_text
else:
func.__doc__ = rpc_text
return func
if isinstance(auth_level, FunctionType):
func = auth_level
auth_level = AUTH_LEVEL_DEFAULT
return wrap(func)
else:
return wrap
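# Editor's note: minimal illustrative sketch, not part of the original module.
# The class and method names are hypothetical; RPCServer.register_object()
# below picks up any method carrying the attributes set by @export.
class _ExampleRPCObject(object):
    @export
    def ping(self):
        """Available to any authenticated session."""
        return 'pong'
    @export(AUTH_LEVEL_ADMIN)
    def sensitive(self):
        """Only sessions with admin auth level may call this."""
        return True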
def format_request(call):
"""
Format the RPCRequest message for debug printing
:param call: the request
:type call: a RPCRequest
:returns: a formatted string for printing
:rtype: str
"""
try:
s = call[1] + '('
if call[2]:
s += ', '.join([str(x) for x in call[2]])
if call[3]:
if call[2]:
s += ', '
s += ', '.join([key + '=' + str(value) for key, value in call[3].items()])
s += ')'
except UnicodeEncodeError:
return 'UnicodeEncodeError, call: %s' % call
else:
return s
class DelugeRPCProtocol(DelugeTransferProtocol):
def __init__(self):
super(DelugeRPCProtocol, self).__init__()
# namedtuple subclass with auth_level, username for the connected session.
self.AuthLevel = namedtuple('SessionAuthlevel', 'auth_level, username')
def message_received(self, request):
"""
This method is called whenever a message is received from a client. The
only message that a client sends to the server is a RPC Request message.
If the RPC Request message is valid, then the method is called in
:meth:`dispatch`.
:param request: the request from the client.
        :type request: tuple
"""
if not isinstance(request, tuple):
log.debug('Received invalid message: type is not tuple')
return
if len(request) < 1:
log.debug('Received invalid message: there are no items')
return
for call in request:
if len(call) != 4:
log.debug(
'Received invalid rpc request: number of items ' 'in request is %s',
len(call),
)
continue
# log.debug('RPCRequest: %s', format_request(call))
reactor.callLater(0, self.dispatch, *call)
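        # Editor's note: a request frame is a tuple of calls, each call being
        # (request_id, method, args, kwargs); e.g. (hypothetical values)
        # ((42, 'daemon.info', [], {}),) triggers dispatch(42, 'daemon.info', [], {}).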
def sendData(self, data): # NOQA: N802
"""
Sends the data to the client.
:param data: the object that is to be sent to the client. This should
be one of the RPC message types.
:type data: object
"""
try:
self.transfer_message(data)
except Exception as ex:
log.warning('Error occurred when sending message: %s.', ex)
log.exception(ex)
raise
def connectionMade(self): # NOQA: N802
"""
This method is called when a new client connects.
"""
peer = self.transport.getPeer()
log.info('Deluge Client connection made from: %s:%s', peer.host, peer.port)
# Set the initial auth level of this session to AUTH_LEVEL_NONE
self.factory.authorized_sessions[self.transport.sessionno] = self.AuthLevel(
AUTH_LEVEL_NONE, ''
)
def connectionLost(self, reason=connectionDone): # NOQA: N802
"""
This method is called when the client is disconnected.
:param reason: the reason the client disconnected.
:type reason: str
"""
# We need to remove this session from various dicts
del self.factory.authorized_sessions[self.transport.sessionno]
if self.transport.sessionno in self.factory.session_protocols:
del self.factory.session_protocols[self.transport.sessionno]
if self.transport.sessionno in self.factory.interested_events:
del self.factory.interested_events[self.transport.sessionno]
if self.factory.state == 'running':
component.get('EventManager').emit(
ClientDisconnectedEvent(self.factory.session_id)
)
log.info('Deluge client disconnected: %s', reason.value)
def valid_session(self):
return self.transport.sessionno in self.factory.authorized_sessions
def dispatch(self, request_id, method, args, kwargs):
"""
This method is run when a RPC Request is made. It will run the local method
and will send either a RPC Response or RPC Error back to the client.
:param request_id: the request_id from the client (sent in the RPC Request)
:type request_id: int
:param method: the local method to call. It must be registered with
the :class:`RPCServer`.
:type method: str
:param args: the arguments to pass to `method`
:type args: list
:param kwargs: the keyword-arguments to pass to `method`
:type kwargs: dict
"""
def send_error():
"""
Sends an error response with the contents of the exception that was raised.
"""
exc_type, exc_value, dummy_exc_trace = sys.exc_info()
formated_tb = traceback.format_exc()
try:
self.sendData(
(
RPC_ERROR,
request_id,
exc_type.__name__,
exc_value._args,
exc_value._kwargs,
formated_tb,
)
)
except AttributeError:
                # This is not a deluge exception (object has no attribute '_args'), let's wrap it
log.warning(
'An exception occurred while sending RPC_ERROR to '
'client. Wrapping it and resending. Error to '
                    'send (causing exception goes next):\n%s',
formated_tb,
)
try:
raise WrappedException(
str(exc_value), exc_type.__name__, formated_tb
)
except WrappedException:
send_error()
except Exception as ex:
log.error(
'An exception occurred while sending RPC_ERROR to client: %s', ex
)
if method == 'daemon.info':
# This is a special case and used in the initial connection process
self.sendData((RPC_RESPONSE, request_id, deluge.common.get_version()))
return
elif method == 'daemon.login':
# This is a special case and used in the initial connection process
# We need to authenticate the user here
log.debug('RPC dispatch daemon.login')
try:
client_version = kwargs.pop('client_version', None)
if client_version is None:
raise IncompatibleClient(deluge.common.get_version())
ret = component.get('AuthManager').authorize(*args, **kwargs)
if ret:
self.factory.authorized_sessions[
self.transport.sessionno
] = self.AuthLevel(ret, args[0])
self.factory.session_protocols[self.transport.sessionno] = self
except Exception as ex:
send_error()
if not isinstance(ex, _ClientSideRecreateError):
log.exception(ex)
else:
self.sendData((RPC_RESPONSE, request_id, (ret)))
if not ret:
self.transport.loseConnection()
return
# Anything below requires a valid session
if not self.valid_session():
return
if method == 'daemon.set_event_interest':
log.debug('RPC dispatch daemon.set_event_interest')
# This special case is to allow clients to set which events they are
# interested in receiving.
# We are expecting a sequence from the client.
try:
if self.transport.sessionno not in self.factory.interested_events:
self.factory.interested_events[self.transport.sessionno] = []
self.factory.interested_events[self.transport.sessionno].extend(args[0])
except Exception:
send_error()
else:
self.sendData((RPC_RESPONSE, request_id, (True)))
return
if method not in self.factory.methods:
try:
# Raise exception to be sent back to client
raise AttributeError('RPC call on invalid function: %s' % method)
except AttributeError:
send_error()
return
log.debug('RPC dispatch %s', method)
try:
method_auth_requirement = self.factory.methods[method]._rpcserver_auth_level
auth_level = self.factory.authorized_sessions[
self.transport.sessionno
].auth_level
if auth_level < method_auth_requirement:
# This session is not allowed to call this method
log.debug(
'Session %s is attempting an unauthorized method call!',
self.transport.sessionno,
)
raise NotAuthorizedError(auth_level, method_auth_requirement)
# Set the session_id in the factory so that methods can know
# which session is calling it.
self.factory.session_id = self.transport.sessionno
ret = self.factory.methods[method](*args, **kwargs)
except Exception as ex:
send_error()
# Don't bother printing out DelugeErrors, because they are just
# for the client
if not isinstance(ex, DelugeError):
log.exception('Exception calling RPC request: %s', ex)
else:
# Check if the return value is a deferred, since we'll need to
# wait for it to fire before sending the RPC_RESPONSE
if isinstance(ret, defer.Deferred):
def on_success(result):
try:
self.sendData((RPC_RESPONSE, request_id, result))
except Exception:
send_error()
return result
def on_fail(failure):
try:
failure.raiseException()
except Exception:
send_error()
return failure
ret.addCallbacks(on_success, on_fail)
else:
self.sendData((RPC_RESPONSE, request_id, ret))
class RPCServer(component.Component):
"""
This class is used to handle rpc requests from the client. Objects are
registered with this class and their methods are exported using the export
decorator.
:param port: the port the RPCServer will listen on
:type port: int
:param interface: the interface to listen on, this may override the `allow_remote` setting
:type interface: str
:param allow_remote: set True if the server should allow remote connections
:type allow_remote: bool
    :param listen: if False, will not start listening. This is only useful in Classic Mode
:type listen: bool
"""
def __init__(self, port=58846, interface='', allow_remote=False, listen=True):
component.Component.__init__(self, 'RPCServer')
self.factory = Factory()
self.factory.protocol = DelugeRPCProtocol
self.factory.session_id = -1
self.factory.state = 'running'
# Holds the registered methods
self.factory.methods = {}
# Holds the session_ids and auth levels
self.factory.authorized_sessions = {}
# Holds the protocol objects with the session_id as key
self.factory.session_protocols = {}
# Holds the interested event list for the sessions
self.factory.interested_events = {}
self.listen = listen
if not listen:
return
if allow_remote:
hostname = ''
else:
hostname = 'localhost'
if interface:
hostname = interface
log.info('Starting DelugeRPC server %s:%s', hostname, port)
# Check for SSL keys and generate some if needed
check_ssl_keys()
cert = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.cert')
pkey = os.path.join(deluge.configmanager.get_config_dir('ssl'), 'daemon.pkey')
try:
reactor.listenSSL(
port, self.factory, get_context_factory(cert, pkey), interface=hostname
)
except Exception as ex:
log.debug('Daemon already running or port not available.: %s', ex)
raise
def register_object(self, obj, name=None):
"""
        Registers an object to export its rpc methods. These methods should
be exported with the export decorator prior to registering the object.
:param obj: the object that we want to export
:type obj: object
:param name: the name to use, if None, it will be the class name of the object
:type name: str
"""
if not name:
name = obj.__class__.__name__.lower()
for d in dir(obj):
if d[0] == '_':
continue
if getattr(getattr(obj, d), '_rpcserver_export', False):
log.debug('Registering method: %s', name + '.' + d)
self.factory.methods[name + '.' + d] = getattr(obj, d)
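        # Editor's note: e.g. the Daemon object registers itself in
        # deluge/core/daemon.py, so its exported get_version() becomes
        # callable by clients as 'daemon.get_version'.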
def deregister_object(self, obj):
"""
        Deregisters an object's exported rpc methods.
:param obj: the object that was previously registered
"""
for key, value in self.factory.methods.items():
if value.__self__ == obj:
del self.factory.methods[key]
def get_object_method(self, name):
"""
Returns a registered method.
:param name: the name of the method, usually in the form of 'object.method'
:type name: str
:returns: method
:raises KeyError: if `name` is not registered
"""
return self.factory.methods[name]
def get_method_list(self):
"""
Returns a list of the exported methods.
:returns: the exported methods
:rtype: list
"""
return list(self.factory.methods)
def get_session_id(self):
"""
Returns the session id of the current RPC.
:returns: the session id, this will be -1 if no connections have been made
:rtype: int
"""
return self.factory.session_id
def get_session_user(self):
"""
Returns the username calling the current RPC.
:returns: the username of the user calling the current RPC
:rtype: string
"""
if not self.listen:
return 'localclient'
session_id = self.get_session_id()
if session_id > -1 and session_id in self.factory.authorized_sessions:
return self.factory.authorized_sessions[session_id].username
else:
# No connections made yet
return ''
def get_session_auth_level(self):
"""
Returns the auth level of the user calling the current RPC.
:returns: the auth level
:rtype: int
"""
if not self.listen or not self.is_session_valid(self.get_session_id()):
return AUTH_LEVEL_ADMIN
return self.factory.authorized_sessions[self.get_session_id()].auth_level
def get_rpc_auth_level(self, rpc):
"""
Returns the auth level requirement for an exported rpc.
:returns: the auth level
:rtype: int
"""
return self.factory.methods[rpc]._rpcserver_auth_level
def is_session_valid(self, session_id):
"""
        Checks if the session is still valid, e.g. if the client is still connected.
:param session_id: the session id
:type session_id: int
:returns: True if the session is valid
:rtype: bool
"""
return session_id in self.factory.authorized_sessions
def emit_event(self, event):
"""
Emits the event to interested clients.
:param event: the event to emit
:type event: :class:`deluge.event.DelugeEvent`
"""
log.debug('intevents: %s', self.factory.interested_events)
# Find sessions interested in this event
for session_id, interest in self.factory.interested_events.items():
if event.name in interest:
log.debug('Emit Event: %s %s', event.name, event.args)
# This session is interested so send a RPC_EVENT
self.factory.session_protocols[session_id].sendData(
(RPC_EVENT, event.name, event.args)
)
def emit_event_for_session_id(self, session_id, event):
"""
Emits the event to specified session_id.
        :param session_id: the session id to emit the event to
:type session_id: int
:param event: the event to emit
:type event: :class:`deluge.event.DelugeEvent`
"""
if not self.is_session_valid(session_id):
log.debug(
'Session ID %s is not valid. Not sending event "%s".',
session_id,
event.name,
)
return
if session_id not in self.factory.interested_events:
log.debug(
'Session ID %s is not interested in any events. Not sending event "%s".',
session_id,
event.name,
)
return
if event.name not in self.factory.interested_events[session_id]:
log.debug(
'Session ID %s is not interested in event "%s". Not sending it.',
session_id,
event.name,
)
return
log.debug(
'Sending event "%s" with args "%s" to session id "%s".',
event.name,
event.args,
session_id,
)
self.factory.session_protocols[session_id].sendData(
(RPC_EVENT, event.name, event.args)
)
def stop(self):
self.factory.state = 'stopping'

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,137 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007,2008 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import os
import stat
from OpenSSL import crypto
from OpenSSL.crypto import FILETYPE_PEM
from twisted.internet.ssl import (
AcceptableCiphers,
Certificate,
CertificateOptions,
KeyPair,
TLSVersion,
)
import deluge.configmanager
# A TLS ciphers list.
# Sources for more information on TLS ciphers:
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# This list was inspired by the `urllib3` library
# - https://github.com/urllib3/urllib3/blob/master/urllib3/util/ssl_.py#L79
#
# The general intent is:
# - prefer cipher suites that offer perfect forward secrecy (ECDHE),
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
TLS_CIPHERS = ':'.join(
[
'ECDH+AESGCM',
'ECDH+CHACHA20',
'AES256-GCM-SHA384',
'AES128-GCM-SHA256',
        '!DSS',
        '!aNULL',
'!eNULL',
'!MD5',
]
)
# This value tells OpenSSL to disable all SSL/TLS renegotiation.
SSL_OP_NO_RENEGOTIATION = 0x40000000
def get_context_factory(cert_path, pkey_path):
"""OpenSSL context factory.
Generates an OpenSSL context factory using Twisted's CertificateOptions class.
This will keep a server cipher order.
Args:
cert_path (string): The path to the certificate file
pkey_path (string): The path to the private key file
Returns:
twisted.internet.ssl.CertificateOptions: An OpenSSL context factory
"""
with open(cert_path) as cert:
certificate = Certificate.loadPEM(cert.read()).original
with open(pkey_path) as pkey:
private_key = KeyPair.load(pkey.read(), FILETYPE_PEM).original
ciphers = AcceptableCiphers.fromOpenSSLCipherString(TLS_CIPHERS)
cert_options = CertificateOptions(
privateKey=private_key,
certificate=certificate,
raiseMinimumTo=TLSVersion.TLSv1_2,
acceptableCiphers=ciphers,
)
ctx = cert_options.getContext()
ctx.use_certificate_chain_file(cert_path)
ctx.set_options(SSL_OP_NO_RENEGOTIATION)
return cert_options
def check_ssl_keys():
"""
Check for SSL cert/key and create them if necessary
"""
ssl_dir = deluge.configmanager.get_config_dir('ssl')
if not os.path.exists(ssl_dir):
# The ssl folder doesn't exist so we need to create it
os.makedirs(ssl_dir)
generate_ssl_keys()
else:
for f in ('daemon.pkey', 'daemon.cert'):
if not os.path.exists(os.path.join(ssl_dir, f)):
generate_ssl_keys()
break
def generate_ssl_keys():
"""
This method generates a new SSL key/cert.
"""
digest = 'sha256'
# Generate key pair
pkey = crypto.PKey()
pkey.generate_key(crypto.TYPE_RSA, 2048)
# Generate cert request
req = crypto.X509Req()
subj = req.get_subject()
setattr(subj, 'CN', 'Deluge Daemon')
req.set_pubkey(pkey)
req.sign(pkey, digest)
# Generate certificate
cert = crypto.X509()
cert.set_serial_number(0)
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(60 * 60 * 24 * 365 * 3) # Three Years
cert.set_issuer(req.get_subject())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.sign(pkey, digest)
# Write out files
ssl_dir = deluge.configmanager.get_config_dir('ssl')
with open(os.path.join(ssl_dir, 'daemon.pkey'), 'wb') as _file:
_file.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, pkey))
with open(os.path.join(ssl_dir, 'daemon.cert'), 'wb') as _file:
_file.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))
# Make the files only readable by this user
for f in ('daemon.pkey', 'daemon.cert'):
os.chmod(os.path.join(ssl_dir, f), stat.S_IREAD | stat.S_IWRITE)

View File

@ -1,162 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 John Garland <johnnybg+deluge@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import inspect
import re
import warnings
from functools import wraps
def proxy(proxy_func):
"""
Factory class which returns a decorator that passes
the decorated function to a proxy function
:param proxy_func: the proxy function
:type proxy_func: function
"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
return proxy_func(func, *args, **kwargs)
return wrapper
return decorator
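# Editor's note: minimal illustrative sketch (hypothetical names), not part of
# the original module: every call to _example_greet() is routed through
# _example_log_call(), which receives the wrapped function as first argument.
def _example_log_call(func, *args, **kwargs):
    print('calling %s' % func.__name__)
    return func(*args, **kwargs)
@proxy(_example_log_call)
def _example_greet(name):
    return 'Hello %s' % name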
def overrides(*args):
"""
    Decorator function to specify when class methods override
super class methods.
When used as
@overrides
def funcname
the argument will be the funcname function.
When used as
@overrides(BaseClass)
def funcname
the argument will be the BaseClass
"""
stack = inspect.stack()
if inspect.isfunction(args[0]):
return _overrides(stack, args[0])
else:
# One or more classes are specified, so return a function that will be
# called with the real function as argument
def ret_func(func, **kwargs):
return _overrides(stack, func, explicit_base_classes=args)
return ret_func
def _overrides(stack, method, explicit_base_classes=None):
# stack[0]=overrides, stack[1]=inside class def'n, stack[2]=outside class def'n
classes = {}
derived_class_locals = stack[2][0].f_locals
# Find all super classes
m = re.search(r'class\s(.+)\((.+)\)\s*\:', stack[2][4][0])
class_name = m.group(1)
base_classes = m.group(2)
# Handle multiple inheritance
base_classes = [s.strip() for s in base_classes.split(',')]
check_classes = base_classes
if not base_classes:
raise ValueError(
'overrides decorator: unable to determine base class of class "%s"'
% class_name
)
def get_class(cls_name):
if '.' not in cls_name:
return derived_class_locals[cls_name]
else:
components = cls_name.split('.')
# obj is either a module or a class
obj = derived_class_locals[components[0]]
for c in components[1:]:
assert inspect.ismodule(obj) or inspect.isclass(obj)
obj = getattr(obj, c)
return obj
if explicit_base_classes:
# One or more base classes are explicitly given, check only those classes
override_classes = re.search(r'\s*@overrides\((.+)\)\s*', stack[1][4][0]).group(
1
)
override_classes = [c.strip() for c in override_classes.split(',')]
check_classes = override_classes
for c in base_classes + check_classes:
classes[c] = get_class(c)
# Verify that the explicit override class is one of base classes
if explicit_base_classes:
from itertools import product
for bc, cc in product(base_classes, check_classes):
if issubclass(classes[bc], classes[cc]):
break
else:
raise Exception(
                'Explicit override class "%s" is not a super class of: %s'
% (explicit_base_classes, class_name)
)
if not all(hasattr(classes[cls], method.__name__) for cls in check_classes):
for cls in check_classes:
if not hasattr(classes[cls], method.__name__):
raise Exception(
'Function override "%s" not found in superclass: %s\n%s'
% (
method.__name__,
cls,
'File: %s:%s' % (stack[1][1], stack[1][2]),
)
)
if not any(hasattr(classes[cls], method.__name__) for cls in check_classes):
raise Exception(
'Function override "%s" not found in any superclass: %s\n%s'
% (
method.__name__,
check_classes,
'File: %s:%s' % (stack[1][1], stack[1][2]),
)
)
return method
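# Editor's note: minimal illustrative sketch (hypothetical classes), not part
# of the original module, showing both accepted forms of the overrides decorator.
class _ExampleBase(object):
    def start(self):
        pass
class _ExampleChild(_ExampleBase):
    @overrides
    def start(self):
        pass
class _ExampleChildExplicit(_ExampleBase):
    @overrides(_ExampleBase)
    def start(self):
        pass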
def deprecated(func):
"""This is a decorator which can be used to mark function as deprecated.
It will result in a warning being emitted when the function is used.
"""
@wraps(func)
def depr_func(*args, **kwargs):
warnings.simplefilter('always', DeprecationWarning) # Turn off filter
warnings.warn(
'Call to deprecated function {}.'.format(func.__name__),
category=DeprecationWarning,
stacklevel=2,
)
warnings.simplefilter('default', DeprecationWarning) # Reset filter
return func(*args, **kwargs)
return depr_func
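# Editor's note: minimal illustrative sketch (hypothetical function), not part
# of the original module; calling _example_old_api() emits a DeprecationWarning
# pointing at the caller (stacklevel=2).
@deprecated
def _example_old_api():
    return 42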

View File

@ -1,97 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2011 Pedro Algarvio <pedro@algarvio.me>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
class DelugeError(Exception):
def __new__(cls, *args, **kwargs):
inst = super(DelugeError, cls).__new__(cls, *args, **kwargs)
inst._args = args
inst._kwargs = kwargs
return inst
def __init__(self, message=None):
super(DelugeError, self).__init__(message)
self.message = message
def __str__(self):
return self.message
class DaemonRunningError(DelugeError):
pass
class InvalidTorrentError(DelugeError):
pass
class AddTorrentError(DelugeError):
pass
class InvalidPathError(DelugeError):
pass
class WrappedException(DelugeError):
def __init__(self, message, exception_type, traceback):
super(WrappedException, self).__init__(message)
self.type = exception_type
self.traceback = traceback
def __str__(self):
return '%s\n%s' % (self.message, self.traceback)
class _ClientSideRecreateError(DelugeError):
pass
class IncompatibleClient(_ClientSideRecreateError):
def __init__(self, daemon_version):
self.daemon_version = daemon_version
msg = (
'Your deluge client is not compatible with the daemon. '
'Please upgrade your client to %(daemon_version)s'
) % {'daemon_version': self.daemon_version}
super(IncompatibleClient, self).__init__(message=msg)
class NotAuthorizedError(_ClientSideRecreateError):
def __init__(self, current_level, required_level):
msg = ('Auth level too low: %(current_level)s < %(required_level)s') % {
'current_level': current_level,
'required_level': required_level,
}
super(NotAuthorizedError, self).__init__(message=msg)
self.current_level = current_level
self.required_level = required_level
class _UsernameBasedPasstroughError(_ClientSideRecreateError):
def __init__(self, message, username):
super(_UsernameBasedPasstroughError, self).__init__(message)
self.username = username
class BadLoginError(_UsernameBasedPasstroughError):
pass
class AuthenticationRequired(_UsernameBasedPasstroughError):
pass
class AuthManagerError(_UsernameBasedPasstroughError):
pass
class LibtorrentImportError(ImportError):
pass

View File

@ -1,320 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
"""
Event module.
This module describes the types of events that can be generated by the daemon
and subsequently emitted to the clients.
"""
known_events = {}
class DelugeEventMetaClass(type):
"""
This metaclass simply keeps a list of all events classes created.
"""
def __init__(cls, name, bases, dct): # pylint: disable=bad-mcs-method-argument
super(DelugeEventMetaClass, cls).__init__(name, bases, dct)
if name != 'DelugeEvent':
known_events[name] = cls
class DelugeEvent(metaclass=DelugeEventMetaClass):
"""
The base class for all events.
    :prop name: this is the name of the class which is in turn the event name
:type name: string
:prop args: a list of the attribute values
:type args: list
"""
def _get_name(self):
return self.__class__.__name__
def _get_args(self):
if not hasattr(self, '_args'):
return []
return self._args
name = property(fget=_get_name)
args = property(fget=_get_args)
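# Editor's note: illustrative sketch of defining a new event (the class below
# is hypothetical, not part of the original module). The metaclass registers it
# in known_events and EventManager.emit() forwards it to interested clients.
class ExampleRateLimitReachedEvent(DelugeEvent):
    """
    Emitted when ... (hypothetical example event).
    """
    def __init__(self, torrent_id, rate):
        self._args = [torrent_id, rate]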
class TorrentAddedEvent(DelugeEvent):
"""
Emitted when a new torrent is successfully added to the session.
"""
def __init__(self, torrent_id, from_state):
"""
:param torrent_id: the torrent_id of the torrent that was added
:type torrent_id: string
:param from_state: was the torrent loaded from state? Or is it a new torrent.
:type from_state: bool
"""
self._args = [torrent_id, from_state]
class TorrentRemovedEvent(DelugeEvent):
"""
Emitted when a torrent has been removed from the session.
"""
def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
"""
self._args = [torrent_id]
class PreTorrentRemovedEvent(DelugeEvent):
"""
Emitted when a torrent is about to be removed from the session.
"""
def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
"""
self._args = [torrent_id]
class TorrentStateChangedEvent(DelugeEvent):
"""
Emitted when a torrent changes state.
"""
def __init__(self, torrent_id, state):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
:param state: the new state
:type state: string
"""
self._args = [torrent_id, state]
class TorrentTrackerStatusEvent(DelugeEvent):
"""
Emitted when a torrents tracker status changes.
"""
def __init__(self, torrent_id, status):
"""
Args:
torrent_id (str): the torrent_id
status (str): the new status
"""
self._args = [torrent_id, status]
class TorrentQueueChangedEvent(DelugeEvent):
"""
Emitted when the queue order has changed.
"""
pass
class TorrentFolderRenamedEvent(DelugeEvent):
"""
Emitted when a folder within a torrent has been renamed.
"""
def __init__(self, torrent_id, old, new):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
:param old: the old folder name
:type old: string
:param new: the new folder name
:type new: string
"""
self._args = [torrent_id, old, new]
class TorrentFileRenamedEvent(DelugeEvent):
"""
Emitted when a file within a torrent has been renamed.
"""
def __init__(self, torrent_id, index, name):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
:param index: the index of the file
:type index: int
:param name: the new filename
:type name: string
"""
self._args = [torrent_id, index, name]
class TorrentFinishedEvent(DelugeEvent):
"""
Emitted when a torrent finishes downloading.
"""
def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
"""
self._args = [torrent_id]
class TorrentResumedEvent(DelugeEvent):
"""
Emitted when a torrent resumes from a paused state.
"""
def __init__(self, torrent_id):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
"""
self._args = [torrent_id]
class TorrentFileCompletedEvent(DelugeEvent):
"""
Emitted when a file completes.
"""
def __init__(self, torrent_id, index):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
:param index: the file index
:type index: int
"""
self._args = [torrent_id, index]
class TorrentStorageMovedEvent(DelugeEvent):
"""
Emitted when the storage location for a torrent has been moved.
"""
def __init__(self, torrent_id, path):
"""
:param torrent_id: the torrent_id
:type torrent_id: string
:param path: the new location
:type path: string
"""
self._args = [torrent_id, path]
class CreateTorrentProgressEvent(DelugeEvent):
"""
Emitted when creating a torrent file remotely.
"""
def __init__(self, piece_count, num_pieces):
self._args = [piece_count, num_pieces]
class NewVersionAvailableEvent(DelugeEvent):
"""
Emitted when a more recent version of Deluge is available.
"""
def __init__(self, new_release):
"""
:param new_release: the new version that is available
:type new_release: string
"""
self._args = [new_release]
class SessionStartedEvent(DelugeEvent):
"""
Emitted when a session has started. This typically only happens once when
the daemon is initially started.
"""
pass
class SessionPausedEvent(DelugeEvent):
"""
Emitted when the session has been paused.
"""
pass
class SessionResumedEvent(DelugeEvent):
"""
Emitted when the session has been resumed.
"""
pass
class ConfigValueChangedEvent(DelugeEvent):
"""
Emitted when a config value changes in the Core.
"""
def __init__(self, key, value):
"""
:param key: the key that changed
:type key: string
:param value: the new value of the key
"""
self._args = [key, value]
class PluginEnabledEvent(DelugeEvent):
"""
Emitted when a plugin is enabled in the Core.
"""
def __init__(self, plugin_name):
self._args = [plugin_name]
class PluginDisabledEvent(DelugeEvent):
"""
Emitted when a plugin is disabled in the Core.
"""
def __init__(self, plugin_name):
self._args = [plugin_name]
class ClientDisconnectedEvent(DelugeEvent):
"""
Emitted when a client disconnects.
"""
def __init__(self, session_id):
self._args = [session_id]
class ExternalIPEvent(DelugeEvent):
"""
Emitted when the external IP address is received from libtorrent.
"""
def __init__(self, external_ip):
"""
Args:
external_ip (str): The IP address.
"""
self._args = [external_ip]
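# A hedged usage sketch, not part of the original module. It assumes the
# daemon-side EventManager component and the UI client's register_event_handler
# API, both of which live outside this file; the exact calls are illustrative.
def _example_emit_event():
    # Daemon side: core components hand an event instance to the EventManager,
    # which forwards it to interested clients under the event's name.
    from deluge import component
    component.get('EventManager').emit(TorrentFinishedEvent('abc123'))


def _example_handle_event():
    # Client side: a UI subscribes to an event by name and receives its args.
    from deluge.ui.client import client

    def on_torrent_finished(torrent_id):
        print('Torrent finished:', torrent_id)

    client.register_event_handler('TorrentFinishedEvent', on_torrent_finished)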

View File

@ -1,331 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009 Andrew Resch <andrewresch@gmail.com>
#
# This file is part of Deluge and is licensed under GNU General Public License 3.0, or later, with
# the additional special exception to link portions of this program with the OpenSSL library.
# See LICENSE for more details.
#
import cgi
import logging
import os.path
import zlib
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.python.failure import Failure
from twisted.web import client, http
from twisted.web._newclient import HTTPClientParser
from twisted.web.error import PageRedirect
from twisted.web.http_headers import Headers
from twisted.web.iweb import IAgent
from zope.interface import implementer
from deluge.common import get_version
log = logging.getLogger(__name__)
class CompressionDecoder(client.GzipDecoder):
"""A compression decoder for gzip, x-gzip and deflate."""
def deliverBody(self, protocol): # NOQA: N802
self.original.deliverBody(CompressionDecoderProtocol(protocol, self.original))
class CompressionDecoderProtocol(client._GzipProtocol):
"""A compression decoder protocol for CompressionDecoder."""
def __init__(self, protocol, response):
super(CompressionDecoderProtocol, self).__init__(protocol, response)
self._zlibDecompress = zlib.decompressobj(32 + zlib.MAX_WBITS)
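# A small illustration, not part of the original module: passing
# 32 + zlib.MAX_WBITS as wbits lets a single decompressobj accept both zlib-
# and gzip-framed data, which is why the protocol above overrides it.
def _example_auto_detect_decompress():
    import gzip
    gzipped = gzip.compress(b'hello world')            # gzip-framed payload
    decomp = zlib.decompressobj(32 + zlib.MAX_WBITS)   # auto-detects the framing
    return decomp.decompress(gzipped)                  # b'hello world'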
class BodyHandler(HTTPClientParser, object):
"""An HTTP parser that saves the response to a file."""
def __init__(self, request, finished, length, agent, encoding=None):
"""BodyHandler init.
Args:
request (t.w.i.IClientRequest): The parser request.
finished (Deferred): A Deferred to handle the finished response.
length (int): The length of the response.
agent (t.w.i.IAgent): The agent from which the request was sent.
encoding (str, optional): The encoding of a text response body; when set,
the saved file is re-encoded as UTF-8.
"""
super(BodyHandler, self).__init__(request, finished)
self.agent = agent
self.finished = finished
self.total_length = length
self.current_length = 0
self.data = b''
self.encoding = encoding
def dataReceived(self, data): # NOQA: N802
self.current_length += len(data)
self.data += data
if self.agent.part_callback:
self.agent.part_callback(data, self.current_length, self.total_length)
def connectionLost(self, reason): # NOQA: N802
if self.encoding:
self.data = self.data.decode(self.encoding).encode('utf8')
with open(self.agent.filename, 'wb') as _file:
_file.write(self.data)
self.finished.callback(self.agent.filename)
self.state = u'DONE'
HTTPClientParser.connectionLost(self, reason)
@implementer(IAgent)
class HTTPDownloaderAgent(object):
"""A File Downloader Agent."""
def __init__(
self,
agent,
filename,
part_callback=None,
force_filename=False,
allow_compression=True,
handle_redirect=True,
):
"""HTTPDownloaderAgent init.
Args:
agent (t.w.c.Agent): The agent which will send the requests.
filename (str): The filename to save the file as.
force_filename (bool): Forces use of the supplied filename,
regardless of header content.
part_callback (func): A function to be called when a part of data
is received; its signature should be:
func(data, current_length, total_length)
allow_compression (bool): Allows gzip & deflate decoding.
handle_redirect (bool): Whether HTTP redirects are handled automatically.
"""
self.handle_redirect = handle_redirect
self.agent = agent
self.filename = filename
self.part_callback = part_callback
self.force_filename = force_filename
self.allow_compression = allow_compression
self.decoder = None
def request_callback(self, response):
finished = Deferred()
if not self.handle_redirect and response.code in (
http.MOVED_PERMANENTLY,
http.FOUND,
http.SEE_OTHER,
http.TEMPORARY_REDIRECT,
):
location = response.headers.getRawHeaders(b'location')[0]
error = PageRedirect(response.code, location=location)
finished.errback(Failure(error))
else:
headers = response.headers
body_length = int(headers.getRawHeaders(b'content-length', default=[0])[0])
if headers.hasHeader(b'content-disposition') and not self.force_filename:
content_disp = headers.getRawHeaders(b'content-disposition')[0].decode(
'utf-8'
)
content_disp_params = cgi.parse_header(content_disp)[1]
if 'filename' in content_disp_params:
new_file_name = content_disp_params['filename']
new_file_name = sanitise_filename(new_file_name)
new_file_name = os.path.join(
os.path.split(self.filename)[0], new_file_name
)
count = 1
fileroot = os.path.splitext(new_file_name)[0]
fileext = os.path.splitext(new_file_name)[1]
while os.path.isfile(new_file_name):
# Increment filename if already exists
new_file_name = '%s-%s%s' % (fileroot, count, fileext)
count += 1
self.filename = new_file_name
cont_type_header = headers.getRawHeaders(b'content-type')[0].decode()
cont_type, params = cgi.parse_header(cont_type_header)
# Only re-encode text content types.
encoding = None
if cont_type.startswith('text/'):
encoding = params.get('charset', None)
response.deliverBody(
BodyHandler(response.request, finished, body_length, self, encoding)
)
return finished
def request(self, method, uri, headers=None, body_producer=None):
"""Issue a new request to the wrapped agent.
Args:
method (bytes): The HTTP method to use.
uri (bytes): The url to download from.
headers (t.w.h.Headers, optional): Any extra headers to send.
body_producer (t.w.i.IBodyProducer, optional): Request body data.
Returns:
Deferred: The filename of the downloaded file.
"""
if headers is None:
headers = Headers()
if not headers.hasHeader(b'User-Agent'):
version = get_version()
user_agent = 'Deluge/%s (https://deluge-torrent.org)' % version
headers.addRawHeader('User-Agent', user_agent)
d = self.agent.request(
method=method, uri=uri, headers=headers, bodyProducer=body_producer
)
d.addCallback(self.request_callback)
return d
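# A hedged illustration, not part of the original module, of the
# Content-Disposition handling in request_callback above: cgi.parse_header
# splits the header into a disposition and its parameters, and the filename
# parameter is then sanitised. The header value below is made up.
def _example_parse_content_disposition():
    content_disp = 'attachment; filename="ubuntu-20.04.iso.torrent"'
    disposition, params = cgi.parse_header(content_disp)
    # disposition == 'attachment'; params == {'filename': 'ubuntu-20.04.iso.torrent'}
    return sanitise_filename(params['filename'])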
def sanitise_filename(filename):
"""Sanitises a filename to use as a download destination file.
Logs any filenames that could be considered malicious.
Args:
filename (str): The filename to sanitise.
Returns:
str: The sanitised filename.
"""
# Remove any quotes
filename = filename.strip('\'"')
if os.path.basename(filename) != filename:
# Dodgy server, log it
log.warning(
'Potentially malicious server: trying to write to file: %s', filename
)
# Only use the basename
filename = os.path.basename(filename)
filename = filename.strip()
if filename.startswith('.') or ';' in filename or '|' in filename:
# Dodgy server, log it
log.warning(
'Potentially malicious server: trying to write to file: %s', filename
)
return filename
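# A brief illustration, not part of the original module, of what
# sanitise_filename is expected to do with a clean name and with a
# path-traversal attempt (the latter is logged as potentially malicious).
def _example_sanitise_filename():
    assert sanitise_filename('"ubuntu.torrent"') == 'ubuntu.torrent'  # quotes stripped
    assert sanitise_filename('../../etc/passwd') == 'passwd'          # path dropped, warning logged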
def _download_file(
url,
filename,
callback=None,
headers=None,
force_filename=False,
allow_compression=True,
handle_redirects=True,
):
"""Downloads a file from a specific URL and returns a Deferred.
A callback function can be specified to be called as parts are received.
Args:
url (str): The url to download from.
filename (str): The filename to save the file as.
callback (func): A function to be called when partial data is received,
its signature should be: func(data, current_length, total_length)
headers (dict): Any optional headers to send.
force_filename (bool): Force using the filename specified rather than
one the server may suggest.
allow_compression (bool): Allows gzip & deflate decoding.
handle_redirects (bool): Whether HTTP redirects are handled automatically.
Returns:
Deferred: The filename of the downloaded file.
Raises:
t.w.e.PageRedirect: If handle_redirects is False.
t.w.e.Error: For all other HTTP response errors.
"""
agent = client.Agent(reactor)
if allow_compression:
enc_accepted = ['gzip', 'x-gzip', 'deflate']
decoders = [(enc.encode(), CompressionDecoder) for enc in enc_accepted]
agent = client.ContentDecoderAgent(agent, decoders)
if handle_redirects:
agent = client.RedirectAgent(agent)
agent = HTTPDownloaderAgent(
agent, filename, callback, force_filename, allow_compression, handle_redirects
)
# The Headers init expects dict values to be a list.
if headers:
for name, value in list(headers.items()):
if not isinstance(value, list):
headers[name] = [value]
return agent.request(b'GET', url.encode(), Headers(headers))
def download_file(
url,
filename,
callback=None,
headers=None,
force_filename=False,
allow_compression=True,
handle_redirects=True,
):
"""Downloads a file from a specific URL and returns a Deferred.
A callback function can be specified to be called as parts are received.
Args:
url (str): The url to download from.
filename (str): The filename to save the file as.
callback (func): A function to be called when partial data is received,
its signature should be: func(data, current_length, total_length).
headers (dict): Any optional headers to send.
force_filename (bool): Force the filename specified rather than one the
server may suggest.
allow_compression (bool): Allows gzip & deflate decoding.
handle_redirects (bool): Whether HTTP redirects are handled automatically.
Returns:
Deferred: The filename of the downloaded file.
Raises:
t.w.e.PageRedirect: If handle_redirects is False.
t.w.e.Error: For all other HTTP response errors.
"""
def on_download_success(result):
log.debug('Download success!')
return result
def on_download_fail(failure):
log.warning(
'Error occurred downloading file from "%s": %s',
url,
failure.getErrorMessage(),
)
result = failure
return result
d = _download_file(
url,
filename,
callback=callback,
headers=headers,
force_filename=force_filename,
allow_compression=allow_compression,
handle_redirects=handle_redirects,
)
d.addCallbacks(on_download_success, on_download_fail)
return d
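# A hedged usage sketch, not part of the original module. The URL and the
# destination path are made up, and a running Twisted reactor is assumed,
# since the returned Deferred only fires while the reactor is running.
def _example_download_with_progress():
    def on_part(data, current_length, total_length):
        # Matches the documented callback signature.
        if total_length:
            print('%.1f%% downloaded' % (100.0 * current_length / total_length))

    d = download_file(
        'http://example.com/example.torrent',   # hypothetical URL
        '/tmp/example.torrent',                 # hypothetical destination
        callback=on_part,
        headers={'Accept': 'application/x-bittorrent'},
    )
    d.addCallback(lambda path: log.info('Saved to %s', path))
    return d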

View File

@ -1,15 +0,0 @@
from .util import (
I18N_DOMAIN,
get_languages,
set_language,
setup_mock_translation,
setup_translation,
)
__all__ = [
'I18N_DOMAIN',
'set_language',
'get_languages',
'setup_translation',
'setup_mock_translation',
]

View File

@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
#
# This file is public domain.
#
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Deferred translation
def _(message):
return message
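# A hedged illustration, not part of the original file: the no-op _() above
# only marks strings so translation tools can extract them; the application is
# assumed to translate them later at runtime, for example with gettext. The
# 'deluge' domain below is an assumption used only for illustration.
def _example_translate(language_name):
    import gettext
    translation = gettext.translation('deluge', fallback=True)
    return translation.gettext(language_name)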
# Languages we provide translations for, out of the box.
LANGUAGES = {
'af': _('Afrikaans'),
'ar': _('Arabic'),
'ast': _('Asturian'),
'az': _('Azerbaijani'),
'bg': _('Bulgarian'),
'be': _('Belarusian'),
'bn': _('Bengali'),
'br': _('Breton'),
'bs': _('Bosnian'),
'ca': _('Catalan'),
'cs': _('Czech'),
'cy': _('Welsh'),
'da': _('Danish'),
'de': _('German'),
'el': _('Greek'),
'en': _('English'),
'en_AU': _('English (Australia)'),
'en_CA': _('English (Canada)'),
'en_GB': _('English (United Kingdom)'),
'eo': _('Esperanto'),
'es': _('Spanish'),
'es-ar': _('Argentinian Spanish'),
'es-mx': _('Mexican Spanish'),
'es-ni': _('Nicaraguan Spanish'),
'es-ve': _('Venezuelan Spanish'),
'et': _('Estonian'),
'eu': _('Basque'),
'fa': _('Persian'),
'fi': _('Finnish'),
'fr': _('French'),
'fy': _('Frisian'),
'ga': _('Irish'),
'gl': _('Galician'),
'he': _('Hebrew'),
'hi': _('Hindi'),
'hr': _('Croatian'),
'hu': _('Hungarian'),
'ia': _('Interlingua'),
'id': _('Indonesian'),
'is': _('Icelandic'),
'it': _('Italian'),
'iu': _('Inuktitut'),
'ja': _('Japanese'),
'ka': _('Georgian'),
'kk': _('Kazakh'),
'km': _('Khmer'),
'kn': _('Kannada'),
'ko': _('Korean'),
'ku': _('Kurdish'),
'la': _('Latin'),
'lb': _('Luxembourgish'),
'lt': _('Lithuanian'),
'lv': _('Latvian'),
'mk': _('Macedonian'),
'ml': _('Malayalam'),
'mn': _('Mongolian'),
'ms': _('Malay'),
'my': _('Burmese'),
'nb': _('Norwegian Bokmal'),
'ne': _('Nepali'),
'nds': _('Low German'),
'nl': _('Dutch'),
'nn': _('Norwegian Nynorsk'),
'os': _('Ossetic'),
'pa': _('Punjabi'),
'pl': _('Polish'),
'pms': _('Piedmontese'),
'pt': _('Portuguese'),
'pt_BR': _('Brazilian Portuguese'),
'ro': _('Romanian'),
'ru': _('Russian'),
'sk': _('Slovak'),
'sl': _('Slovenian'),
'si': _('Sinhalese'),
'sq': _('Albanian'),
'sr': _('Serbian'),
'sr-latn': _('Serbian Latin'),
'sv': _('Swedish'),
'sw': _('Swahili'),
'ta': _('Tamil'),
'te': _('Telugu'),
'th': _('Thai'),
'tl': _('Tagalog'),
'tlh': _('Klingon'),
'tr': _('Turkish'),
'tt': _('Tatar'),
'udm': _('Udmurt'),
'uk': _('Ukrainian'),
'ur': _('Urdu'),
'vi': _('Vietnamese'),
'zh_CN': _('Chinese (Simplified)'),
'zh_HK': _('Chinese (Hong Kong)'),
'zh-hans': _('Simplified Chinese'),
'zh-hant': _('Traditional Chinese'),
'zh_TW': _('Chinese (Taiwan)'),
}
del _

Some files were not shown because too many files have changed in this diff.