Merge lp:~rcart/ubuntu/natty/bittornado/fix-420387 into lp:ubuntu/natty/bittornado

Proposed by Ronny Cardona
Status: Merged
Merge reported by: Daniel Holbach
Merged at revision: not available
Proposed branch: lp:~rcart/ubuntu/natty/bittornado/fix-420387
Merge into: lp:ubuntu/natty/bittornado
Diff against target: 28069 lines (+1407/-24161)
80 files modified
.pc/01_MANIFEST.in_remove_broken_cruft.dpatch/setup.py (+0/-28)
.pc/02_btdownloadcurses_increase_significant_digit.dpatch/btdownloadcurses.py (+0/-407)
.pc/05_bttrack_connerr_fix.dpatch/BitTornado/BT1/track.py (+0/-1137)
.pc/06_README_portchange.dpatch/README.txt (+0/-110)
.pc/07_change_report_address.dpatch/BitTornado/__init__.py (+0/-63)
.pc/08_btdownloadcurses_indent.dpatch/btdownloadcurses.py (+0/-407)
.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Connecter.py (+0/-328)
.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Encrypter.py (+0/-657)
.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Storage.py (+0/-584)
.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/StreamCheck.py (+0/-135)
.pc/09_timtuckerfixes.dpatch/BitTornado/ConfigDir.py (+0/-401)
.pc/09_timtuckerfixes.dpatch/BitTornado/RawServer.py (+0/-195)
.pc/09_timtuckerfixes.dpatch/BitTornado/clock.py (+0/-27)
.pc/09_timtuckerfixes.dpatch/BitTornado/download_bt1.py (+0/-877)
.pc/09_timtuckerfixes.dpatch/BitTornado/launchmanycore.py (+0/-381)
.pc/10_removeCVScrud.dpatch/.cvsignore (+0/-4)
.pc/10_removeCVScrud.dpatch/BitTornado/.cvsignore (+0/-4)
.pc/10_removeCVScrud.dpatch/BitTornado/BT1/.cvsignore (+0/-4)
.pc/11_sorthashcheck.dpatch/BitTornado/launchmanycore.py (+0/-389)
.pc/12_fix_guis_for_2.6.dpatch/btcompletedirgui.py (+0/-192)
.pc/12_fix_guis_for_2.6.dpatch/btmaketorrentgui.py (+0/-353)
.pc/13_fix_btcompletedirgui_bug.dpatch/btcompletedirgui.py (+0/-192)
.pc/15_fix_unicode_in_makemetafile.py.dpatch/BitTornado/BT1/makemetafile.py (+0/-263)
.pc/16_fix_ipv6_in_SocketHandler.dpatch/BitTornado/SocketHandler.py (+0/-375)
.pc/17_fix_NatCheck_bufferlen_error.dpatch/BitTornado/BT1/NatCheck.py (+0/-219)
.pc/18_fix_launchmany_encrypter.dpatch/BitTornado/BT1/Encrypter.py (+0/-646)
.pc/19_fix_tracker_return_all.dpatch/BitTornado/BT1/track.py (+0/-1137)
.pc/20_tracker_cache_minor_fix.dpatch/BitTornado/BT1/track.py (+0/-1138)
.pc/21_remove_deprecated_wxPython_usage.dpatch/BitTornado/ConfigReader.py (+0/-1195)
.pc/21_remove_deprecated_wxPython_usage.dpatch/bt-t-make.py (+0/-1063)
.pc/21_remove_deprecated_wxPython_usage.dpatch/btcompletedirgui.py (+0/-192)
.pc/21_remove_deprecated_wxPython_usage.dpatch/btdownloadgui.py (+0/-2373)
.pc/21_remove_deprecated_wxPython_usage.dpatch/btmaketorrentgui.py (+0/-353)
.pc/22_fix_makemetafile_error-handling.dpatch/BitTornado/BT1/makemetafile.py (+0/-264)
.pc/23_remove_UPnP_options.dpatch/BitTornado/download_bt1.py (+0/-871)
.pc/23_remove_UPnP_options.dpatch/BitTornado/launchmanycore.py (+0/-390)
.pc/23_remove_UPnP_options.dpatch/btdownloadcurses.py (+0/-408)
.pc/23_remove_UPnP_options.dpatch/btdownloadgui.py (+0/-2368)
.pc/23_remove_UPnP_options.dpatch/btdownloadheadless.py (+0/-244)
.pc/24_clarify_ip_parameter.dpatch/README.txt (+0/-110)
.pc/25_errors_in_error_handling.dpatch/btdownloadcurses.py (+0/-408)
.pc/25_errors_in_error_handling.dpatch/btdownloadheadless.py (+0/-244)
.pc/27_remove_btdownloadheadless_curses_dependency.dpatch/btdownloadheadless.py (+0/-246)
.pc/28_float_mod_time_fix.dpatch/BitTornado/parsedir.py (+0/-150)
.pc/29_fix_urandom_error.dpatch/BitTornado/BTcrypto.py (+0/-103)
.pc/30_announce_list_only_torrents.dpatch/BitTornado/BT1/btformats.py (+0/-100)
.pc/30_announce_list_only_torrents.dpatch/btshowmetainfo.py (+0/-78)
.pc/31_fix_for_compact_reqd_off.dpatch/BitTornado/BT1/track.py (+0/-1143)
.pc/applied-patches (+0/-27)
BitTornado/BT1/Connecter.py (+0/-1)
BitTornado/BT1/Encrypter.py (+13/-4)
BitTornado/BT1/NatCheck.py (+3/-0)
BitTornado/BT1/Storage.py (+14/-18)
BitTornado/BT1/StreamCheck.py (+16/-1)
BitTornado/BT1/btformats.py (+3/-17)
BitTornado/BT1/makemetafile.py (+2/-3)
BitTornado/BT1/track.py (+13/-22)
BitTornado/BTcrypto.py (+0/-1)
BitTornado/ConfigDir.py (+17/-2)
BitTornado/ConfigReader.py (+231/-226)
BitTornado/RawServer.py (+2/-4)
BitTornado/SocketHandler.py (+1/-1)
BitTornado/__init__.py (+1/-1)
BitTornado/clock.py (+2/-5)
BitTornado/download_bt1.py (+13/-4)
BitTornado/launchmanycore.py (+5/-14)
BitTornado/parsedir.py (+1/-2)
README.txt (+4/-4)
bt-t-make.py (+255/-255)
btcompletedirgui.py (+56/-56)
btdownloadcurses.py (+6/-11)
btdownloadgui.py (+356/-351)
btdownloadheadless.py (+10/-4)
btmaketorrentgui.py (+106/-106)
btshowmetainfo.py (+54/-63)
debian/changelog (+8/-0)
debian/control (+2/-1)
debian/patches/32_use_hashlib_for_sha.patch (+210/-0)
debian/patches/series (+1/-0)
setup.py (+2/-1)
To merge this branch: bzr merge lp:~rcart/ubuntu/natty/bittornado/fix-420387
Reviewer Review Type Date Requested Status
Daniel Holbach Approve
Artur Rona (community) Approve
Ubuntu branches Pending
Review via email: mp+46878@code.launchpad.net

Description of the change

 * debian/patches/32_use_hashlib_for_sha.patch:
   - Updated use of deprecated sha module to hashlib. (LP: #420387, Closes: #593653)

To post a comment you must log in.
Revision history for this message
Scott Moser (smoser) wrote :

(copied from bug comment)

The changes look fine to me.
However, I don't think that your removal of the CVS directory will actually "stick". As I understand it, the bzr importer that maintains the lp:ubuntu/<package> branches basically does:

dget <uploaded package>
dpkg-source -x *.dsc

In doing so, the CVS dir will still be there. I'd advise to either:
 - patch it out (with a debian/patches/32-remove-CVS-dir.patch -- this could be a pain with an active upstream)
 - live with being unable to use debcommit
 - fix debcommit to prefer .bzr over CVS

Revision history for this message
Artur Rona (ari-tczew) wrote :

1) Please don't change Standards-Version and other lintian warnings if it's not necessary. This is the place for Debian.

2) According to above, don't mention about fixing lintian warnings in debian/changelog.

3) We don't mention about update-maintainer field in debian/changelog.

4) Please add DEP3 tags to your patch. https://wiki.ubuntu.com/PackagingGuide/PatchSystems#Patch%20Tagging%20Guidelines

review: Needs Fixing
Revision history for this message
Artur Rona (ari-tczew) wrote :

5) Package is in quilt 3.0 source format, so please change your patch filename to *.patch, not *.dpatch and update series file. Debian should update their files.

6) I would like to see following entry in debian/changelog:

  * debian/patches/32_use_hashlib_for_sha.patch:
    - Updated use of deprecated sha module to hashlib. (LP: #420387)
  * Removed old CVS dir so it can use .bzr dir for Bazaar.

7) What about forwarding this patch to Debian?

Thank you for your contribution!

review: Needs Fixing
Revision history for this message
Artur Rona (ari-tczew) wrote :

There are files patched directly. Could you try to clean up? also .pc files should be removed.

review: Needs Fixing
Revision history for this message
Ronny Cardona (rcart) wrote :

Thanks for the corrections and time, Artur. I've updated the branch right now. All the patches are directly applied by default in the original branch, so I've cleaned it up.

Revision history for this message
Artur Rona (ari-tczew) wrote :

Now looks better, but still some issues:

1) Please use revision 0.3.18-10ubuntu1 and below (LP: #420387, Closes: #593653)

2) Improve DEP3 tags:
Bug-Debian: http://bugs.debian.org/593653
Add dot at the end of sentence in Description ;)
About Origin: I saw on the Debian bug that patch comes from Git - could you research the git address and get a link to this revision?

review: Needs Fixing
Revision history for this message
Artur Rona (ari-tczew) wrote :

BTW package built fine.

Revision history for this message
Ronny Cardona (rcart) wrote :

> BTW package built fine.
Great. Now I’m looking for the git link, hopefully get it right away to update the branch ^^

Revision history for this message
Ronny Cardona (rcart) wrote :

Branch updated.

About the git address; I didn't find it. It seems like Debian uses svn and upstream cvs in the project.

Looks like the patch author worked in a local git branch to generate the patch, not sure about that :/

By the way, why use that Bittornado revision on Ubuntu? Is it due to the current status of the development cycle?

Thanks in advance

Revision history for this message
Artur Rona (ari-tczew) wrote :

Please add DEP3 tag to 32*.patch:
Bug-Ubuntu: https://launchpad.net/bugs/420387

If you have this done, I'll approve.

review: Needs Fixing
Revision history for this message
Artur Rona (ari-tczew) wrote :
7. By Ronny Cardona

* debian/patches/32_use_hashlib_for_sha.patch:
  - Updated use of deprecated sha module to hashlib. (LP: #420387,
  Closes: #593653)

Revision history for this message
Ronny Cardona (rcart) wrote :

Branch updated. I hope that it's ready.

Thanks for all your corrections.

Revision history for this message
Artur Rona (ari-tczew) wrote :

OK, now core-dev turn.

review: Approve
Revision history for this message
Daniel Holbach (dholbach) wrote :
Download full text (9.6 KiB)

I'm not quite sure what happened in this branch, but I uploaded what I extracted as the minimal diff.

--- bittornado-0.3.18//debian/changelog 2010-03-21 22:36:58.000000000 +0100
+++ bittornado/debian/changelog 2011-02-09 10:11:50.020853000 +0100
@@ -1,3 +1,11 @@
+bittornado (0.3.18-10ubuntu1) natty; urgency=low
+
+ * debian/patches/32_use_hashlib_for_sha.patch:
+ - Updated use of deprecated sha module to hashlib. (LP: #420387,
+ Closes: #593653)
+
+ -- Ronny Cardona (Rcart) <email address hidden> Mon, 24 Jan 2011 17:27:47 -0600
+
 bittornado (0.3.18-10) unstable; urgency=low

   * New patch from upstream's CVS to allow torrents that only have an
--- bittornado-0.3.18//debian/control 2010-03-21 22:16:54.000000000 +0100
+++ bittornado/debian/control 2011-02-09 10:11:50.020853000 +0100
@@ -1,7 +1,8 @@
 Source: bittornado
 Section: net
 Priority: optional
-Maintainer: Cameron Dale <email address hidden>
+Maintainer: Ubuntu Developers <email address hidden>
+XSBC-Original-Maintainer: Cameron Dale <email address hidden>
 Build-Depends: debhelper (>= 5.0.37.2)
 Build-Depends-Indep: python, python-support (>= 0.5.4), docbook-to-man
 Standards-Version: 3.8.4
--- bittornado-0.3.18//debian/patches/32_use_hashlib_for_sha.patch 1970-01-01 01:00:00.000000000 +0100
+++ bittornado/debian/patches/32_use_hashlib_for_sha.patch 2011-02-09 10:11:50.020853000 +0100
@@ -0,0 +1,210 @@
+From: Ronny Cardona (Rcart) <email address hidden>
+Description: Updated use of deprecated sha module to hashlib.
+Origin: http://bugs.debian.org/593653#17
+Bug-Debian: http://bugs.debian.org/593653
+Bug-Ubuntu: https://launchpad.net/bugs/420387
+
+Index: bittornado.fix-420387/BitTornado/BT1/makemetafile.py
+===================================================================
+--- bittornado.fix-420387.orig/BitTornado/BT1/makemetafile.py 2011-01-24 17:18:09.183076000 -0600
++++ bittornado.fix-420387/BitTornado/BT1/makemetafile.py 2011-01-24 17:18:55.483076002 -0600
+@@ -4,7 +4,10 @@
+
+ from os.path import getsize, split, join, abspath, isdir
+ from os import listdir
+-from sha import sha
++try:
++ from hashlib import sha1 as sha
++except ImportError:
++ from sha import sha
+ from copy import copy
+ from string import strip
+ from BitTornado.bencode import bencode
+Index: bittornado.fix-420387/BitTornado/BT1/Rerequester.py
+===================================================================
+--- bittornado.fix-420387.orig/BitTornado/BT1/Rerequester.py 2011-01-24 17:18:09.083076000 -0600
++++ bittornado.fix-420387/BitTornado/BT1/Rerequester.py 2011-01-24 17:18:55.483076002 -0600
+@@ -12,7 +12,10 @@
+ from traceback import print_exc
+ from socket import error, gethostbyname
+ from random import shuffle
+-from sha import sha
++try:
++ from hashlib import sha1 as sha
++except ImportError:
++ from sha import sha
+ from time import time
+ try:
+ from os import getpid
+Index: bittornado.fix-420387/BitTornado/BT1/StorageWrapper.py
+===================================================================
+--- bittornado.fix-420387.orig/BitTornado/BT1/StorageWrapper.py 2011-01-24 17:18:09.383076000 -0600
++++ bittornado.fix-420387/BitTornado/BT1/StorageWrapper...

Read more...

review: Approve

Preview Diff

[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1=== removed directory '.pc/01_MANIFEST.in_remove_broken_cruft.dpatch'
2=== removed file '.pc/01_MANIFEST.in_remove_broken_cruft.dpatch/setup.py'
3--- .pc/01_MANIFEST.in_remove_broken_cruft.dpatch/setup.py 2010-03-21 14:36:30 +0000
4+++ .pc/01_MANIFEST.in_remove_broken_cruft.dpatch/setup.py 1970-01-01 00:00:00 +0000
5@@ -1,28 +0,0 @@
6-#!/usr/bin/env python
7-
8-# Written by Bram Cohen
9-# see LICENSE.txt for license information
10-
11-import sys
12-assert sys.version >= '2', "Install Python 2.0 or greater"
13-from distutils.core import setup, Extension
14-import BitTornado
15-
16-setup(
17- name = "BitTornado",
18- version = BitTornado.version,
19- author = "Bram Cohen, John Hoffman, Uoti Arpala et. al.",
20- author_email = "<theshadow@degreez.net>",
21- url = "http://www.bittornado.com",
22- license = "MIT",
23-
24- packages = ["BitTornado","BitTornado.BT1"],
25-
26- scripts = ["btdownloadgui.py", "btdownloadheadless.py",
27- "bttrack.py", "btmakemetafile.py", "btlaunchmany.py", "btcompletedir.py",
28- "btdownloadcurses.py", "btcompletedirgui.py", "btlaunchmanycurses.py",
29- "btmakemetafile.py", "btreannounce.py", "btrename.py", "btshowmetainfo.py",
30- 'btmaketorrentgui.py', 'btcopyannounce.py', 'btsethttpseeds.py',
31- 'bt-t-make.py',
32- ]
33- )
34
35=== removed directory '.pc/02_btdownloadcurses_increase_significant_digit.dpatch'
36=== removed file '.pc/02_btdownloadcurses_increase_significant_digit.dpatch/btdownloadcurses.py'
37--- .pc/02_btdownloadcurses_increase_significant_digit.dpatch/btdownloadcurses.py 2010-03-21 14:36:30 +0000
38+++ .pc/02_btdownloadcurses_increase_significant_digit.dpatch/btdownloadcurses.py 1970-01-01 00:00:00 +0000
39@@ -1,407 +0,0 @@
40-#!/usr/bin/env python
41-
42-# Written by Henry 'Pi' James
43-# see LICENSE.txt for license information
44-
45-SPEW_SCROLL_RATE = 1
46-
47-from BitTornado import PSYCO
48-if PSYCO.psyco:
49- try:
50- import psyco
51- assert psyco.__version__ >= 0x010100f0
52- psyco.full()
53- except:
54- pass
55-
56-from BitTornado.download_bt1 import BT1Download, defaults, parse_params, get_usage, get_response
57-from BitTornado.RawServer import RawServer, UPnP_ERROR
58-from random import seed
59-from socket import error as socketerror
60-from BitTornado.bencode import bencode
61-from BitTornado.natpunch import UPnP_test
62-from threading import Event
63-from os.path import abspath
64-from signal import signal, SIGWINCH
65-from sha import sha
66-from sys import argv, exit
67-import sys
68-from time import time, strftime
69-from BitTornado.clock import clock
70-from BitTornado import createPeerID, version
71-from BitTornado.ConfigDir import ConfigDir
72-
73-try:
74- import curses
75- import curses.panel
76- from curses.wrapper import wrapper as curses_wrapper
77- from signal import signal, SIGWINCH
78-except:
79- print 'Textmode GUI initialization failed, cannot proceed.'
80- print
81- print 'This download interface requires the standard Python module ' \
82- '"curses", which is unfortunately not available for the native ' \
83- 'Windows port of Python. It is however available for the Cygwin ' \
84- 'port of Python, running on all Win32 systems (www.cygwin.com).'
85- print
86- print 'You may still use "btdownloadheadless.py" to download.'
87- sys.exit(1)
88-
89-assert sys.version >= '2', "Install Python 2.0 or greater"
90-try:
91- True
92-except:
93- True = 1
94- False = 0
95-
96-def fmttime(n):
97- if n == 0:
98- return 'download complete!'
99- try:
100- n = int(n)
101- assert n >= 0 and n < 5184000 # 60 days
102- except:
103- return '<unknown>'
104- m, s = divmod(n, 60)
105- h, m = divmod(m, 60)
106- return 'finishing in %d:%02d:%02d' % (h, m, s)
107-
108-def fmtsize(n):
109- s = str(n)
110- size = s[-3:]
111- while len(s) > 3:
112- s = s[:-3]
113- size = '%s,%s' % (s[-3:], size)
114- if n > 999:
115- unit = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
116- i = 1
117- while i + 1 < len(unit) and (n >> 10) >= 999:
118- i += 1
119- n >>= 10
120- n = float(n) / (1 << 10)
121- size = '%s (%.0f %s)' % (size, n, unit[i])
122- return size
123-
124-
125-class CursesDisplayer:
126- def __init__(self, scrwin, errlist, doneflag):
127- self.scrwin = scrwin
128- self.errlist = errlist
129- self.doneflag = doneflag
130-
131- signal(SIGWINCH, self.winch_handler)
132- self.changeflag = Event()
133-
134- self.done = 0
135- self.file = ''
136- self.fileSize = ''
137- self.activity = ''
138- self.status = ''
139- self.progress = ''
140- self.downloadTo = ''
141- self.downRate = '---'
142- self.upRate = '---'
143- self.shareRating = ''
144- self.seedStatus = ''
145- self.peerStatus = ''
146- self.errors = []
147- self.last_update_time = 0
148- self.spew_scroll_time = 0
149- self.spew_scroll_pos = 0
150-
151- self._remake_window()
152-
153- def winch_handler(self, signum, stackframe):
154- self.changeflag.set()
155- curses.endwin()
156- self.scrwin.refresh()
157- self.scrwin = curses.newwin(0, 0, 0, 0)
158- self._remake_window()
159-
160- def _remake_window(self):
161- self.scrh, self.scrw = self.scrwin.getmaxyx()
162- self.scrpan = curses.panel.new_panel(self.scrwin)
163- self.labelh, self.labelw, self.labely, self.labelx = 11, 9, 1, 2
164- self.labelwin = curses.newwin(self.labelh, self.labelw,
165- self.labely, self.labelx)
166- self.labelpan = curses.panel.new_panel(self.labelwin)
167- self.fieldh, self.fieldw, self.fieldy, self.fieldx = (
168- self.labelh, self.scrw-2 - self.labelw-3,
169- 1, self.labelw+3)
170- self.fieldwin = curses.newwin(self.fieldh, self.fieldw,
171- self.fieldy, self.fieldx)
172- self.fieldwin.nodelay(1)
173- self.fieldpan = curses.panel.new_panel(self.fieldwin)
174- self.spewh, self.speww, self.spewy, self.spewx = (
175- self.scrh - self.labelh - 2, self.scrw - 3, 1 + self.labelh, 2)
176- self.spewwin = curses.newwin(self.spewh, self.speww,
177- self.spewy, self.spewx)
178- self.spewpan = curses.panel.new_panel(self.spewwin)
179- try:
180- self.scrwin.border(ord('|'),ord('|'),ord('-'),ord('-'),ord(' '),ord(' '),ord(' '),ord(' '))
181- except:
182- pass
183- self.labelwin.addstr(0, 0, 'file:')
184- self.labelwin.addstr(1, 0, 'size:')
185- self.labelwin.addstr(2, 0, 'dest:')
186- self.labelwin.addstr(3, 0, 'progress:')
187- self.labelwin.addstr(4, 0, 'status:')
188- self.labelwin.addstr(5, 0, 'dl speed:')
189- self.labelwin.addstr(6, 0, 'ul speed:')
190- self.labelwin.addstr(7, 0, 'sharing:')
191- self.labelwin.addstr(8, 0, 'seeds:')
192- self.labelwin.addstr(9, 0, 'peers:')
193- curses.panel.update_panels()
194- curses.doupdate()
195- self.changeflag.clear()
196-
197-
198- def finished(self):
199- self.done = 1
200- self.activity = 'download succeeded!'
201- self.downRate = '---'
202- self.display(fractionDone = 1)
203-
204- def failed(self):
205- self.done = 1
206- self.activity = 'download failed!'
207- self.downRate = '---'
208- self.display()
209-
210- def error(self, errormsg):
211- newerrmsg = strftime('[%H:%M:%S] ') + errormsg
212- self.errors.append(newerrmsg)
213- self.errlist.append(newerrmsg)
214- self.display()
215-
216- def display(self, dpflag = Event(), fractionDone = None, timeEst = None,
217- downRate = None, upRate = None, activity = None,
218- statistics = None, spew = None, **kws):
219-
220- inchar = self.fieldwin.getch()
221- if inchar == 12: # ^L
222- self._remake_window()
223- elif inchar in (ord('q'),ord('Q')):
224- self.doneflag.set()
225-
226- if activity is not None and not self.done:
227- self.activity = activity
228- elif timeEst is not None:
229- self.activity = fmttime(timeEst)
230- if self.changeflag.isSet():
231- return
232- if self.last_update_time + 0.1 > clock() and fractionDone not in (0.0, 1.0) and activity is not None:
233- return
234- self.last_update_time = clock()
235- if fractionDone is not None:
236- blocknum = int(self.fieldw * fractionDone)
237- self.progress = blocknum * '#' + (self.fieldw - blocknum) * '_'
238- self.status = '%s (%.1f%%)' % (self.activity, fractionDone * 100)
239- else:
240- self.status = self.activity
241- if downRate is not None:
242- self.downRate = '%.1f KB/s' % (float(downRate) / (1 << 10))
243- if upRate is not None:
244- self.upRate = '%.1f KB/s' % (float(upRate) / (1 << 10))
245- if statistics is not None:
246- if (statistics.shareRating < 0) or (statistics.shareRating > 100):
247- self.shareRating = 'oo (%.1f MB up / %.1f MB down)' % (float(statistics.upTotal) / (1<<20), float(statistics.downTotal) / (1<<20))
248- else:
249- self.shareRating = '%.3f (%.1f MB up / %.1f MB down)' % (statistics.shareRating, float(statistics.upTotal) / (1<<20), float(statistics.downTotal) / (1<<20))
250- if not self.done:
251- self.seedStatus = '%d seen now, plus %.3f distributed copies' % (statistics.numSeeds,0.001*int(1000*statistics.numCopies2))
252- else:
253- self.seedStatus = '%d seen recently, plus %.3f distributed copies' % (statistics.numOldSeeds,0.001*int(1000*statistics.numCopies))
254- self.peerStatus = '%d seen now, %.1f%% done at %.1f kB/s' % (statistics.numPeers,statistics.percentDone,float(statistics.torrentRate) / (1 << 10))
255-
256- self.fieldwin.erase()
257- self.fieldwin.addnstr(0, 0, self.file, self.fieldw, curses.A_BOLD)
258- self.fieldwin.addnstr(1, 0, self.fileSize, self.fieldw)
259- self.fieldwin.addnstr(2, 0, self.downloadTo, self.fieldw)
260- if self.progress:
261- self.fieldwin.addnstr(3, 0, self.progress, self.fieldw, curses.A_BOLD)
262- self.fieldwin.addnstr(4, 0, self.status, self.fieldw)
263- self.fieldwin.addnstr(5, 0, self.downRate, self.fieldw)
264- self.fieldwin.addnstr(6, 0, self.upRate, self.fieldw)
265- self.fieldwin.addnstr(7, 0, self.shareRating, self.fieldw)
266- self.fieldwin.addnstr(8, 0, self.seedStatus, self.fieldw)
267- self.fieldwin.addnstr(9, 0, self.peerStatus, self.fieldw)
268-
269- self.spewwin.erase()
270-
271- if not spew:
272- errsize = self.spewh
273- if self.errors:
274- self.spewwin.addnstr(0, 0, "error(s):", self.speww, curses.A_BOLD)
275- errsize = len(self.errors)
276- displaysize = min(errsize, self.spewh)
277- displaytop = errsize - displaysize
278- for i in range(displaysize):
279- self.spewwin.addnstr(i, self.labelw, self.errors[displaytop + i],
280- self.speww-self.labelw-1, curses.A_BOLD)
281- else:
282- if self.errors:
283- self.spewwin.addnstr(0, 0, "error:", self.speww, curses.A_BOLD)
284- self.spewwin.addnstr(0, self.labelw, self.errors[-1],
285- self.speww-self.labelw-1, curses.A_BOLD)
286- self.spewwin.addnstr(2, 0, " # IP Upload Download Completed Speed", self.speww, curses.A_BOLD)
287-
288-
289- if self.spew_scroll_time + SPEW_SCROLL_RATE < clock():
290- self.spew_scroll_time = clock()
291- if len(spew) > self.spewh-5 or self.spew_scroll_pos > 0:
292- self.spew_scroll_pos += 1
293- if self.spew_scroll_pos > len(spew):
294- self.spew_scroll_pos = 0
295-
296- for i in range(len(spew)):
297- spew[i]['lineno'] = i+1
298- spew.append({'lineno': None})
299- spew = spew[self.spew_scroll_pos:] + spew[:self.spew_scroll_pos]
300-
301- for i in range(min(self.spewh - 5, len(spew))):
302- if not spew[i]['lineno']:
303- continue
304- self.spewwin.addnstr(i+3, 0, '%3d' % spew[i]['lineno'], 3)
305- self.spewwin.addnstr(i+3, 4, spew[i]['ip']+spew[i]['direction'], 16)
306- if spew[i]['uprate'] > 100:
307- self.spewwin.addnstr(i+3, 20, '%6.0f KB/s' % (float(spew[i]['uprate']) / 1000), 11)
308- self.spewwin.addnstr(i+3, 32, '-----', 5)
309- if spew[i]['uinterested'] == 1:
310- self.spewwin.addnstr(i+3, 33, 'I', 1)
311- if spew[i]['uchoked'] == 1:
312- self.spewwin.addnstr(i+3, 35, 'C', 1)
313- if spew[i]['downrate'] > 100:
314- self.spewwin.addnstr(i+3, 38, '%6.0f KB/s' % (float(spew[i]['downrate']) / 1000), 11)
315- self.spewwin.addnstr(i+3, 50, '-------', 7)
316- if spew[i]['dinterested'] == 1:
317- self.spewwin.addnstr(i+3, 51, 'I', 1)
318- if spew[i]['dchoked'] == 1:
319- self.spewwin.addnstr(i+3, 53, 'C', 1)
320- if spew[i]['snubbed'] == 1:
321- self.spewwin.addnstr(i+3, 55, 'S', 1)
322- self.spewwin.addnstr(i+3, 58, '%5.1f%%' % (float(int(spew[i]['completed']*1000))/10), 6)
323- if spew[i]['speed'] is not None:
324- self.spewwin.addnstr(i+3, 64, '%5.0f KB/s' % (float(spew[i]['speed'])/1000), 10)
325-
326- if statistics is not None:
327- self.spewwin.addnstr(self.spewh-1, 0,
328- 'downloading %d pieces, have %d fragments, %d of %d pieces completed'
329- % ( statistics.storage_active, statistics.storage_dirty,
330- statistics.storage_numcomplete,
331- statistics.storage_totalpieces ), self.speww-1 )
332-
333- curses.panel.update_panels()
334- curses.doupdate()
335- dpflag.set()
336-
337- def chooseFile(self, default, size, saveas, dir):
338- self.file = default
339- self.fileSize = fmtsize(size)
340- if saveas == '':
341- saveas = default
342- self.downloadTo = abspath(saveas)
343- return saveas
344-
345-def run(scrwin, errlist, params):
346- doneflag = Event()
347- d = CursesDisplayer(scrwin, errlist, doneflag)
348- try:
349- while 1:
350- configdir = ConfigDir('downloadcurses')
351- defaultsToIgnore = ['responsefile', 'url', 'priority']
352- configdir.setDefaults(defaults,defaultsToIgnore)
353- configdefaults = configdir.loadConfig()
354- defaults.append(('save_options',0,
355- "whether to save the current options as the new default configuration " +
356- "(only for btdownloadcurses.py)"))
357- try:
358- config = parse_params(params, configdefaults)
359- except ValueError, e:
360- d.error('error: ' + str(e) + '\nrun with no args for parameter explanations')
361- break
362- if not config:
363- d.error(get_usage(defaults, d.fieldw, configdefaults))
364- break
365- if config['save_options']:
366- configdir.saveConfig(config)
367- configdir.deleteOldCacheData(config['expire_cache_data'])
368-
369- myid = createPeerID()
370- seed(myid)
371-
372- rawserver = RawServer(doneflag, config['timeout_check_interval'],
373- config['timeout'], ipv6_enable = config['ipv6_enabled'],
374- failfunc = d.failed, errorfunc = d.error)
375-
376- upnp_type = UPnP_test(config['upnp_nat_access'])
377- while True:
378- try:
379- listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
380- config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
381- upnp = upnp_type, randomizer = config['random_port'])
382- break
383- except socketerror, e:
384- if upnp_type and e == UPnP_ERROR:
385- d.error('WARNING: COULD NOT FORWARD VIA UPnP')
386- upnp_type = 0
387- continue
388- d.error("Couldn't listen - " + str(e))
389- d.failed()
390- return
391-
392- response = get_response(config['responsefile'], config['url'], d.error)
393- if not response:
394- break
395-
396- infohash = sha(bencode(response['info'])).digest()
397-
398- dow = BT1Download(d.display, d.finished, d.error, d.error, doneflag,
399- config, response, infohash, myid, rawserver, listen_port,
400- configdir)
401-
402- if not dow.saveAs(d.chooseFile):
403- break
404-
405- if not dow.initFiles(old_style = True):
406- break
407- if not dow.startEngine():
408- dow.shutdown()
409- break
410- dow.startRerequester()
411- dow.autoStats()
412-
413- if not dow.am_I_finished():
414- d.display(activity = 'connecting to peers')
415- rawserver.listen_forever(dow.getPortHandler())
416- d.display(activity = 'shutting down')
417- dow.shutdown()
418- break
419-
420- except KeyboardInterrupt:
421- # ^C to exit..
422- pass
423- try:
424- rawserver.shutdown()
425- except:
426- pass
427- if not d.done:
428- d.failed()
429-
430-
431-if __name__ == '__main__':
432- if argv[1:] == ['--version']:
433- print version
434- exit(0)
435- if len(argv) <= 1:
436- print "Usage: btdownloadcurses.py <global options>\n"
437- print get_usage(defaults)
438- exit(1)
439-
440- errlist = []
441- curses_wrapper(run, errlist, argv[1:])
442-
443- if errlist:
444- print "These errors occurred during execution:"
445- for error in errlist:
446- print error
447\ No newline at end of file
448
449=== removed directory '.pc/05_bttrack_connerr_fix.dpatch'
450=== removed directory '.pc/05_bttrack_connerr_fix.dpatch/BitTornado'
451=== removed directory '.pc/05_bttrack_connerr_fix.dpatch/BitTornado/BT1'
452=== removed file '.pc/05_bttrack_connerr_fix.dpatch/BitTornado/BT1/track.py'
453--- .pc/05_bttrack_connerr_fix.dpatch/BitTornado/BT1/track.py 2010-03-21 14:36:30 +0000
454+++ .pc/05_bttrack_connerr_fix.dpatch/BitTornado/BT1/track.py 1970-01-01 00:00:00 +0000
455@@ -1,1137 +0,0 @@
456-# Written by Bram Cohen
457-# see LICENSE.txt for license information
458-
459-from BitTornado.parseargs import parseargs, formatDefinitions
460-from BitTornado.RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
461-from BitTornado.HTTPHandler import HTTPHandler, months, weekdays
462-from BitTornado.parsedir import parsedir
463-from NatCheck import NatCheck, CHECK_PEER_ID_ENCRYPTED
464-from BitTornado.BTcrypto import CRYPTO_OK
465-from T2T import T2TList
466-from BitTornado.subnetparse import IP_List, ipv6_to_ipv4, to_ipv4, is_valid_ip, is_ipv4
467-from BitTornado.iprangeparse import IP_List as IP_Range_List
468-from BitTornado.torrentlistparse import parsetorrentlist
469-from threading import Event, Thread
470-from BitTornado.bencode import bencode, bdecode, Bencached
471-from BitTornado.zurllib import urlopen, quote, unquote
472-from Filter import Filter
473-from urlparse import urlparse
474-from os import rename, getpid
475-from os.path import exists, isfile
476-from cStringIO import StringIO
477-from traceback import print_exc
478-from time import time, gmtime, strftime, localtime
479-from BitTornado.clock import clock
480-from random import shuffle, seed, randrange
481-from sha import sha
482-from types import StringType, IntType, LongType, ListType, DictType
483-from binascii import b2a_hex, a2b_hex, a2b_base64
484-from string import lower
485-import sys, os
486-import signal
487-import re
488-import BitTornado.__init__
489-from BitTornado.__init__ import version, createPeerID
490-try:
491- True
492-except:
493- True = 1
494- False = 0
495- bool = lambda x: not not x
496-
# Tracker configuration schema: (option name, default value, help text)
# triples consumed by BitTornado's command-line/config parser.
defaults = [
    ('port', 80, "Port to listen on."),
    ('dfile', None, 'file to store recent downloader info in'),
    ('bind', '', 'comma-separated list of ips/hostnames to bind to locally'),
#    ('ipv6_enabled', autodetect_ipv6(),
    ('ipv6_enabled', 0,
         'allow the client to connect to peers via IPv6'),
    ('ipv6_binds_v4', autodetect_socket_style(),
        'set if an IPv6 server socket will also field IPv4 connections'),
    ('socket_timeout', 15, 'timeout for closing connections'),
    ('save_dfile_interval', 5 * 60, 'seconds between saving dfile'),
    ('timeout_downloaders_interval', 45 * 60, 'seconds between expiring downloaders'),
    ('reannounce_interval', 30 * 60, 'seconds downloaders should wait between reannouncements'),
    ('response_size', 50, 'number of peers to send in an info message'),
    ('timeout_check_interval', 5,
        'time to wait between checking if any connections have timed out'),
    ('nat_check', 3,
        "how many times to check if a downloader is behind a NAT (0 = don't check)"),
    ('log_nat_checks', 0,
        "whether to add entries to the log for nat-check results"),
    ('min_time_between_log_flushes', 3.0,
        'minimum time it must have been since the last flush to do another one'),
    ('min_time_between_cache_refreshes', 600.0,
        'minimum time in seconds before a cache is considered stale and is flushed'),
    ('allowed_dir', '', 'only allow downloads for .torrents in this dir'),
    ('allowed_list', '', 'only allow downloads for hashes in this list (hex format, one per line)'),
    ('allowed_controls', 0, 'allow special keys in torrents in the allowed_dir to affect tracker access'),
    ('multitracker_enabled', 0, 'whether to enable multitracker operation'),
    ('multitracker_allowed', 'autodetect', 'whether to allow incoming tracker announces (can be none, autodetect or all)'),
    ('multitracker_reannounce_interval', 2 * 60, 'seconds between outgoing tracker announces'),
    ('multitracker_maxpeers', 20, 'number of peers to get in a tracker announce'),
    ('aggregate_forward', '', 'format: <url>[,<password>] - if set, forwards all non-multitracker to this url with this optional password'),
    ('aggregator', '0', 'whether to act as a data aggregator rather than a tracker. If enabled, may be 1, or <password>; ' +
             'if password is set, then an incoming password is required for access'),
    ('hupmonitor', 0, 'whether to reopen the log file upon receipt of HUP signal'),
    ('http_timeout', 60,
        'number of seconds to wait before assuming that an http connection has timed out'),
    ('parse_dir_interval', 60, 'seconds between reloading of allowed_dir or allowed_file ' +
             'and allowed_ips and banned_ips lists'),
    ('show_infopage', 1, "whether to display an info page when the tracker's root dir is loaded"),
    ('infopage_redirect', '', 'a URL to redirect the info page to'),
    ('show_names', 1, 'whether to display names from allowed dir'),
    ('favicon', '', 'file containing x-icon data to return when browser requests favicon.ico'),
    ('allowed_ips', '', 'only allow connections from IPs specified in the given file; '+
             'file contains subnet data in the format: aa.bb.cc.dd/len'),
    ('banned_ips', '', "don't allow connections from IPs specified in the given file; "+
             'file contains IP range data in the format: xxx:xxx:ip1-ip2'),
    ('only_local_override_ip', 2, "ignore the ip GET parameter from machines which aren't on local network IPs " +
             "(0 = never, 1 = always, 2 = ignore if NAT checking is not enabled)"),
    ('logfile', '', 'file to write the tracker logs, use - for stdout (default)'),
    ('allow_get', 0, 'use with allowed_dir; adds a /file?hash={hash} url that allows users to download the torrent file'),
    ('keep_dead', 0, 'keep dead torrents after they expire (so they still show up on your /scrape and web page)'),
    ('scrape_allowed', 'full', 'scrape access allowed (can be none, specific or full)'),
    ('dedicated_seed_id', '', 'allows tracker to monitor dedicated seed(s) and flag torrents as seeded'),
    ('compact_reqd', 1, "only allow peers that accept a compact response"),
    ]
553-
def statefiletemplate(x):
    """Validate the structure of a decoded tracker state file (dfile).

    The state is a dict with up to four known sections:

      'peers':             {infohash: {peerid: per-client stats dict}}
      'completed':         {infohash: completion count}
      'allowed':           {infohash: torrent data} (allowed_dir mode)
      'allowed_dir_files': {filename: [stats, infohash]}

    Raises ValueError on any structural problem; returns None when valid.
    """
    # Integer-ish types: Python 2 bdecode may hand back longs as well as ints.
    try:
        ints = (int, long)
    except NameError:       # Python 3: long was folded into int
        ints = (int,)
    if type(x) != dict:
        raise ValueError
    for cname, cinfo in x.items():
        if cname == 'peers':
            # One dictionary per active torrent, keyed by SHA info hash.
            for y in cinfo.values():
                if type(y) != dict:
                    raise ValueError
                for id, info in y.items():  # one entry per interested client
                    if (len(id) != 20):     # peer ids are exactly 20 bytes
                        raise ValueError
                    if type(info) != dict:  # per-client stats dictionary
                        raise ValueError
                    if type(info.get('ip', '')) != str:
                        raise ValueError
                    port = info.get('port')
                    if type(port) not in ints or port < 0:
                        raise ValueError
                    left = info.get('left')
                    if type(left) not in ints or left < 0:
                        raise ValueError
                    if type(info.get('supportcrypto')) not in ints:
                        raise ValueError
                    if type(info.get('requirecrypto')) not in ints:
                        raise ValueError
        elif cname == 'completed':
            # {infohash: integer number of reported completions}
            if (type(cinfo) != dict):
                raise ValueError
            for y in cinfo.values():
                if type(y) not in ints:
                    raise ValueError
        elif cname == 'allowed':
            if (type(cinfo) != dict):   # {infohash: included torrent data}
                raise ValueError
            if 'allowed_dir_files' in x:
                adlist = [z[1] for z in x['allowed_dir_files'].values()]
                # every allowed hash must come from a known file on disk
                for y in cinfo.keys():
                    if not y in adlist:
                        raise ValueError
        elif cname == 'allowed_dir_files':
            if (type(cinfo) != dict):   # {filename: [attributes, infohash]}
                raise ValueError
            dirkeys = {}
            for y in cinfo.values():
                if not y[1]:            # file not hashed yet; nothing to check
                    continue
                if y[1] not in x['allowed']:
                    raise ValueError
                if y[1] in dirkeys:     # duplicate info_hash between two files
                    raise ValueError
                dirkeys[y[1]] = 1
605-
606-
# Body text returned for requests about torrents this tracker doesn't know.
alas = 'your file may exist elsewhere in the universe\nbut alas, not here\n'

# Intranet/loopback ranges, used throughout to tell local peers from external ones.
local_IPs = IP_List()
local_IPs.set_intranet_addresses()
611-
612-
def isotime(secs = None):
    """Return *secs* (seconds since the epoch; defaults to the current time)
    formatted as 'YYYY-MM-DD HH:MM UTC'.
    """
    # `is None` rather than `== None`: identity is the correct idiom for the
    # None sentinel and is immune to objects with custom __eq__.
    if secs is None:
        secs = time()
    return strftime('%Y-%m-%d %H:%M UTC', gmtime(secs))
617-
# Matches the trailing "for <ip>" clause a proxy appends to a Via: header.
http_via_filter = re.compile(' for ([0-9.]+)\Z')
619-
def _get_forwarded_ip(headers):
    """Best-effort extraction of the originating client IP from proxy
    headers, tried in order: X-Forwarded-For, Client-IP, Via, From.
    May return None or a non-IP string; get_forwarded_ip() validates.
    """
    forwarded = headers.get('x-forwarded-for')
    if forwarded:
        # Expect at most "client, proxy"; anything else is returned verbatim.
        try:
            first, second = forwarded.split(',')
        except:
            return forwarded
        if is_valid_ip(first) and not local_IPs.includes(first):
            return first
        return second
    client = headers.get('client-ip')
    if client:
        return client
    via = headers.get('via')
    if via:
        # Pull the "for <ip>" clause out of the Via header, if present.
        match = http_via_filter.search(via)
        try:
            return match.group(1)
        except:
            pass
    return headers.get('from')
645-
def get_forwarded_ip(headers):
    """Return the proxied-for client IP, or None when the headers yield
    nothing usable (invalid IP or an intranet address).
    """
    candidate = _get_forwarded_ip(headers)
    if is_valid_ip(candidate) and not local_IPs.includes(candidate):
        return candidate
    return None
651-
def compact_peer_info(ip, port):
    """Pack a dotted-quad IP and a port into the 6-byte compact peer format.

    Returns '' when *ip* is not a dotted quad (e.g. a domain name).
    """
    try:
        packed = ''.join(map(chr, map(int, ip.split('.'))))
        packed += chr((port >> 8) & 0xFF) + chr(port & 0xFF)
        if len(packed) != 6:
            raise ValueError
    except:
        return ''   # not a valid IP, must be a domain name
    return packed
661-
662-class Tracker:
    def __init__(self, config, rawserver):
        """Build the tracker: restore saved state from dfile, rebuild the
        peer caches, and schedule the periodic save/expire tasks on
        *rawserver*.  *config* is a dict built from the `defaults` table.
        """
        self.config = config
        self.response_size = config['response_size']
        self.dfile = config['dfile']
        self.natcheck = config['nat_check']
        favicon = config['favicon']
        self.parse_dir_interval = config['parse_dir_interval']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon,'r')
                self.favicon = h.read()
                h.close()
            except:
                print "**warning** specified favicon file -- %s -- does not exist." % favicon
        self.rawserver = rawserver
        self.cached = {}    # format: infohash: [[time1, l1, s1], [time2, l2, s2], ...]
        self.cached_t = {}  # format: infohash: [time, cache]
        self.times = {}
        self.state = {}
        self.seedcount = {}

        self.allowed_IPs = None
        self.banned_IPs = None
        if config['allowed_ips'] or config['banned_ips']:
            self.allowed_ip_mtime = 0
            self.banned_ip_mtime = 0
            self.read_ip_lists()

        self.only_local_override_ip = config['only_local_override_ip']
        # Mode 2: honour the ip= GET parameter only when NAT checking is off.
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['nat_check']

        if CHECK_PEER_ID_ENCRYPTED and not CRYPTO_OK:
            print ('**warning** crypto library not installed,' +
                   ' cannot completely verify encrypted peers')

        # Restore saved tracker state; any parse/validation failure resets it.
        if exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                ds = h.read()
                h.close()
                tempstate = bdecode(ds)
                # Very old dfiles stored the peers dict bare, without sections.
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print '**warning** statefile '+self.dfile+' corrupt; resetting'
        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})

        self.becache = {}
        ''' format: infohash: [[l0, s0], [l1, s1], ...]
                l0,s0 = compact, not requirecrypto=1
                l1,s1 = compact, only supportcrypto=1
                l2,s2 = [compact, crypto_flag], all peers
            if --compact_reqd 0:
                l3,s3 = [ip,port,id]
                l4,l4 = [ip,port] nopeerid
        '''
        if config['compact_reqd']:
            x = 3
        else:
            x = 5
        # NOTE(review): this single list is inserted via setdefault for every
        # infohash (see natcheckOK/peerlist), so entries appear to share the
        # same leecher/seed dicts across torrents -- verify this is intended.
        self.cache_default = [({},{}) for i in xrange(x)]
        # Re-derive seed counts and NAT-check caches from the restored peers.
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x,y in ds.items():
                ip = y['ip']
                if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
                     or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
                    del ds[x]
                    continue
                if not y['left']:
                    self.seedcount[infohash] += 1
                if y.get('nat',-1):     # only NAT-clear peers go in the caches
                    continue
                gip = y.get('given_ip')
                if is_valid_ip(gip) and (
                    not self.only_local_override_ip or local_IPs.includes(ip) ):
                    ip = gip
                self.natcheckOK(infohash,x,ip,y['port'],y)

        # Zero the last-announce timestamps so everything looks freshly stale.
        for x in self.downloads.keys():
            self.times[x] = {}
            for y in self.downloads[x].keys():
                self.times[x][y] = 0

        self.trackerid = createPeerID('-T-')
        seed(self.trackerid)

        self.reannounce_interval = config['reannounce_interval']
        self.save_dfile_interval = config['save_dfile_interval']
        self.show_names = config['show_names']
        rawserver.add_task(self.save_state, self.save_dfile_interval)
        self.prevtime = clock()
        self.timeout_downloaders_interval = config['timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        # Redirect stdout into the log file so plain `print` calls are logged.
        if (config['logfile']) and (config['logfile'] != '-'):
            try:
                self.logfile = config['logfile']
                self.log = open(self.logfile,'a')
                sys.stdout = self.log
                print "# Log Started: ", isotime()
            except:
                print "**warning** could not redirect stdout to log file: ", sys.exc_info()[0]

        # SIGHUP reopens the log, for logrotate-style rotation.
        if config['hupmonitor']:
            def huphandler(signum, frame, self = self):
                try:
                    self.log.close ()
                    self.log = open(self.logfile,'a')
                    sys.stdout = self.log
                    print "# Log reopened: ", isotime()
                except:
                    print "**warning** could not reopen logfile"

            signal.signal(signal.SIGHUP, huphandler)

        self.allow_get = config['allow_get']

        self.t2tlist = T2TList(config['multitracker_enabled'], self.trackerid,
                               config['multitracker_reannounce_interval'],
                               config['multitracker_maxpeers'], config['http_timeout'],
                               self.rawserver)

        # allowed_list and allowed_dir are mutually exclusive restrictions.
        if config['allowed_list']:
            if config['allowed_dir']:
                print '**warning** allowed_dir and allowed_list options cannot be used together'
                print '**warning** disregarding allowed_dir'
                config['allowed_dir'] = ''
            self.allowed = self.state.setdefault('allowed_list',{})
            self.allowed_list_mtime = 0
            self.parse_allowed()
            self.remove_from_state('allowed','allowed_dir_files')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        elif config['allowed_dir']:
            self.allowed = self.state.setdefault('allowed',{})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files',{})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
            self.remove_from_state('allowed_list')

        else:
            self.allowed = None     # no restriction: track anything announced
            self.remove_from_state('allowed','allowed_dir_files', 'allowed_list')
            if config['multitracker_allowed'] == 'autodetect':
                config['multitracker_allowed'] = 'none'
            config['allowed_controls'] = 0

        # Detect unquote() implementations that don't map '+' to space.
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['keep_dead']
        self.Filter = Filter(rawserver.add_task)

        # Aggregator mode: '0' = off, '1' = on, anything else = on + password.
        aggregator = config['aggregator']
        if aggregator == '0':
            self.is_aggregator = False
            self.aggregator_key = None
        else:
            self.is_aggregator = True
            if aggregator == '1':
                self.aggregator_key = None
            else:
                self.aggregator_key = aggregator
            self.natcheck = False

        send = config['aggregate_forward']
        if not send:
            self.aggregate_forward = None
        else:
            # format: <url>[,<password>]
            try:
                self.aggregate_forward, self.aggregate_password = send.split(',')
            except:
                self.aggregate_forward = send
                self.aggregate_password = None

        self.dedicated_seed_id = config['dedicated_seed_id']
        self.is_seeded = {}

        self.cachetime = 0
        self.cachetimeupdate()
850-
    def cachetimeupdate(self):
        """Self-rescheduling 1 Hz tick; `cachetime` ages the peer-list caches
        more cheaply than querying a real clock on every cache lookup."""
        self.cachetime += 1     # raw clock, but more efficient for cache
        self.rawserver.add_task(self.cachetimeupdate,1)
854-
855- def aggregate_senddata(self, query):
856- url = self.aggregate_forward+'?'+query
857- if self.aggregate_password is not None:
858- url += '&password='+self.aggregate_password
859- rq = Thread(target = self._aggregate_senddata, args = [url])
860- rq.setDaemon(False)
861- rq.start()
862-
863- def _aggregate_senddata(self, url): # just send, don't attempt to error check,
864- try: # discard any returned data
865- h = urlopen(url)
866- h.read()
867- h.close()
868- except:
869- return
870-
871-
    def get_infopage(self):
        """Render the tracker's HTML status page.

        Returns an HTTP response tuple (code, message, headers, body).
        Honours show_infopage (404 when off) and infopage_redirect (302).
        The table layout depends on whether torrent names/sizes are known
        (allowed_dir + show_names).
        """
        try:
            if not self.config['show_infopage']:
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
            red = self.config['infopage_redirect']
            if red:
                return (302, 'Found', {'Content-Type': 'text/html', 'Location': red},
                        '<A HREF="'+red+'">Click Here</A>')

            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n' \
                '<html><head><title>BitTorrent download info</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n' \
              '<h3>BitTorrent download info</h3>\n'\
              '<ul>\n'
              '<li><strong>tracker version:</strong> %s</li>\n' \
              '<li><strong>server time:</strong> %s</li>\n' \
              '</ul>\n' % (version, isotime()))
            # Torrent list: (name, hash) pairs; name is None when unknown.
            if self.config['allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'],hash)
                              for hash in self.allowed.keys() ]
                else:
                    names = [ (None,hash)
                              for hash in self.allowed.keys() ]
            else:
                names = [ (None,hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0  # total downloads completed
                tc = 0  # total complete (seeds)
                td = 0  # total downloading (leechers)
                tt = 0  # Total transferred
                ts = 0  # Total size
                nf = 0  # Number of files displayed
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n' \
                        '<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n' \
                        '<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name,hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']  # size
                            ts = ts + sz
                            szt = sz * n   # Transferred for this torrent
                            tt = tt + szt
                            if self.allow_get == 1:
                                linkname = '<a href="/file?info_hash=' + quote(hash) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' \
                                % (b2a_hex(hash), linkname, size_format(sz), c, d, n, size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' \
                            % (b2a_hex(hash), c, d, n))
                # Totals row, matching whichever table layout was emitted.
                if self.config['allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n'
                            % (nf, size_format(ts), tc, td, tn, size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i</td></tr>\n'
                            % (nf, tc, td, tn))
                s.write('</table>\n' \
                    '<ul>\n' \
                    '<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n' \
                    '<li><em>complete:</em> number of connected clients with the complete file</li>\n' \
                    '<li><em>downloading:</em> number of connected clients still downloading</li>\n' \
                    '<li><em>downloaded:</em> reported complete downloads</li>\n' \
                    '<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n' \
                    '</ul>\n')

            s.write('</body>\n' \
                '</html>\n')
            return (200, 'OK', {'Content-Type': 'text/html; charset=iso-8859-1'}, s.getvalue())
        except:
            # Never let a rendering bug take down the request loop.
            print_exc()
            return (500, 'Internal Server Error', {'Content-Type': 'text/html; charset=iso-8859-1'}, 'Server Error')
962-
963-
964- def scrapedata(self, hash, return_name = True):
965- l = self.downloads[hash]
966- n = self.completed.get(hash, 0)
967- c = self.seedcount[hash]
968- d = len(l) - c
969- f = {'complete': c, 'incomplete': d, 'downloaded': n}
970- if return_name and self.show_names and self.config['allowed_dir']:
971- f['name'] = self.allowed[hash]['name']
972- return (f)
973-
974- def get_scrape(self, paramslist):
975- fs = {}
976- if paramslist.has_key('info_hash'):
977- if self.config['scrape_allowed'] not in ['specific', 'full']:
978- return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
979- bencode({'failure reason':
980- 'specific scrape function is not available with this tracker.'}))
981- for hash in paramslist['info_hash']:
982- if self.allowed is not None:
983- if self.allowed.has_key(hash):
984- fs[hash] = self.scrapedata(hash)
985- else:
986- if self.downloads.has_key(hash):
987- fs[hash] = self.scrapedata(hash)
988- else:
989- if self.config['scrape_allowed'] != 'full':
990- return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
991- bencode({'failure reason':
992- 'full scrape function is not available with this tracker.'}))
993- if self.allowed is not None:
994- keys = self.allowed.keys()
995- else:
996- keys = self.downloads.keys()
997- for hash in keys:
998- fs[hash] = self.scrapedata(hash)
999-
1000- return (200, 'OK', {'Content-Type': 'text/plain'}, bencode({'files': fs}))
1001-
1002-
1003- def get_file(self, hash):
1004- if not self.allow_get:
1005- return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
1006- 'get function is not available with this tracker.')
1007- if not self.allowed.has_key(hash):
1008- return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)
1009- fname = self.allowed[hash]['file']
1010- fpath = self.allowed[hash]['path']
1011- return (200, 'OK', {'Content-Type': 'application/x-bittorrent',
1012- 'Content-Disposition': 'attachment; filename=' + fname},
1013- open(fpath, 'rb').read())
1014-
1015-
    def check_allowed(self, infohash, paramslist):
        """Authorization gate for an announce.

        Returns an HTTP response tuple (sent back verbatim) when the request
        must be refused, or None when the announce may proceed.  Refusals use
        HTTP 200 with a bencoded 'failure reason' so clients parse them.
        """
        # Aggregator mode with a password: every announce must supply it.
        if ( self.aggregator_key is not None
                 and not ( paramslist.has_key('password')
                           and paramslist['password'][0] == self.aggregator_key ) ):
            return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                'Requested download is not authorized for use with this tracker.'}))

        # Restricted tracker: the torrent must be in the allowed set, and may
        # carry an operator-supplied rejection when allowed_controls is on.
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                    'Requested download is not authorized for use with this tracker.'}))
            if self.config['allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                        bencode({'failure reason': self.allowed[infohash]['failure reason']}))

        # Multitracker peering: refuse tracker-to-tracker announces when the
        # feature is off or when we somehow announced to ourselves.
        if paramslist.has_key('tracker'):
            if ( self.config['multitracker_allowed'] == 'none' or       # turned off
                 paramslist['peer_id'][0] == self.trackerid ):          # oops! contacted myself
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason': 'disallowed'}))

            if ( self.config['multitracker_allowed'] == 'autodetect'
                 and not self.allowed[infohash].has_key('announce-list') ):
                return (200, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason':
                    'Requested download is not authorized for multitracker use.'}))

        return None
1047-
1048-
1049- def add_data(self, infohash, event, ip, paramslist):
1050- peers = self.downloads.setdefault(infohash, {})
1051- ts = self.times.setdefault(infohash, {})
1052- self.completed.setdefault(infohash, 0)
1053- self.seedcount.setdefault(infohash, 0)
1054-
1055- def params(key, default = None, l = paramslist):
1056- if l.has_key(key):
1057- return l[key][0]
1058- return default
1059-
1060- myid = params('peer_id','')
1061- if len(myid) != 20:
1062- raise ValueError, 'id not of length 20'
1063- if event not in ['started', 'completed', 'stopped', 'snooped', None]:
1064- raise ValueError, 'invalid event'
1065- port = params('cryptoport')
1066- if port is None:
1067- port = params('port','')
1068- port = long(port)
1069- if port < 0 or port > 65535:
1070- raise ValueError, 'invalid port'
1071- left = long(params('left',''))
1072- if left < 0:
1073- raise ValueError, 'invalid amount left'
1074- uploaded = long(params('uploaded',''))
1075- downloaded = long(params('downloaded',''))
1076- if params('supportcrypto'):
1077- supportcrypto = 1
1078- try:
1079- s = int(params['requirecrypto'])
1080- chr(s)
1081- except:
1082- s = 0
1083- requirecrypto = s
1084- else:
1085- supportcrypto = 0
1086- requirecrypto = 0
1087-
1088- peer = peers.get(myid)
1089- islocal = local_IPs.includes(ip)
1090- mykey = params('key')
1091- if peer:
1092- auth = peer.get('key',-1) == mykey or peer.get('ip') == ip
1093-
1094- gip = params('ip')
1095- if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
1096- ip1 = gip
1097- else:
1098- ip1 = ip
1099-
1100- if params('numwant') is not None:
1101- rsize = min(int(params('numwant')),self.response_size)
1102- else:
1103- rsize = self.response_size
1104-
1105- if event == 'stopped':
1106- if peer:
1107- if auth:
1108- self.delete_peer(infohash,myid)
1109-
1110- elif not peer:
1111- ts[myid] = clock()
1112- peer = { 'ip': ip, 'port': port, 'left': left,
1113- 'supportcrypto': supportcrypto,
1114- 'requirecrypto': requirecrypto }
1115- if mykey:
1116- peer['key'] = mykey
1117- if gip:
1118- peer['given ip'] = gip
1119- if port:
1120- if not self.natcheck or islocal:
1121- peer['nat'] = 0
1122- self.natcheckOK(infohash,myid,ip1,port,peer)
1123- else:
1124- NatCheck(self.connectback_result,infohash,myid,ip1,port,
1125- self.rawserver,encrypted=requirecrypto)
1126- else:
1127- peer['nat'] = 2**30
1128- if event == 'completed':
1129- self.completed[infohash] += 1
1130- if not left:
1131- self.seedcount[infohash] += 1
1132-
1133- peers[myid] = peer
1134-
1135- else:
1136- if not auth:
1137- return rsize # return w/o changing stats
1138-
1139- ts[myid] = clock()
1140- if not left and peer['left']:
1141- self.completed[infohash] += 1
1142- self.seedcount[infohash] += 1
1143- if not peer.get('nat', -1):
1144- for bc in self.becache[infohash]:
1145- bc[1][myid] = bc[0][myid]
1146- del bc[0][myid]
1147- elif left and not peer['left']:
1148- self.completed[infohash] -= 1
1149- self.seedcount[infohash] -= 1
1150- if not peer.get('nat', -1):
1151- for bc in self.becache[infohash]:
1152- bc[0][myid] = bc[1][myid]
1153- del bc[1][myid]
1154- peer['left'] = left
1155-
1156- if port:
1157- recheck = False
1158- if ip != peer['ip']:
1159- peer['ip'] = ip
1160- recheck = True
1161- if gip != peer.get('given ip'):
1162- if gip:
1163- peer['given ip'] = gip
1164- elif peer.has_key('given ip'):
1165- del peer['given ip']
1166- recheck = True
1167-
1168- natted = peer.get('nat', -1)
1169- if recheck:
1170- if natted == 0:
1171- l = self.becache[infohash]
1172- y = not peer['left']
1173- for x in l:
1174- del x[y][myid]
1175- if natted >= 0:
1176- del peer['nat'] # restart NAT testing
1177- if natted and natted < self.natcheck:
1178- recheck = True
1179-
1180- if recheck:
1181- if not self.natcheck or islocal:
1182- peer['nat'] = 0
1183- self.natcheckOK(infohash,myid,ip1,port,peer)
1184- else:
1185- NatCheck(self.connectback_result,infohash,myid,ip1,port,
1186- self.rawserver,encrypted=requirecrypto)
1187-
1188- return rsize
1189-
1190-
    def peerlist(self, infohash, stopped, tracker, is_seed,
                 return_type, rsize, supportcrypto):
        """Build the announce-response dict for *infohash*.

        return_type selects the peer encoding (per the becache levels set up
        in __init__): 0 compact, 1 compact crypto-required, 2 compact with
        crypto_flags, 3 full dicts, 4 dicts without peer ids.  *tracker*
        marks a tracker-to-tracker announce, which uses the separate
        cached_t cache.  Peers handed out are consumed from the shuffled
        cache so successive announces get different peers.
        """
        data = {}    # return data
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds

        if ( self.config['allowed_controls']
                 and self.allowed[infohash].has_key('warning message') ):
            data['warning message'] = self.allowed[infohash]['warning message']

        if tracker:
            data['interval'] = self.config['multitracker_reannounce_interval']
            if not rsize:
                return data
            # Tracker announces only ever get non-crypto compact peers (level 0).
            cache = self.cached_t.setdefault(infohash, None)
            if ( not cache or len(cache[1]) < rsize
                 or cache[0] + self.config['min_time_between_cache_refreshes'] < clock() ):
                bc = self.becache.setdefault(infohash,self.cache_default)
                cache = [ clock(), bc[0][0].values() + bc[0][1].values() ]
                self.cached_t[infohash] = cache
                shuffle(cache[1])
            cache = cache[1]

            data['peers'] = cache[-rsize:]
            del cache[-rsize:]
            return data

        data['interval'] = self.reannounce_interval
        if stopped or not rsize:    # save some bandwidth
            data['peers'] = []
            return data

        bc = self.becache.setdefault(infohash,self.cache_default)
        len_l = len(bc[2][0])   # leechers (level 2 holds all peers)
        len_s = len(bc[2][1])   # seeds
        if not (len_l+len_s):   # caches are empty!
            data['peers'] = []
            return data
        # Proportion of the response to draw from leechers.
        l_get_size = int(float(rsize)*(len_l)/(len_l+len_s))
        # NOTE(review): the default here has 3 slots, yet return_type can be
        # 3 or 4 when compact_reqd is 0 -- looks like a latent IndexError on
        # a cold cache; confirm against upstream behavior.
        cache = self.cached.setdefault(infohash,[None,None,None])[return_type]
        if cache and ( not cache[1]
                       or (is_seed and len(cache[1]) < rsize)
                       or len(cache[1]) < l_get_size
                       or cache[0]+self.config['min_time_between_cache_refreshes'] < self.cachetime ):
            cache = None    # stale or too small: force a rebuild
        if not cache:
            peers = self.downloads[infohash]
            if self.config['compact_reqd']:
                vv = ([],[],[])
            else:
                vv = ([],[],[],[],[])
            # Merge in peers harvested from other trackers (empty if disabled).
            for key, ip, port in self.t2tlist.harvest(infohash):
                if not peers.has_key(key):
                    cp = compact_peer_info(ip, port)
                    vv[0].append(cp)
                    vv[2].append((cp,'\x00'))
                    if not self.config['compact_reqd']:
                        vv[3].append({'ip': ip, 'port': port, 'peer id': key})
                        vv[4].append({'ip': ip, 'port': port})
            cache = [ self.cachetime,
                      bc[return_type][0].values()+vv[return_type],
                      bc[return_type][1].values() ]
            shuffle(cache[1])   # cache[1] = leechers, cache[2] = seeds
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
            # Share the harvested externals with the other encodings' caches.
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass
        if len(cache[1]) < l_get_size:
            # Not enough leechers: hand out everything (plus seeds to leechers).
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            if not is_seed:
                # Seeds fill the remainder of the response for leechers.
                peerdata = cache[2][l_get_size-rsize:]
                del cache[2][l_get_size-rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 0:
            data['peers'] = ''.join(peerdata)
        elif return_type == 1:
            data['crypto_flags'] = "0x01"*len(peerdata)
            data['peers'] = ''.join(peerdata)
        elif return_type == 2:
            data['crypto_flags'] = ''.join([p[1] for p in peerdata])
            data['peers'] = ''.join([p[0] for p in peerdata])
        else:
            data['peers'] = peerdata
        return data
1290-
1291-
    def get(self, connection, path, headers):
        """HTTP GET dispatcher: routes /, /file, /favicon.ico, /scrape and
        /announce requests and returns an HTTP response tuple
        (code, message, headers, body).
        """
        real_ip = connection.get_ip()
        ip = real_ip
        # Normalize to IPv4 where possible; compact responses need IPv4.
        if is_ipv4(ip):
            ipv4 = True
        else:
            try:
                ip = ipv6_to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        if ( (self.allowed_IPs and not self.allowed_IPs.includes(ip))
             or (self.banned_IPs and self.banned_IPs.includes(ip)) ):
            return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                bencode({'failure reason':
                'your IP is not allowed on this tracker'}))

        # Trust proxy-forwarded client IPs unless overrides are disabled.
        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip
            try:
                # NOTE(review): to_ipv4 is not defined in this chunk (the
                # branch above uses ipv6_to_ipv4) -- confirm it exists among
                # the module imports.
                ip = to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        paramslist = {}     # query key -> list of values (repeats allowed)
        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            (scheme, netloc, path, pars, query, fragment) = urlparse(path)
            if self.uq_broken == 1:
                path = path.replace('+',' ')
                query = query.replace('+',' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s:
                    i = s.index('=')
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i+1:])]

            if path == '' or path == 'index.html':
                return self.get_infopage()
            if (path == 'file'):
                return self.get_file(params('info_hash'))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200, 'OK', {'Content-Type' : 'image/x-icon'}, self.favicon)

            # automated access from here on

            if path in ('scrape', 'scrape.php', 'tracker.php/scrape'):
                return self.get_scrape(paramslist)

            if not path in ('announce', 'announce.php', 'tracker.php/announce'):
                return (404, 'Not Found', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, alas)

            # main tracker function

            filtered = self.Filter.check(real_ip, paramslist, headers)
            if filtered:
                return (400, 'Not Authorized', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'failure reason': filtered}))

            infohash = params('info_hash')
            if not infohash:
                raise ValueError, 'no info hash'

            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                return notallowed

            event = params('event')

            rsize = self.add_data(infohash, event, ip, paramslist)

        except ValueError, e:
            # Any validation failure above becomes a client error.
            return (400, 'Bad Request', {'Content-Type': 'text/plain'},
                'you sent me garbage - ' + str(e))

        if self.aggregate_forward and not paramslist.has_key('tracker'):
            self.aggregate_senddata(query)

        if self.is_aggregator:      # don't return peer data here
            return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'},
                    bencode({'response': 'OK'}))

        # Pick the peer-list encoding (see peerlist/becache levels).
        if params('compact') and ipv4:
            if params('requirecrypto'):
                return_type = 1
            elif params('supportcrypto'):
                return_type = 2
            else:
                return_type = 0
        elif self.config['compact_reqd'] and ipv4:
            return (400, 'Bad Request', {'Content-Type': 'text/plain'},
                'your client is outdated, please upgrade')
        elif params('no_peer_id'):
            return_type = 4
        else:
            return_type = 3

        data = self.peerlist(infohash, event=='stopped',
                             params('tracker'), not params('left'),
                             return_type, rsize, params('supportcrypto'))

        if paramslist.has_key('scrape'):    # deprecated
            data['scrape'] = self.scrapedata(infohash, False)

        if self.dedicated_seed_id:
            # NOTE(review): params() returns strings, so `params('left') == 0`
            # can never be true for a real query value -- confirm intent.
            if params('seed_id') == self.dedicated_seed_id and params('left') == 0:
                self.is_seeded[infohash] = True
            if params('check_seeded') and self.is_seeded.get(infohash):
                data['seeded'] = 1

        return (200, 'OK', {'Content-Type': 'text/plain', 'Pragma': 'no-cache'}, bencode(data))
1412-
1413-
1414- def natcheckOK(self, infohash, peerid, ip, port, peer):
1415- seed = not peer['left']
1416- bc = self.becache.setdefault(infohash,self.cache_default)
1417- cp = compact_peer_info(ip, port)
1418- reqc = peer['requirecrypto']
1419- bc[2][seed][peerid] = (cp,chr(reqc))
1420- if peer['supportcrypto']:
1421- bc[1][seed][peerid] = cp
1422- if not reqc:
1423- bc[0][seed][peerid] = cp
1424- if not self.config['compact_reqd']:
1425- bc[3][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port,
1426- 'peer id': peerid}))
1427- bc[4][seed][peerid] = Bencached(bencode({'ip': ip, 'port': port}))
1428-
1429-
1430- def natchecklog(self, peerid, ip, port, result):
1431- year, month, day, hour, minute, second, a, b, c = localtime(time())
1432- print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (
1433- ip, quote(peerid), day, months[month], year, hour, minute, second,
1434- ip, port, result)
1435-
1436- def connectback_result(self, result, downloadid, peerid, ip, port):
1437- record = self.downloads.get(downloadid,{}).get(peerid)
1438- if ( record is None
1439- or (record['ip'] != ip and record.get('given ip') != ip)
1440- or record['port'] != port ):
1441- if self.config['log_nat_checks']:
1442- self.natchecklog(peerid, ip, port, 404)
1443- return
1444- if self.config['log_nat_checks']:
1445- if result:
1446- x = 200
1447- else:
1448- x = 503
1449- self.natchecklog(peerid, ip, port, x)
1450- if not record.has_key('nat'):
1451- record['nat'] = int(not result)
1452- if result:
1453- self.natcheckOK(downloadid,peerid,ip,port,record)
1454- elif result and record['nat']:
1455- record['nat'] = 0
1456- self.natcheckOK(downloadid,peerid,ip,port,record)
1457- elif not result:
1458- record['nat'] += 1
1459-
1460-
1461- def remove_from_state(self, *l):
1462- for s in l:
1463- try:
1464- del self.state[s]
1465- except:
1466- pass
1467-
1468- def save_state(self):
1469- self.rawserver.add_task(self.save_state, self.save_dfile_interval)
1470- h = open(self.dfile, 'wb')
1471- h.write(bencode(self.state))
1472- h.close()
1473-
1474-
1475- def parse_allowed(self):
1476- self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
1477-
1478- if self.config['allowed_dir']:
1479- r = parsedir( self.config['allowed_dir'], self.allowed,
1480- self.allowed_dir_files, self.allowed_dir_blocked,
1481- [".torrent"] )
1482- ( self.allowed, self.allowed_dir_files, self.allowed_dir_blocked,
1483- added, garbage2 ) = r
1484-
1485- self.state['allowed'] = self.allowed
1486- self.state['allowed_dir_files'] = self.allowed_dir_files
1487-
1488- self.t2tlist.parse(self.allowed)
1489-
1490- else:
1491- f = self.config['allowed_list']
1492- if self.allowed_list_mtime == os.path.getmtime(f):
1493- return
1494- try:
1495- r = parsetorrentlist(f, self.allowed)
1496- (self.allowed, added, garbage2) = r
1497- self.state['allowed_list'] = self.allowed
1498- except (IOError, OSError):
1499- print '**warning** unable to read allowed torrent list'
1500- return
1501- self.allowed_list_mtime = os.path.getmtime(f)
1502-
1503- for infohash in added.keys():
1504- self.downloads.setdefault(infohash, {})
1505- self.completed.setdefault(infohash, 0)
1506- self.seedcount.setdefault(infohash, 0)
1507-
1508-
1509- def read_ip_lists(self):
1510- self.rawserver.add_task(self.read_ip_lists,self.parse_dir_interval)
1511-
1512- f = self.config['allowed_ips']
1513- if f and self.allowed_ip_mtime != os.path.getmtime(f):
1514- self.allowed_IPs = IP_List()
1515- try:
1516- self.allowed_IPs.read_fieldlist(f)
1517- self.allowed_ip_mtime = os.path.getmtime(f)
1518- except (IOError, OSError):
1519- print '**warning** unable to read allowed_IP list'
1520-
1521- f = self.config['banned_ips']
1522- if f and self.banned_ip_mtime != os.path.getmtime(f):
1523- self.banned_IPs = IP_Range_List()
1524- try:
1525- self.banned_IPs.read_rangelist(f)
1526- self.banned_ip_mtime = os.path.getmtime(f)
1527- except (IOError, OSError):
1528- print '**warning** unable to read banned_IP list'
1529-
1530-
1531- def delete_peer(self, infohash, peerid):
1532- dls = self.downloads[infohash]
1533- peer = dls[peerid]
1534- if not peer['left']:
1535- self.seedcount[infohash] -= 1
1536- if not peer.get('nat',-1):
1537- l = self.becache[infohash]
1538- y = not peer['left']
1539- for x in l:
1540- if x[y].has_key(peerid):
1541- del x[y][peerid]
1542- del self.times[infohash][peerid]
1543- del dls[peerid]
1544-
1545- def expire_downloaders(self):
1546- for x in self.times.keys():
1547- for myid, t in self.times[x].items():
1548- if t < self.prevtime:
1549- self.delete_peer(x,myid)
1550- self.prevtime = clock()
1551- if (self.keep_dead != 1):
1552- for key, value in self.downloads.items():
1553- if len(value) == 0 and (
1554- self.allowed is None or not self.allowed.has_key(key) ):
1555- del self.times[key]
1556- del self.downloads[key]
1557- del self.seedcount[key]
1558- self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
1559-
1560-
1561-def track(args):
1562- if len(args) == 0:
1563- print formatDefinitions(defaults, 80)
1564- return
1565- try:
1566- config, files = parseargs(args, defaults, 0, 0)
1567- except ValueError, e:
1568- print 'error: ' + str(e)
1569- print 'run with no arguments for parameter explanations'
1570- return
1571- r = RawServer(Event(), config['timeout_check_interval'],
1572- config['socket_timeout'], ipv6_enable = config['ipv6_enabled'])
1573- t = Tracker(config, r)
1574- r.bind(config['port'], config['bind'],
1575- reuse = True, ipv6_socket_style = config['ipv6_binds_v4'])
1576- r.listen_forever(HTTPHandler(t.get, config['min_time_between_log_flushes']))
1577- t.save_state()
1578- print '# Shutting down: ' + isotime()
1579-
1580-def size_format(s):
1581- if (s < 1024):
1582- r = str(s) + 'B'
1583- elif (s < 1048576):
1584- r = str(int(s/1024)) + 'KiB'
1585- elif (s < 1073741824L):
1586- r = str(int(s/1048576)) + 'MiB'
1587- elif (s < 1099511627776L):
1588- r = str(int((s/1073741824.0)*100.0)/100.0) + 'GiB'
1589- else:
1590- r = str(int((s/1099511627776.0)*100.0)/100.0) + 'TiB'
1591- return(r)
1592-
1593
1594=== removed directory '.pc/06_README_portchange.dpatch'
1595=== removed file '.pc/06_README_portchange.dpatch/README.txt'
1596--- .pc/06_README_portchange.dpatch/README.txt 2010-03-21 14:36:30 +0000
1597+++ .pc/06_README_portchange.dpatch/README.txt 1970-01-01 00:00:00 +0000
1598@@ -1,110 +0,0 @@
1599-BitTorrent is a tool for distributing files. It's extremely
1600-easy to use - downloads are started by clicking on hyperlinks.
1601-Whenever more than one person is downloading at once
1602-they send pieces of the file(s) to each other, thus relieving
1603-the central server's bandwidth burden. Even with many
1604-simultaneous downloads, the upload burden on the central server
1605-remains quite small, since each new downloader introduces new
1606-upload capacity.
1607-
1608-Windows web browser support is added by running an installer.
1609-A prebuilt one is available, but instructions for building it
1610-yourself are in BUILD.windows.txt
1611-
1612-Instructions for Unix installation are in INSTALL.unix.txt
1613-
1614-To start hosting -
1615-
1616-1) start running a tracker
1617-
1618-First, you need a tracker. If you're on a dynamic IP or otherwise
1619-unreliable connection, you should find someone else's tracker and
1620-use that. Otherwise, follow the rest of this step.
1621-
1622-Trackers refer downloaders to each other. The load on the tracker
1623-is very small, so you only need one for all your files.
1624-
1625-To run a tracker, execute the command bttrack.py Here is an example -
1626-
1627-./bttrack.py --port 6969 --dfile dstate
1628-
1629---dfile is where persistent information is kept on the tracker across
1630-invocations. It makes everything start working again immediately if
1631-you restart the tracker. A new one will be created if it doesn't exist
1632-already.
1633-
1634-The tracker must be on a net-addressible box, and you must know the
1635-ip number or dns name of it.
1636-
1637-The tracker outputs web logs to standard out. You can get information
1638-about the files it's currently serving by getting its index page.
1639-
1640-2) create a metainfo file using btmakemetafile.py
1641-
1642-To generate a metainfo file, run the publish btmakemetafile and give
1643-it the file you want metainfo for and the url of the tracker
1644-
1645-./btmakemetafile.py http://my.tracker:6969/announce myfile.ext
1646-
1647-This will generate a file called myfile.ext.torrent
1648-
1649-Make sure to include the port number in the tracker url if it isn't 80.
1650-
1651-This command may take a while to scan over the whole file hashing it.
1652-
1653-The /announce path is special and hard-coded into the tracker.
1654-Make sure to give the domain or ip your tracker is on instead of
1655-my.tracker.
1656-
1657-You can use either a dns name or an IP address in the tracker url.
1658-
1659-3) associate .torrent with application/x-bittorrent on your web server
1660-
1661-The way you do this is dependent on the particular web server you're using.
1662-
1663-You must have a web server which can serve ordinary static files and is
1664-addressable from the internet at large.
1665-
1666-4) put the newly made .torrent file on your web server
1667-
1668-Note that the file name you choose on the server must end in .torrent, so
1669-it gets associated with the right mimetype.
1670-
1671-5) put up a static page which links to the location you uploaded to in step 4
1672-
1673-The file you uploaded in step 4 is linked to using an ordinary url.
1674-
1675-6) start a downloader as a resume on the complete file
1676-
1677-You have to run a downloader which already has the complete file,
1678-so new downloaders have a place to get it from. Here's an example -
1679-
1680-./btdownloadheadless.py --url http://my.server/myfile.torrent --saveas myfile.ext
1681-
1682-Make sure the saveas argument points to the already complete file.
1683-
1684-If you're running the complete downloader on the same machine or LAN as
1685-the tracker, give a --ip parameter to the complete downloader. The --ip
1686-parameter can be either an IP address or DNS name.
1687-
1688-BitTorrent defaults to port 6881. If it can't use 6881, (probably because
1689-another download is happening) it tries 6882, then 6883, etc. It gives up
1690-after 6889.
1691-
1692-7) you're done!
1693-
1694-Now you just have to get people downloading! Refer them to the page you
1695-created in step 5.
1696-
1697-BitTorrent can also publish whole directories - simply point
1698-btmakemetafile.py at the directory with files in it, they'll be published
1699-as one unit. All files in subdirectories will be included, although files
1700-and directories named 'CVS' and 'core' are ignored.
1701-
1702-If you have any questions, try the web site or mailing list -
1703-
1704-http://bitconjurer.org/BitTorrent/
1705-
1706-http://groups.yahoo.com/group/BitTorrent
1707-
1708-You can also often find me, Bram, in #bittorrent of irc.freenode.net
1709
1710=== removed directory '.pc/07_change_report_address.dpatch'
1711=== removed directory '.pc/07_change_report_address.dpatch/BitTornado'
1712=== removed file '.pc/07_change_report_address.dpatch/BitTornado/__init__.py'
1713--- .pc/07_change_report_address.dpatch/BitTornado/__init__.py 2010-03-21 14:36:30 +0000
1714+++ .pc/07_change_report_address.dpatch/BitTornado/__init__.py 1970-01-01 00:00:00 +0000
1715@@ -1,63 +0,0 @@
1716-product_name = 'BitTornado'
1717-version_short = 'T-0.3.18'
1718-
1719-version = version_short+' ('+product_name+')'
1720-report_email = version_short+'@degreez.net'
1721-
1722-from types import StringType
1723-from sha import sha
1724-from time import time, clock
1725-try:
1726- from os import getpid
1727-except ImportError:
1728- def getpid():
1729- return 1
1730-
1731-mapbase64 = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz.-'
1732-
1733-_idprefix = version_short[0]
1734-for subver in version_short[2:].split('.'):
1735- try:
1736- subver = int(subver)
1737- except:
1738- subver = 0
1739- _idprefix += mapbase64[subver]
1740-_idprefix += ('-' * (6-len(_idprefix)))
1741-_idrandom = [None]
1742-
1743-def resetPeerIDs():
1744- try:
1745- f = open('/dev/urandom','rb')
1746- x = f.read(20)
1747- f.close()
1748- except:
1749- x = ''
1750-
1751- l1 = 0
1752- t = clock()
1753- while t == clock():
1754- l1 += 1
1755- l2 = 0
1756- t = long(time()*100)
1757- while t == long(time()*100):
1758- l2 += 1
1759- l3 = 0
1760- if l2 < 1000:
1761- t = long(time()*10)
1762- while t == long(clock()*10):
1763- l3 += 1
1764- x += ( repr(time()) + '/' + str(time()) + '/'
1765- + str(l1) + '/' + str(l2) + '/' + str(l3) + '/'
1766- + str(getpid()) )
1767-
1768- s = ''
1769- for i in sha(x).digest()[-11:]:
1770- s += mapbase64[ord(i) & 0x3F]
1771- _idrandom[0] = s
1772-
1773-resetPeerIDs()
1774-
1775-def createPeerID(ins = '---'):
1776- assert type(ins) is StringType
1777- assert len(ins) == 3
1778- return _idprefix + ins + _idrandom[0]
1779
1780=== removed directory '.pc/08_btdownloadcurses_indent.dpatch'
1781=== removed file '.pc/08_btdownloadcurses_indent.dpatch/btdownloadcurses.py'
1782--- .pc/08_btdownloadcurses_indent.dpatch/btdownloadcurses.py 2010-03-21 14:36:30 +0000
1783+++ .pc/08_btdownloadcurses_indent.dpatch/btdownloadcurses.py 1970-01-01 00:00:00 +0000
1784@@ -1,407 +0,0 @@
1785-#!/usr/bin/env python
1786-
1787-# Written by Henry 'Pi' James
1788-# see LICENSE.txt for license information
1789-
1790-SPEW_SCROLL_RATE = 1
1791-
1792-from BitTornado import PSYCO
1793-if PSYCO.psyco:
1794- try:
1795- import psyco
1796- assert psyco.__version__ >= 0x010100f0
1797- psyco.full()
1798- except:
1799- pass
1800-
1801-from BitTornado.download_bt1 import BT1Download, defaults, parse_params, get_usage, get_response
1802-from BitTornado.RawServer import RawServer, UPnP_ERROR
1803-from random import seed
1804-from socket import error as socketerror
1805-from BitTornado.bencode import bencode
1806-from BitTornado.natpunch import UPnP_test
1807-from threading import Event
1808-from os.path import abspath
1809-from signal import signal, SIGWINCH
1810-from sha import sha
1811-from sys import argv, exit
1812-import sys
1813-from time import time, strftime
1814-from BitTornado.clock import clock
1815-from BitTornado import createPeerID, version
1816-from BitTornado.ConfigDir import ConfigDir
1817-
1818-try:
1819- import curses
1820- import curses.panel
1821- from curses.wrapper import wrapper as curses_wrapper
1822- from signal import signal, SIGWINCH
1823-except:
1824- print 'Textmode GUI initialization failed, cannot proceed.'
1825- print
1826- print 'This download interface requires the standard Python module ' \
1827- '"curses", which is unfortunately not available for the native ' \
1828- 'Windows port of Python. It is however available for the Cygwin ' \
1829- 'port of Python, running on all Win32 systems (www.cygwin.com).'
1830- print
1831- print 'You may still use "btdownloadheadless.py" to download.'
1832- sys.exit(1)
1833-
1834-assert sys.version >= '2', "Install Python 2.0 or greater"
1835-try:
1836- True
1837-except:
1838- True = 1
1839- False = 0
1840-
1841-def fmttime(n):
1842- if n == 0:
1843- return 'download complete!'
1844- try:
1845- n = int(n)
1846- assert n >= 0 and n < 5184000 # 60 days
1847- except:
1848- return '<unknown>'
1849- m, s = divmod(n, 60)
1850- h, m = divmod(m, 60)
1851- return 'finishing in %d:%02d:%02d' % (h, m, s)
1852-
1853-def fmtsize(n):
1854- s = str(n)
1855- size = s[-3:]
1856- while len(s) > 3:
1857- s = s[:-3]
1858- size = '%s,%s' % (s[-3:], size)
1859- if n > 999:
1860- unit = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
1861- i = 1
1862- while i + 1 < len(unit) and (n >> 10) >= 999:
1863- i += 1
1864- n >>= 10
1865- n = float(n) / (1 << 10)
1866- size = '%s (%.2f %s)' % (size, n, unit[i])
1867- return size
1868-
1869-
1870-class CursesDisplayer:
1871- def __init__(self, scrwin, errlist, doneflag):
1872- self.scrwin = scrwin
1873- self.errlist = errlist
1874- self.doneflag = doneflag
1875-
1876- signal(SIGWINCH, self.winch_handler)
1877- self.changeflag = Event()
1878-
1879- self.done = 0
1880- self.file = ''
1881- self.fileSize = ''
1882- self.activity = ''
1883- self.status = ''
1884- self.progress = ''
1885- self.downloadTo = ''
1886- self.downRate = '---'
1887- self.upRate = '---'
1888- self.shareRating = ''
1889- self.seedStatus = ''
1890- self.peerStatus = ''
1891- self.errors = []
1892- self.last_update_time = 0
1893- self.spew_scroll_time = 0
1894- self.spew_scroll_pos = 0
1895-
1896- self._remake_window()
1897-
1898- def winch_handler(self, signum, stackframe):
1899- self.changeflag.set()
1900- curses.endwin()
1901- self.scrwin.refresh()
1902- self.scrwin = curses.newwin(0, 0, 0, 0)
1903- self._remake_window()
1904-
1905- def _remake_window(self):
1906- self.scrh, self.scrw = self.scrwin.getmaxyx()
1907- self.scrpan = curses.panel.new_panel(self.scrwin)
1908- self.labelh, self.labelw, self.labely, self.labelx = 11, 9, 1, 2
1909- self.labelwin = curses.newwin(self.labelh, self.labelw,
1910- self.labely, self.labelx)
1911- self.labelpan = curses.panel.new_panel(self.labelwin)
1912- self.fieldh, self.fieldw, self.fieldy, self.fieldx = (
1913- self.labelh, self.scrw-2 - self.labelw-3,
1914- 1, self.labelw+3)
1915- self.fieldwin = curses.newwin(self.fieldh, self.fieldw,
1916- self.fieldy, self.fieldx)
1917- self.fieldwin.nodelay(1)
1918- self.fieldpan = curses.panel.new_panel(self.fieldwin)
1919- self.spewh, self.speww, self.spewy, self.spewx = (
1920- self.scrh - self.labelh - 2, self.scrw - 3, 1 + self.labelh, 2)
1921- self.spewwin = curses.newwin(self.spewh, self.speww,
1922- self.spewy, self.spewx)
1923- self.spewpan = curses.panel.new_panel(self.spewwin)
1924- try:
1925- self.scrwin.border(ord('|'),ord('|'),ord('-'),ord('-'),ord(' '),ord(' '),ord(' '),ord(' '))
1926- except:
1927- pass
1928- self.labelwin.addstr(0, 0, 'file:')
1929- self.labelwin.addstr(1, 0, 'size:')
1930- self.labelwin.addstr(2, 0, 'dest:')
1931- self.labelwin.addstr(3, 0, 'progress:')
1932- self.labelwin.addstr(4, 0, 'status:')
1933- self.labelwin.addstr(5, 0, 'dl speed:')
1934- self.labelwin.addstr(6, 0, 'ul speed:')
1935- self.labelwin.addstr(7, 0, 'sharing:')
1936- self.labelwin.addstr(8, 0, 'seeds:')
1937- self.labelwin.addstr(9, 0, 'peers:')
1938- curses.panel.update_panels()
1939- curses.doupdate()
1940- self.changeflag.clear()
1941-
1942-
1943- def finished(self):
1944- self.done = 1
1945- self.activity = 'download succeeded!'
1946- self.downRate = '---'
1947- self.display(fractionDone = 1)
1948-
1949- def failed(self):
1950- self.done = 1
1951- self.activity = 'download failed!'
1952- self.downRate = '---'
1953- self.display()
1954-
1955- def error(self, errormsg):
1956- newerrmsg = strftime('[%H:%M:%S] ') + errormsg
1957- self.errors.append(newerrmsg)
1958- self.errlist.append(newerrmsg)
1959- self.display()
1960-
1961- def display(self, dpflag = Event(), fractionDone = None, timeEst = None,
1962- downRate = None, upRate = None, activity = None,
1963- statistics = None, spew = None, **kws):
1964-
1965- inchar = self.fieldwin.getch()
1966- if inchar == 12: # ^L
1967- self._remake_window()
1968- elif inchar in (ord('q'),ord('Q')):
1969- self.doneflag.set()
1970-
1971- if activity is not None and not self.done:
1972- self.activity = activity
1973- elif timeEst is not None:
1974- self.activity = fmttime(timeEst)
1975- if self.changeflag.isSet():
1976- return
1977- if self.last_update_time + 0.1 > clock() and fractionDone not in (0.0, 1.0) and activity is not None:
1978- return
1979- self.last_update_time = clock()
1980- if fractionDone is not None:
1981- blocknum = int(self.fieldw * fractionDone)
1982- self.progress = blocknum * '#' + (self.fieldw - blocknum) * '_'
1983- self.status = '%s (%.1f%%)' % (self.activity, fractionDone * 100)
1984- else:
1985- self.status = self.activity
1986- if downRate is not None:
1987- self.downRate = '%.1f KB/s' % (float(downRate) / (1 << 10))
1988- if upRate is not None:
1989- self.upRate = '%.1f KB/s' % (float(upRate) / (1 << 10))
1990- if statistics is not None:
1991- if (statistics.shareRating < 0) or (statistics.shareRating > 100):
1992- self.shareRating = 'oo (%.1f MB up / %.1f MB down)' % (float(statistics.upTotal) / (1<<20), float(statistics.downTotal) / (1<<20))
1993- else:
1994- self.shareRating = '%.3f (%.1f MB up / %.1f MB down)' % (statistics.shareRating, float(statistics.upTotal) / (1<<20), float(statistics.downTotal) / (1<<20))
1995- if not self.done:
1996- self.seedStatus = '%d seen now, plus %.3f distributed copies' % (statistics.numSeeds,0.001*int(1000*statistics.numCopies2))
1997- else:
1998- self.seedStatus = '%d seen recently, plus %.3f distributed copies' % (statistics.numOldSeeds,0.001*int(1000*statistics.numCopies))
1999- self.peerStatus = '%d seen now, %.1f%% done at %.1f kB/s' % (statistics.numPeers,statistics.percentDone,float(statistics.torrentRate) / (1 << 10))
2000-
2001- self.fieldwin.erase()
2002- self.fieldwin.addnstr(0, 0, self.file, self.fieldw, curses.A_BOLD)
2003- self.fieldwin.addnstr(1, 0, self.fileSize, self.fieldw)
2004- self.fieldwin.addnstr(2, 0, self.downloadTo, self.fieldw)
2005- if self.progress:
2006- self.fieldwin.addnstr(3, 0, self.progress, self.fieldw, curses.A_BOLD)
2007- self.fieldwin.addnstr(4, 0, self.status, self.fieldw)
2008- self.fieldwin.addnstr(5, 0, self.downRate, self.fieldw)
2009- self.fieldwin.addnstr(6, 0, self.upRate, self.fieldw)
2010- self.fieldwin.addnstr(7, 0, self.shareRating, self.fieldw)
2011- self.fieldwin.addnstr(8, 0, self.seedStatus, self.fieldw)
2012- self.fieldwin.addnstr(9, 0, self.peerStatus, self.fieldw)
2013-
2014- self.spewwin.erase()
2015-
2016- if not spew:
2017- errsize = self.spewh
2018- if self.errors:
2019- self.spewwin.addnstr(0, 0, "error(s):", self.speww, curses.A_BOLD)
2020- errsize = len(self.errors)
2021- displaysize = min(errsize, self.spewh)
2022- displaytop = errsize - displaysize
2023- for i in range(displaysize):
2024- self.spewwin.addnstr(i, self.labelw, self.errors[displaytop + i],
2025- self.speww-self.labelw-1, curses.A_BOLD)
2026- else:
2027- if self.errors:
2028- self.spewwin.addnstr(0, 0, "error:", self.speww, curses.A_BOLD)
2029- self.spewwin.addnstr(0, self.labelw, self.errors[-1],
2030- self.speww-self.labelw-1, curses.A_BOLD)
2031- self.spewwin.addnstr(2, 0, " # IP Upload Download Completed Speed", self.speww, curses.A_BOLD)
2032-
2033-
2034- if self.spew_scroll_time + SPEW_SCROLL_RATE < clock():
2035- self.spew_scroll_time = clock()
2036- if len(spew) > self.spewh-5 or self.spew_scroll_pos > 0:
2037- self.spew_scroll_pos += 1
2038- if self.spew_scroll_pos > len(spew):
2039- self.spew_scroll_pos = 0
2040-
2041- for i in range(len(spew)):
2042- spew[i]['lineno'] = i+1
2043- spew.append({'lineno': None})
2044- spew = spew[self.spew_scroll_pos:] + spew[:self.spew_scroll_pos]
2045-
2046- for i in range(min(self.spewh - 5, len(spew))):
2047- if not spew[i]['lineno']:
2048- continue
2049- self.spewwin.addnstr(i+3, 0, '%3d' % spew[i]['lineno'], 3)
2050- self.spewwin.addnstr(i+3, 4, spew[i]['ip']+spew[i]['direction'], 16)
2051- if spew[i]['uprate'] > 100:
2052- self.spewwin.addnstr(i+3, 20, '%6.0f KB/s' % (float(spew[i]['uprate']) / 1000), 11)
2053- self.spewwin.addnstr(i+3, 32, '-----', 5)
2054- if spew[i]['uinterested'] == 1:
2055- self.spewwin.addnstr(i+3, 33, 'I', 1)
2056- if spew[i]['uchoked'] == 1:
2057- self.spewwin.addnstr(i+3, 35, 'C', 1)
2058- if spew[i]['downrate'] > 100:
2059- self.spewwin.addnstr(i+3, 38, '%6.0f KB/s' % (float(spew[i]['downrate']) / 1000), 11)
2060- self.spewwin.addnstr(i+3, 50, '-------', 7)
2061- if spew[i]['dinterested'] == 1:
2062- self.spewwin.addnstr(i+3, 51, 'I', 1)
2063- if spew[i]['dchoked'] == 1:
2064- self.spewwin.addnstr(i+3, 53, 'C', 1)
2065- if spew[i]['snubbed'] == 1:
2066- self.spewwin.addnstr(i+3, 55, 'S', 1)
2067- self.spewwin.addnstr(i+3, 58, '%5.1f%%' % (float(int(spew[i]['completed']*1000))/10), 6)
2068- if spew[i]['speed'] is not None:
2069- self.spewwin.addnstr(i+3, 64, '%5.0f KB/s' % (float(spew[i]['speed'])/1000), 10)
2070-
2071- if statistics is not None:
2072- self.spewwin.addnstr(self.spewh-1, 0,
2073- 'downloading %d pieces, have %d fragments, %d of %d pieces completed'
2074- % ( statistics.storage_active, statistics.storage_dirty,
2075- statistics.storage_numcomplete,
2076- statistics.storage_totalpieces ), self.speww-1 )
2077-
2078- curses.panel.update_panels()
2079- curses.doupdate()
2080- dpflag.set()
2081-
2082- def chooseFile(self, default, size, saveas, dir):
2083- self.file = default
2084- self.fileSize = fmtsize(size)
2085- if saveas == '':
2086- saveas = default
2087- self.downloadTo = abspath(saveas)
2088- return saveas
2089-
2090-def run(scrwin, errlist, params):
2091- doneflag = Event()
2092- d = CursesDisplayer(scrwin, errlist, doneflag)
2093- try:
2094- while 1:
2095- configdir = ConfigDir('downloadcurses')
2096- defaultsToIgnore = ['responsefile', 'url', 'priority']
2097- configdir.setDefaults(defaults,defaultsToIgnore)
2098- configdefaults = configdir.loadConfig()
2099- defaults.append(('save_options',0,
2100- "whether to save the current options as the new default configuration " +
2101- "(only for btdownloadcurses.py)"))
2102- try:
2103- config = parse_params(params, configdefaults)
2104- except ValueError, e:
2105- d.error('error: ' + str(e) + '\nrun with no args for parameter explanations')
2106- break
2107- if not config:
2108- d.error(get_usage(defaults, d.fieldw, configdefaults))
2109- break
2110- if config['save_options']:
2111- configdir.saveConfig(config)
2112- configdir.deleteOldCacheData(config['expire_cache_data'])
2113-
2114- myid = createPeerID()
2115- seed(myid)
2116-
2117- rawserver = RawServer(doneflag, config['timeout_check_interval'],
2118- config['timeout'], ipv6_enable = config['ipv6_enabled'],
2119- failfunc = d.failed, errorfunc = d.error)
2120-
2121- upnp_type = UPnP_test(config['upnp_nat_access'])
2122- while True:
2123- try:
2124- listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
2125- config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
2126- upnp = upnp_type, randomizer = config['random_port'])
2127- break
2128- except socketerror, e:
2129- if upnp_type and e == UPnP_ERROR:
2130- d.error('WARNING: COULD NOT FORWARD VIA UPnP')
2131- upnp_type = 0
2132- continue
2133- d.error("Couldn't listen - " + str(e))
2134- d.failed()
2135- return
2136-
2137- response = get_response(config['responsefile'], config['url'], d.error)
2138- if not response:
2139- break
2140-
2141- infohash = sha(bencode(response['info'])).digest()
2142-
2143- dow = BT1Download(d.display, d.finished, d.error, d.error, doneflag,
2144- config, response, infohash, myid, rawserver, listen_port,
2145- configdir)
2146-
2147- if not dow.saveAs(d.chooseFile):
2148- break
2149-
2150- if not dow.initFiles(old_style = True):
2151- break
2152- if not dow.startEngine():
2153- dow.shutdown()
2154- break
2155- dow.startRerequester()
2156- dow.autoStats()
2157-
2158- if not dow.am_I_finished():
2159- d.display(activity = 'connecting to peers')
2160- rawserver.listen_forever(dow.getPortHandler())
2161- d.display(activity = 'shutting down')
2162- dow.shutdown()
2163- break
2164-
2165- except KeyboardInterrupt:
2166- # ^C to exit..
2167- pass
2168- try:
2169- rawserver.shutdown()
2170- except:
2171- pass
2172- if not d.done:
2173- d.failed()
2174-
2175-
2176-if __name__ == '__main__':
2177- if argv[1:] == ['--version']:
2178- print version
2179- exit(0)
2180- if len(argv) <= 1:
2181- print "Usage: btdownloadcurses.py <global options>\n"
2182- print get_usage(defaults)
2183- exit(1)
2184-
2185- errlist = []
2186- curses_wrapper(run, errlist, argv[1:])
2187-
2188- if errlist:
2189- print "These errors occurred during execution:"
2190- for error in errlist:
2191- print error
2192\ No newline at end of file
2193
2194=== removed directory '.pc/09_timtuckerfixes.dpatch'
2195=== removed directory '.pc/09_timtuckerfixes.dpatch/BitTornado'
2196=== removed directory '.pc/09_timtuckerfixes.dpatch/BitTornado/BT1'
2197=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Connecter.py'
2198--- .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Connecter.py 2010-03-21 14:36:30 +0000
2199+++ .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Connecter.py 1970-01-01 00:00:00 +0000
2200@@ -1,328 +0,0 @@
2201-# Written by Bram Cohen
2202-# see LICENSE.txt for license information
2203-
2204-from BitTornado.bitfield import Bitfield
2205-from BitTornado.clock import clock
2206-from binascii import b2a_hex
2207-
2208-try:
2209- True
2210-except:
2211- True = 1
2212- False = 0
2213-
2214-DEBUG1 = False
2215-DEBUG2 = False
2216-
2217-def toint(s):
2218- return long(b2a_hex(s), 16)
2219-
2220-def tobinary(i):
2221- return (chr(i >> 24) + chr((i >> 16) & 0xFF) +
2222- chr((i >> 8) & 0xFF) + chr(i & 0xFF))
2223-
2224-CHOKE = chr(0)
2225-UNCHOKE = chr(1)
2226-INTERESTED = chr(2)
2227-NOT_INTERESTED = chr(3)
2228-# index
2229-HAVE = chr(4)
2230-# index, bitfield
2231-BITFIELD = chr(5)
2232-# index, begin, length
2233-REQUEST = chr(6)
2234-# index, begin, piece
2235-PIECE = chr(7)
2236-# index, begin, piece
2237-CANCEL = chr(8)
2238-
2239-class Connection:
2240- def __init__(self, connection, connecter, ccount):
2241- self.connection = connection
2242- self.connecter = connecter
2243- self.ccount = ccount
2244- self.got_anything = False
2245- self.next_upload = None
2246- self.outqueue = []
2247- self.partial_message = None
2248- self.download = None
2249- self.send_choke_queued = False
2250- self.just_unchoked = None
2251-
2252- def get_ip(self, real=False):
2253- return self.connection.get_ip(real)
2254-
2255- def get_id(self):
2256- return self.connection.get_id()
2257-
2258- def get_readable_id(self):
2259- return self.connection.get_readable_id()
2260-
2261- def close(self):
2262- if DEBUG1:
2263- print (self.ccount,'connection closed')
2264- self.connection.close()
2265-
2266- def is_locally_initiated(self):
2267- return self.connection.is_locally_initiated()
2268-
2269- def is_encrypted(self):
2270- return self.connection.is_encrypted()
2271-
2272- def send_interested(self):
2273- self._send_message(INTERESTED)
2274-
2275- def send_not_interested(self):
2276- self._send_message(NOT_INTERESTED)
2277-
2278- def send_choke(self):
2279- if self.partial_message:
2280- self.send_choke_queued = True
2281- else:
2282- self._send_message(CHOKE)
2283- self.upload.choke_sent()
2284- self.just_unchoked = 0
2285-
2286- def send_unchoke(self):
2287- if self.send_choke_queued:
2288- self.send_choke_queued = False
2289- if DEBUG1:
2290- print (self.ccount,'CHOKE SUPPRESSED')
2291- else:
2292- self._send_message(UNCHOKE)
2293- if ( self.partial_message or self.just_unchoked is None
2294- or not self.upload.interested or self.download.active_requests ):
2295- self.just_unchoked = 0
2296- else:
2297- self.just_unchoked = clock()
2298-
2299- def send_request(self, index, begin, length):
2300- self._send_message(REQUEST + tobinary(index) +
2301- tobinary(begin) + tobinary(length))
2302- if DEBUG1:
2303- print (self.ccount,'sent request',index,begin,begin+length)
2304-
2305- def send_cancel(self, index, begin, length):
2306- self._send_message(CANCEL + tobinary(index) +
2307- tobinary(begin) + tobinary(length))
2308- if DEBUG1:
2309- print (self.ccount,'sent cancel',index,begin,begin+length)
2310-
2311- def send_bitfield(self, bitfield):
2312- self._send_message(BITFIELD + bitfield)
2313-
2314- def send_have(self, index):
2315- self._send_message(HAVE + tobinary(index))
2316-
2317- def send_keepalive(self):
2318- self._send_message('')
2319-
2320- def _send_message(self, s):
2321- if DEBUG2:
2322- if s:
2323- print (self.ccount,'SENDING MESSAGE',ord(s[0]),len(s))
2324- else:
2325- print (self.ccount,'SENDING MESSAGE',-1,0)
2326- s = tobinary(len(s))+s
2327- if self.partial_message:
2328- self.outqueue.append(s)
2329- else:
2330- self.connection.send_message_raw(s)
2331-
2332- def send_partial(self, bytes):
2333- if self.connection.closed:
2334- return 0
2335- if self.partial_message is None:
2336- s = self.upload.get_upload_chunk()
2337- if s is None:
2338- return 0
2339- index, begin, piece = s
2340- self.partial_message = ''.join((
2341- tobinary(len(piece) + 9), PIECE,
2342- tobinary(index), tobinary(begin), piece.tostring() ))
2343- if DEBUG1:
2344- print (self.ccount,'sending chunk',index,begin,begin+len(piece))
2345-
2346- if bytes < len(self.partial_message):
2347- self.connection.send_message_raw(self.partial_message[:bytes])
2348- self.partial_message = self.partial_message[bytes:]
2349- return bytes
2350-
2351- q = [self.partial_message]
2352- self.partial_message = None
2353- if self.send_choke_queued:
2354- self.send_choke_queued = False
2355- self.outqueue.append(tobinary(1)+CHOKE)
2356- self.upload.choke_sent()
2357- self.just_unchoked = 0
2358- q.extend(self.outqueue)
2359- self.outqueue = []
2360- q = ''.join(q)
2361- self.connection.send_message_raw(q)
2362- return len(q)
2363-
2364- def get_upload(self):
2365- return self.upload
2366-
2367- def get_download(self):
2368- return self.download
2369-
2370- def set_download(self, download):
2371- self.download = download
2372-
2373- def backlogged(self):
2374- return not self.connection.is_flushed()
2375-
2376- def got_request(self, i, p, l):
2377- self.upload.got_request(i, p, l)
2378- if self.just_unchoked:
2379- self.connecter.ratelimiter.ping(clock() - self.just_unchoked)
2380- self.just_unchoked = 0
2381-
2382-
2383-
2384-
2385-class Connecter:
2386- def __init__(self, make_upload, downloader, choker, numpieces,
2387- totalup, config, ratelimiter, sched = None):
2388- self.downloader = downloader
2389- self.make_upload = make_upload
2390- self.choker = choker
2391- self.numpieces = numpieces
2392- self.config = config
2393- self.ratelimiter = ratelimiter
2394- self.rate_capped = False
2395- self.sched = sched
2396- self.totalup = totalup
2397- self.rate_capped = False
2398- self.connections = {}
2399- self.external_connection_made = 0
2400- self.ccount = 0
2401-
2402- def how_many_connections(self):
2403- return len(self.connections)
2404-
2405- def connection_made(self, connection):
2406- self.ccount += 1
2407- c = Connection(connection, self, self.ccount)
2408- if DEBUG2:
2409- print (c.ccount,'connection made')
2410- self.connections[connection] = c
2411- c.upload = self.make_upload(c, self.ratelimiter, self.totalup)
2412- c.download = self.downloader.make_download(c)
2413- self.choker.connection_made(c)
2414- return c
2415-
2416- def connection_lost(self, connection):
2417- c = self.connections[connection]
2418- if DEBUG2:
2419- print (c.ccount,'connection closed')
2420- del self.connections[connection]
2421- if c.download:
2422- c.download.disconnected()
2423- self.choker.connection_lost(c)
2424-
2425- def connection_flushed(self, connection):
2426- conn = self.connections[connection]
2427- if conn.next_upload is None and (conn.partial_message is not None
2428- or len(conn.upload.buffer) > 0):
2429- self.ratelimiter.queue(conn)
2430-
2431- def got_piece(self, i):
2432- for co in self.connections.values():
2433- co.send_have(i)
2434-
2435- def got_message(self, connection, message):
2436- c = self.connections[connection]
2437- t = message[0]
2438- if DEBUG2:
2439- print (c.ccount,'message received',ord(t))
2440- if t == BITFIELD and c.got_anything:
2441- if DEBUG2:
2442- print (c.ccount,'misplaced bitfield')
2443- connection.close()
2444- return
2445- c.got_anything = True
2446- if (t in [CHOKE, UNCHOKE, INTERESTED, NOT_INTERESTED] and
2447- len(message) != 1):
2448- if DEBUG2:
2449- print (c.ccount,'bad message length')
2450- connection.close()
2451- return
2452- if t == CHOKE:
2453- c.download.got_choke()
2454- elif t == UNCHOKE:
2455- c.download.got_unchoke()
2456- elif t == INTERESTED:
2457- if not c.download.have.complete():
2458- c.upload.got_interested()
2459- elif t == NOT_INTERESTED:
2460- c.upload.got_not_interested()
2461- elif t == HAVE:
2462- if len(message) != 5:
2463- if DEBUG2:
2464- print (c.ccount,'bad message length')
2465- connection.close()
2466- return
2467- i = toint(message[1:])
2468- if i >= self.numpieces:
2469- if DEBUG2:
2470- print (c.ccount,'bad piece number')
2471- connection.close()
2472- return
2473- if c.download.got_have(i):
2474- c.upload.got_not_interested()
2475- elif t == BITFIELD:
2476- try:
2477- b = Bitfield(self.numpieces, message[1:])
2478- except ValueError:
2479- if DEBUG2:
2480- print (c.ccount,'bad bitfield')
2481- connection.close()
2482- return
2483- if c.download.got_have_bitfield(b):
2484- c.upload.got_not_interested()
2485- elif t == REQUEST:
2486- if len(message) != 13:
2487- if DEBUG2:
2488- print (c.ccount,'bad message length')
2489- connection.close()
2490- return
2491- i = toint(message[1:5])
2492- if i >= self.numpieces:
2493- if DEBUG2:
2494- print (c.ccount,'bad piece number')
2495- connection.close()
2496- return
2497- c.got_request(i, toint(message[5:9]),
2498- toint(message[9:]))
2499- elif t == CANCEL:
2500- if len(message) != 13:
2501- if DEBUG2:
2502- print (c.ccount,'bad message length')
2503- connection.close()
2504- return
2505- i = toint(message[1:5])
2506- if i >= self.numpieces:
2507- if DEBUG2:
2508- print (c.ccount,'bad piece number')
2509- connection.close()
2510- return
2511- c.upload.got_cancel(i, toint(message[5:9]),
2512- toint(message[9:]))
2513- elif t == PIECE:
2514- if len(message) <= 9:
2515- if DEBUG2:
2516- print (c.ccount,'bad message length')
2517- connection.close()
2518- return
2519- i = toint(message[1:5])
2520- if i >= self.numpieces:
2521- if DEBUG2:
2522- print (c.ccount,'bad piece number')
2523- connection.close()
2524- return
2525- if c.download.got_piece(i, toint(message[5:9]), message[9:]):
2526- self.got_piece(i)
2527- else:
2528- connection.close()
2529
2530=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Encrypter.py'
2531--- .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Encrypter.py 2010-03-21 14:36:30 +0000
2532+++ .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Encrypter.py 1970-01-01 00:00:00 +0000
2533@@ -1,657 +0,0 @@
2534-# Written by Bram Cohen
2535-# see LICENSE.txt for license information
2536-
2537-from cStringIO import StringIO
2538-from binascii import b2a_hex
2539-from socket import error as socketerror
2540-from urllib import quote
2541-from traceback import print_exc
2542-from BitTornado.BTcrypto import Crypto
2543-
2544-try:
2545- True
2546-except:
2547- True = 1
2548- False = 0
2549- bool = lambda x: not not x
2550-
2551-DEBUG = False
2552-
2553-MAX_INCOMPLETE = 8
2554-
2555-protocol_name = 'BitTorrent protocol'
2556-option_pattern = chr(0)*8
2557-
2558-def toint(s):
2559- return long(b2a_hex(s), 16)
2560-
2561-def tobinary16(i):
2562- return chr((i >> 8) & 0xFF) + chr(i & 0xFF)
2563-
2564-hexchars = '0123456789ABCDEF'
2565-hexmap = []
2566-for i in xrange(256):
2567- hexmap.append(hexchars[(i&0xF0)/16]+hexchars[i&0x0F])
2568-
2569-def tohex(s):
2570- r = []
2571- for c in s:
2572- r.append(hexmap[ord(c)])
2573- return ''.join(r)
2574-
2575-def make_readable(s):
2576- if not s:
2577- return ''
2578- if quote(s).find('%') >= 0:
2579- return tohex(s)
2580- return '"'+s+'"'
2581-
2582-
2583-class IncompleteCounter:
2584- def __init__(self):
2585- self.c = 0
2586- def increment(self):
2587- self.c += 1
2588- def decrement(self):
2589- self.c -= 1
2590- def toomany(self):
2591- return self.c >= MAX_INCOMPLETE
2592-
2593-incompletecounter = IncompleteCounter()
2594-
2595-
2596-# header, options, download id, my id, [length, message]
2597-
2598-class Connection:
2599- def __init__(self, Encoder, connection, id,
2600- ext_handshake=False, encrypted = None, options = None):
2601- self.Encoder = Encoder
2602- self.connection = connection
2603- self.connecter = Encoder.connecter
2604- self.id = id
2605- self.locally_initiated = (id != None)
2606- self.readable_id = make_readable(id)
2607- self.complete = False
2608- self.keepalive = lambda: None
2609- self.closed = False
2610- self.buffer = ''
2611- self.bufferlen = None
2612- self.log = None
2613- self.read = self._read
2614- self.write = self._write
2615- self.cryptmode = 0
2616- self.encrypter = None
2617- if self.locally_initiated:
2618- incompletecounter.increment()
2619- if encrypted:
2620- self.encrypted = True
2621- self.encrypter = Crypto(True)
2622- self.write(self.encrypter.pubkey+self.encrypter.padding())
2623- else:
2624- self.encrypted = False
2625- self.write(chr(len(protocol_name)) + protocol_name +
2626- option_pattern + self.Encoder.download_id )
2627- self.next_len, self.next_func = 1+len(protocol_name), self.read_header
2628- elif ext_handshake:
2629- self.Encoder.connecter.external_connection_made += 1
2630- if encrypted: # passed an already running encrypter
2631- self.encrypter = encrypted
2632- self.encrypted = True
2633- self._start_crypto()
2634- self.next_len, self.next_func = 14, self.read_crypto_block3c
2635- else:
2636- self.encrypted = False
2637- self.options = options
2638- self.write(self.Encoder.my_id)
2639- self.next_len, self.next_func = 20, self.read_peer_id
2640- else:
2641- self.encrypted = None # don't know yet
2642- self.next_len, self.next_func = 1+len(protocol_name), self.read_header
2643- self.Encoder.raw_server.add_task(self._auto_close, 30)
2644-
2645-
2646- def _log_start(self): # only called with DEBUG = True
2647- self.log = open('peerlog.'+self.get_ip()+'.txt','a')
2648- self.log.write('connected - ')
2649- if self.locally_initiated:
2650- self.log.write('outgoing\n')
2651- else:
2652- self.log.write('incoming\n')
2653- self._logwritefunc = self.write
2654- self.write = self._log_write
2655-
2656- def _log_write(self, s):
2657- self.log.write('w:'+b2a_hex(s)+'\n')
2658- self._logwritefunc(s)
2659-
2660-
2661- def get_ip(self, real=False):
2662- return self.connection.get_ip(real)
2663-
2664- def get_id(self):
2665- return self.id
2666-
2667- def get_readable_id(self):
2668- return self.readable_id
2669-
2670- def is_locally_initiated(self):
2671- return self.locally_initiated
2672-
2673- def is_encrypted(self):
2674- return bool(self.encrypted)
2675-
2676- def is_flushed(self):
2677- return self.connection.is_flushed()
2678-
2679- def _read_header(self, s):
2680- if s == chr(len(protocol_name))+protocol_name:
2681- return 8, self.read_options
2682- return None
2683-
2684- def read_header(self, s):
2685- if self._read_header(s):
2686- if self.encrypted or self.Encoder.config['crypto_stealth']:
2687- return None
2688- return 8, self.read_options
2689- if self.locally_initiated and not self.encrypted:
2690- return None
2691- elif not self.Encoder.config['crypto_allowed']:
2692- return None
2693- if not self.encrypted:
2694- self.encrypted = True
2695- self.encrypter = Crypto(self.locally_initiated)
2696- self._write_buffer(s)
2697- return self.encrypter.keylength, self.read_crypto_header
2698-
2699- ################## ENCRYPTION SUPPORT ######################
2700-
2701- def _start_crypto(self):
2702- self.encrypter.setrawaccess(self._read,self._write)
2703- self.write = self.encrypter.write
2704- self.read = self.encrypter.read
2705- if self.buffer:
2706- self.buffer = self.encrypter.decrypt(self.buffer)
2707-
2708- def _end_crypto(self):
2709- self.read = self._read
2710- self.write = self._write
2711- self.encrypter = None
2712-
2713- def read_crypto_header(self, s):
2714- self.encrypter.received_key(s)
2715- self.encrypter.set_skey(self.Encoder.download_id)
2716- if self.locally_initiated:
2717- if self.Encoder.config['crypto_only']:
2718- cryptmode = '\x00\x00\x00\x02' # full stream encryption
2719- else:
2720- cryptmode = '\x00\x00\x00\x03' # header or full stream
2721- padc = self.encrypter.padding()
2722- self.write( self.encrypter.block3a
2723- + self.encrypter.block3b
2724- + self.encrypter.encrypt(
2725- ('\x00'*8) # VC
2726- + cryptmode # acceptable crypto modes
2727- + tobinary16(len(padc))
2728- + padc # PadC
2729- + '\x00\x00' ) ) # no initial payload data
2730- self._max_search = 520
2731- return 1, self.read_crypto_block4a
2732- self.write(self.encrypter.pubkey+self.encrypter.padding())
2733- self._max_search = 520
2734- return 0, self.read_crypto_block3a
2735-
2736- def _search_for_pattern(self, s, pat):
2737- p = s.find(pat)
2738- if p < 0:
2739- if len(s) >= len(pat):
2740- self._max_search -= len(s)+1-len(pat)
2741- if self._max_search < 0:
2742- self.close()
2743- return False
2744- self._write_buffer(s[1-len(pat):])
2745- return False
2746- self._write_buffer(s[p+len(pat):])
2747- return True
2748-
2749- ### INCOMING CONNECTION ###
2750-
2751- def read_crypto_block3a(self, s):
2752- if not self._search_for_pattern(s,self.encrypter.block3a):
2753- return -1, self.read_crypto_block3a # wait for more data
2754- return len(self.encrypter.block3b), self.read_crypto_block3b
2755-
2756- def read_crypto_block3b(self, s):
2757- if s != self.encrypter.block3b:
2758- return None
2759- self.Encoder.connecter.external_connection_made += 1
2760- self._start_crypto()
2761- return 14, self.read_crypto_block3c
2762-
2763- def read_crypto_block3c(self, s):
2764- if s[:8] != ('\x00'*8): # check VC
2765- return None
2766- self.cryptmode = toint(s[8:12]) % 4
2767- if self.cryptmode == 0:
2768- return None # no encryption selected
2769- if ( self.cryptmode == 1 # only header encryption
2770- and self.Encoder.config['crypto_only'] ):
2771- return None
2772- padlen = (ord(s[12])<<8)+ord(s[13])
2773- if padlen > 512:
2774- return None
2775- return padlen+2, self.read_crypto_pad3
2776-
2777- def read_crypto_pad3(self, s):
2778- s = s[-2:]
2779- ialen = (ord(s[0])<<8)+ord(s[1])
2780- if ialen > 65535:
2781- return None
2782- if self.cryptmode == 1:
2783- cryptmode = '\x00\x00\x00\x01' # header only encryption
2784- else:
2785- cryptmode = '\x00\x00\x00\x02' # full stream encryption
2786- padd = self.encrypter.padding()
2787- self.write( ('\x00'*8) # VC
2788- + cryptmode # encryption mode
2789- + tobinary16(len(padd))
2790- + padd ) # PadD
2791- if ialen:
2792- return ialen, self.read_crypto_ia
2793- return self.read_crypto_block3done()
2794-
2795- def read_crypto_ia(self, s):
2796- if DEBUG:
2797- self._log_start()
2798- self.log.write('r:'+b2a_hex(s)+'(ia)\n')
2799- if self.buffer:
2800- self.log.write('r:'+b2a_hex(self.buffer)+'(buffer)\n')
2801- return self.read_crypto_block3done(s)
2802-
2803- def read_crypto_block3done(self, ia=''):
2804- if DEBUG:
2805- if not self.log:
2806- self._log_start()
2807- if self.cryptmode == 1: # only handshake encryption
2808- assert not self.buffer # oops; check for exceptions to this
2809- self._end_crypto()
2810- if ia:
2811- self._write_buffer(ia)
2812- return 1+len(protocol_name), self.read_encrypted_header
2813-
2814- ### OUTGOING CONNECTION ###
2815-
2816- def read_crypto_block4a(self, s):
2817- if not self._search_for_pattern(s,self.encrypter.VC_pattern()):
2818- return -1, self.read_crypto_block4a # wait for more data
2819- self._start_crypto()
2820- return 6, self.read_crypto_block4b
2821-
2822- def read_crypto_block4b(self, s):
2823- self.cryptmode = toint(s[:4]) % 4
2824- if self.cryptmode == 1: # only header encryption
2825- if self.Encoder.config['crypto_only']:
2826- return None
2827- elif self.cryptmode != 2:
2828- return None # unknown encryption
2829- padlen = (ord(s[4])<<8)+ord(s[5])
2830- if padlen > 512:
2831- return None
2832- if padlen:
2833- return padlen, self.read_crypto_pad4
2834- return self.read_crypto_block4done()
2835-
2836- def read_crypto_pad4(self, s):
2837- # discard data
2838- return self.read_crypto_block4done()
2839-
2840- def read_crypto_block4done(self):
2841- if DEBUG:
2842- self._log_start()
2843- if self.cryptmode == 1: # only handshake encryption
2844- if not self.buffer: # oops; check for exceptions to this
2845- return None
2846- self._end_crypto()
2847- self.write(chr(len(protocol_name)) + protocol_name +
2848- option_pattern + self.Encoder.download_id)
2849- return 1+len(protocol_name), self.read_encrypted_header
2850-
2851- ### START PROTOCOL OVER ENCRYPTED CONNECTION ###
2852-
2853- def read_encrypted_header(self, s):
2854- return self._read_header(s)
2855-
2856- ################################################
2857-
2858- def read_options(self, s):
2859- self.options = s
2860- return 20, self.read_download_id
2861-
2862- def read_download_id(self, s):
2863- if ( s != self.Encoder.download_id
2864- or not self.Encoder.check_ip(ip=self.get_ip()) ):
2865- return None
2866- if not self.locally_initiated:
2867- if not self.encrypted:
2868- self.Encoder.connecter.external_connection_made += 1
2869- self.write(chr(len(protocol_name)) + protocol_name +
2870- option_pattern + self.Encoder.download_id + self.Encoder.my_id)
2871- return 20, self.read_peer_id
2872-
2873- def read_peer_id(self, s):
2874- if not self.encrypted and self.Encoder.config['crypto_only']:
2875- return None # allows older trackers to ping,
2876- # but won't proceed w/ connections
2877- if not self.id:
2878- self.id = s
2879- self.readable_id = make_readable(s)
2880- else:
2881- if s != self.id:
2882- return None
2883- self.complete = self.Encoder.got_id(self)
2884- if not self.complete:
2885- return None
2886- if self.locally_initiated:
2887- self.write(self.Encoder.my_id)
2888- incompletecounter.decrement()
2889- self._switch_to_read2()
2890- c = self.Encoder.connecter.connection_made(self)
2891- self.keepalive = c.send_keepalive
2892- return 4, self.read_len
2893-
2894- def read_len(self, s):
2895- l = toint(s)
2896- if l > self.Encoder.max_len:
2897- return None
2898- return l, self.read_message
2899-
2900- def read_message(self, s):
2901- if s != '':
2902- self.connecter.got_message(self, s)
2903- return 4, self.read_len
2904-
2905- def read_dead(self, s):
2906- return None
2907-
2908- def _auto_close(self):
2909- if not self.complete:
2910- self.close()
2911-
2912- def close(self):
2913- if not self.closed:
2914- self.connection.close()
2915- self.sever()
2916-
2917- def sever(self):
2918- if self.log:
2919- self.log.write('closed\n')
2920- self.log.close()
2921- self.closed = True
2922- del self.Encoder.connections[self.connection]
2923- if self.complete:
2924- self.connecter.connection_lost(self)
2925- elif self.locally_initiated:
2926- incompletecounter.decrement()
2927-
2928- def send_message_raw(self, message):
2929- self.write(message)
2930-
2931- def _write(self, message):
2932- if not self.closed:
2933- self.connection.write(message)
2934-
2935- def data_came_in(self, connection, s):
2936- self.read(s)
2937-
2938- def _write_buffer(self, s):
2939- self.buffer = s+self.buffer
2940-
2941- def _read(self, s):
2942- if self.log:
2943- self.log.write('r:'+b2a_hex(s)+'\n')
2944- self.Encoder.measurefunc(len(s))
2945- self.buffer += s
2946- while True:
2947- if self.closed:
2948- return
2949- # self.next_len = # of characters function expects
2950- # or 0 = all characters in the buffer
2951- # or -1 = wait for next read, then all characters in the buffer
2952- # not compatible w/ keepalives, switch out after all negotiation complete
2953- if self.next_len <= 0:
2954- m = self.buffer
2955- self.buffer = ''
2956- elif len(self.buffer) >= self.next_len:
2957- m = self.buffer[:self.next_len]
2958- self.buffer = self.buffer[self.next_len:]
2959- else:
2960- return
2961- try:
2962- x = self.next_func(m)
2963- except:
2964- self.next_len, self.next_func = 1, self.read_dead
2965- raise
2966- if x is None:
2967- self.close()
2968- return
2969- self.next_len, self.next_func = x
2970- if self.next_len < 0: # already checked buffer
2971- return # wait for additional data
2972- if self.bufferlen is not None:
2973- self._read2('')
2974- return
2975-
2976- def _switch_to_read2(self):
2977- self._write_buffer = None
2978- if self.encrypter:
2979- self.encrypter.setrawaccess(self._read2,self._write)
2980- else:
2981- self.read = self._read2
2982- self.bufferlen = len(self.buffer)
2983- self.buffer = [self.buffer]
2984-
2985- def _read2(self, s): # more efficient, requires buffer['',''] & bufferlen
2986- if self.log:
2987- self.log.write('r:'+b2a_hex(s)+'\n')
2988- self.Encoder.measurefunc(len(s))
2989- while True:
2990- if self.closed:
2991- return
2992- p = self.next_len-self.bufferlen
2993- if self.next_len == 0:
2994- m = ''
2995- elif s:
2996- if p > len(s):
2997- self.buffer.append(s)
2998- self.bufferlen += len(s)
2999- return
3000- self.bufferlen = len(s)-p
3001- self.buffer.append(s[:p])
3002- m = ''.join(self.buffer)
3003- if p == len(s):
3004- self.buffer = []
3005- else:
3006- self.buffer=[s[p:]]
3007- s = ''
3008- elif p <= 0:
3009- # assert len(self.buffer) == 1
3010- s = self.buffer[0]
3011- self.bufferlen = len(s)-self.next_len
3012- m = s[:self.next_len]
3013- if p == 0:
3014- self.buffer = []
3015- else:
3016- self.buffer = [s[self.next_len:]]
3017- s = ''
3018- else:
3019- return
3020- try:
3021- x = self.next_func(m)
3022- except:
3023- self.next_len, self.next_func = 1, self.read_dead
3024- raise
3025- if x is None:
3026- self.close()
3027- return
3028- self.next_len, self.next_func = x
3029- if self.next_len < 0: # already checked buffer
3030- return # wait for additional data
3031-
3032-
3033- def connection_flushed(self, connection):
3034- if self.complete:
3035- self.connecter.connection_flushed(self)
3036-
3037- def connection_lost(self, connection):
3038- if self.Encoder.connections.has_key(connection):
3039- self.sever()
3040-
3041-
3042-class _dummy_banlist:
3043- def includes(self, x):
3044- return False
3045-
3046-class Encoder:
3047- def __init__(self, connecter, raw_server, my_id, max_len,
3048- schedulefunc, keepalive_delay, download_id,
3049- measurefunc, config, bans=_dummy_banlist() ):
3050- self.raw_server = raw_server
3051- self.connecter = connecter
3052- self.my_id = my_id
3053- self.max_len = max_len
3054- self.schedulefunc = schedulefunc
3055- self.keepalive_delay = keepalive_delay
3056- self.download_id = download_id
3057- self.measurefunc = measurefunc
3058- self.config = config
3059- self.connections = {}
3060- self.banned = {}
3061- self.external_bans = bans
3062- self.to_connect = []
3063- self.paused = False
3064- if self.config['max_connections'] == 0:
3065- self.max_connections = 2 ** 30
3066- else:
3067- self.max_connections = self.config['max_connections']
3068- schedulefunc(self.send_keepalives, keepalive_delay)
3069-
3070- def send_keepalives(self):
3071- self.schedulefunc(self.send_keepalives, self.keepalive_delay)
3072- if self.paused:
3073- return
3074- for c in self.connections.values():
3075- c.keepalive()
3076-
3077- def start_connections(self, list):
3078- if not self.to_connect:
3079- self.raw_server.add_task(self._start_connection_from_queue)
3080- self.to_connect = list
3081-
3082- def _start_connection_from_queue(self):
3083- if self.connecter.external_connection_made:
3084- max_initiate = self.config['max_initiate']
3085- else:
3086- max_initiate = int(self.config['max_initiate']*1.5)
3087- cons = len(self.connections)
3088- if cons >= self.max_connections or cons >= max_initiate:
3089- delay = 60
3090- elif self.paused or incompletecounter.toomany():
3091- delay = 1
3092- else:
3093- delay = 0
3094- dns, id, encrypted = self.to_connect.pop(0)
3095- self.start_connection(dns, id, encrypted)
3096- if self.to_connect:
3097- self.raw_server.add_task(self._start_connection_from_queue, delay)
3098-
3099- def start_connection(self, dns, id, encrypted = None):
3100- if ( self.paused
3101- or len(self.connections) >= self.max_connections
3102- or id == self.my_id
3103- or not self.check_ip(ip=dns[0]) ):
3104- return True
3105- if self.config['crypto_only']:
3106- if encrypted is None or encrypted: # fails on encrypted = 0
3107- encrypted = True
3108- else:
3109- return True
3110- for v in self.connections.values():
3111- if v is None:
3112- continue
3113- if id and v.id == id:
3114- return True
3115- ip = v.get_ip(True)
3116- if self.config['security'] and ip != 'unknown' and ip == dns[0]:
3117- return True
3118- try:
3119- c = self.raw_server.start_connection(dns)
3120- con = Connection(self, c, id, encrypted = encrypted)
3121- self.connections[c] = con
3122- c.set_handler(con)
3123- except socketerror:
3124- return False
3125- return True
3126-
3127- def _start_connection(self, dns, id, encrypted = None):
3128- def foo(self=self, dns=dns, id=id, encrypted=encrypted):
3129- self.start_connection(dns, id, encrypted)
3130- self.schedulefunc(foo, 0)
3131-
3132- def check_ip(self, connection=None, ip=None):
3133- if not ip:
3134- ip = connection.get_ip(True)
3135- if self.config['security'] and self.banned.has_key(ip):
3136- return False
3137- if self.external_bans.includes(ip):
3138- return False
3139- return True
3140-
3141- def got_id(self, connection):
3142- if connection.id == self.my_id:
3143- self.connecter.external_connection_made -= 1
3144- return False
3145- ip = connection.get_ip(True)
3146- for v in self.connections.values():
3147- if connection is not v:
3148- if connection.id == v.id:
3149- if ip == v.get_ip(True):
3150- v.close()
3151- else:
3152- return False
3153- if self.config['security'] and ip != 'unknown' and ip == v.get_ip(True):
3154- v.close()
3155- return True
3156-
3157- def external_connection_made(self, connection):
3158- if self.paused or len(self.connections) >= self.max_connections:
3159- connection.close()
3160- return False
3161- con = Connection(self, connection, None)
3162- self.connections[connection] = con
3163- connection.set_handler(con)
3164- return True
3165-
3166- def externally_handshaked_connection_made(self, connection, options,
3167- already_read, encrypted = None):
3168- if ( self.paused
3169- or len(self.connections) >= self.max_connections
3170- or not self.check_ip(connection=connection) ):
3171- connection.close()
3172- return False
3173- con = Connection(self, connection, None,
3174- ext_handshake = True, encrypted = encrypted, options = options)
3175- self.connections[connection] = con
3176- connection.set_handler(con)
3177- if already_read:
3178- con.data_came_in(con, already_read)
3179- return True
3180-
3181- def close_all(self):
3182- for c in self.connections.values():
3183- c.close()
3184- self.connections = {}
3185-
3186- def ban(self, ip):
3187- self.banned[ip] = 1
3188-
3189- def pause(self, flag):
3190- self.paused = flag
3191
3192=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Storage.py'
3193--- .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Storage.py 2010-03-21 14:36:30 +0000
3194+++ .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/Storage.py 1970-01-01 00:00:00 +0000
3195@@ -1,584 +0,0 @@
3196-# Written by Bram Cohen
3197-# see LICENSE.txt for license information
3198-
3199-from BitTornado.piecebuffer import BufferPool
3200-from threading import Lock
3201-from time import time, strftime, localtime
3202-import os
3203-from os.path import exists, getsize, getmtime, basename
3204-from traceback import print_exc
3205-try:
3206- from os import fsync
3207-except ImportError:
3208- fsync = lambda x: None
3209-from bisect import bisect
3210-
3211-try:
3212- True
3213-except:
3214- True = 1
3215- False = 0
3216-
3217-DEBUG = False
3218-
3219-MAXREADSIZE = 32768
3220-MAXLOCKSIZE = 1000000000L
3221-MAXLOCKRANGE = 3999999999L # only lock first 4 gig of file
3222-
3223-_pool = BufferPool()
3224-PieceBuffer = _pool.new
3225-
3226-def dummy_status(fractionDone = None, activity = None):
3227- pass
3228-
3229-class Storage:
3230- def __init__(self, files, piece_length, doneflag, config,
3231- disabled_files = None):
3232- # can raise IOError and ValueError
3233- self.files = files
3234- self.piece_length = piece_length
3235- self.doneflag = doneflag
3236- self.disabled = [False] * len(files)
3237- self.file_ranges = []
3238- self.disabled_ranges = []
3239- self.working_ranges = []
3240- numfiles = 0
3241- total = 0l
3242- so_far = 0l
3243- self.handles = {}
3244- self.whandles = {}
3245- self.tops = {}
3246- self.sizes = {}
3247- self.mtimes = {}
3248- if config.get('lock_files', True):
3249- self.lock_file, self.unlock_file = self._lock_file, self._unlock_file
3250- else:
3251- self.lock_file, self.unlock_file = lambda x1,x2: None, lambda x1,x2: None
3252- self.lock_while_reading = config.get('lock_while_reading', False)
3253- self.lock = Lock()
3254-
3255- if not disabled_files:
3256- disabled_files = [False] * len(files)
3257-
3258- for i in xrange(len(files)):
3259- file, length = files[i]
3260- if doneflag.isSet(): # bail out if doneflag is set
3261- return
3262- self.disabled_ranges.append(None)
3263- if length == 0:
3264- self.file_ranges.append(None)
3265- self.working_ranges.append([])
3266- else:
3267- range = (total, total + length, 0, file)
3268- self.file_ranges.append(range)
3269- self.working_ranges.append([range])
3270- numfiles += 1
3271- total += length
3272- if disabled_files[i]:
3273- l = 0
3274- else:
3275- if exists(file):
3276- l = getsize(file)
3277- if l > length:
3278- h = open(file, 'rb+')
3279- h.truncate(length)
3280- h.flush()
3281- h.close()
3282- l = length
3283- else:
3284- l = 0
3285- h = open(file, 'wb+')
3286- h.flush()
3287- h.close()
3288- self.mtimes[file] = getmtime(file)
3289- self.tops[file] = l
3290- self.sizes[file] = length
3291- so_far += l
3292-
3293- self.total_length = total
3294- self._reset_ranges()
3295-
3296- self.max_files_open = config['max_files_open']
3297- if self.max_files_open > 0 and numfiles > self.max_files_open:
3298- self.handlebuffer = []
3299- else:
3300- self.handlebuffer = None
3301-
3302-
3303- if os.name == 'nt':
3304- def _lock_file(self, name, f):
3305- import msvcrt
3306- for p in range(0, min(self.sizes[name],MAXLOCKRANGE), MAXLOCKSIZE):
3307- f.seek(p)
3308- msvcrt.locking(f.fileno(), msvcrt.LK_LOCK,
3309- min(MAXLOCKSIZE,self.sizes[name]-p))
3310-
3311- def _unlock_file(self, name, f):
3312- import msvcrt
3313- for p in range(0, min(self.sizes[name],MAXLOCKRANGE), MAXLOCKSIZE):
3314- f.seek(p)
3315- msvcrt.locking(f.fileno(), msvcrt.LK_UNLCK,
3316- min(MAXLOCKSIZE,self.sizes[name]-p))
3317-
3318- elif os.name == 'posix':
3319- def _lock_file(self, name, f):
3320- import fcntl
3321- fcntl.flock(f.fileno(), fcntl.LOCK_EX)
3322-
3323- def _unlock_file(self, name, f):
3324- import fcntl
3325- fcntl.flock(f.fileno(), fcntl.LOCK_UN)
3326-
3327- else:
3328- def _lock_file(self, name, f):
3329- pass
3330- def _unlock_file(self, name, f):
3331- pass
3332-
3333-
3334- def was_preallocated(self, pos, length):
3335- for file, begin, end in self._intervals(pos, length):
3336- if self.tops.get(file, 0) < end:
3337- return False
3338- return True
3339-
3340-
3341- def _sync(self, file):
3342- self._close(file)
3343- if self.handlebuffer:
3344- self.handlebuffer.remove(file)
3345-
3346- def sync(self):
3347- # may raise IOError or OSError
3348- for file in self.whandles.keys():
3349- self._sync(file)
3350-
3351-
3352- def set_readonly(self, f=None):
3353- if f is None:
3354- self.sync()
3355- return
3356- file = self.files[f][0]
3357- if self.whandles.has_key(file):
3358- self._sync(file)
3359-
3360-
3361- def get_total_length(self):
3362- return self.total_length
3363-
3364-
3365- def _open(self, file, mode):
3366- if self.mtimes.has_key(file):
3367- try:
3368- if self.handlebuffer is not None:
3369- assert getsize(file) == self.tops[file]
3370- newmtime = getmtime(file)
3371- oldmtime = self.mtimes[file]
3372- assert newmtime <= oldmtime+1
3373- assert newmtime >= oldmtime-1
3374- except:
3375- if DEBUG:
3376- print ( file+' modified: '
3377- +strftime('(%x %X)',localtime(self.mtimes[file]))
3378- +strftime(' != (%x %X) ?',localtime(getmtime(file))) )
3379- raise IOError('modified during download')
3380- try:
3381- return open(file, mode)
3382- except:
3383- if DEBUG:
3384- print_exc()
3385- raise
3386-
3387-
3388- def _close(self, file):
3389- f = self.handles[file]
3390- del self.handles[file]
3391- if self.whandles.has_key(file):
3392- del self.whandles[file]
3393- f.flush()
3394- self.unlock_file(file, f)
3395- f.close()
3396- self.tops[file] = getsize(file)
3397- self.mtimes[file] = getmtime(file)
3398- else:
3399- if self.lock_while_reading:
3400- self.unlock_file(file, f)
3401- f.close()
3402-
3403-
3404- def _close_file(self, file):
3405- if not self.handles.has_key(file):
3406- return
3407- self._close(file)
3408- if self.handlebuffer:
3409- self.handlebuffer.remove(file)
3410-
3411-
3412- def _get_file_handle(self, file, for_write):
3413- if self.handles.has_key(file):
3414- if for_write and not self.whandles.has_key(file):
3415- self._close(file)
3416- try:
3417- f = self._open(file, 'rb+')
3418- self.handles[file] = f
3419- self.whandles[file] = 1
3420- self.lock_file(file, f)
3421- except (IOError, OSError), e:
3422- if DEBUG:
3423- print_exc()
3424- raise IOError('unable to reopen '+file+': '+str(e))
3425-
3426- if self.handlebuffer:
3427- if self.handlebuffer[-1] != file:
3428- self.handlebuffer.remove(file)
3429- self.handlebuffer.append(file)
3430- elif self.handlebuffer is not None:
3431- self.handlebuffer.append(file)
3432- else:
3433- try:
3434- if for_write:
3435- f = self._open(file, 'rb+')
3436- self.handles[file] = f
3437- self.whandles[file] = 1
3438- self.lock_file(file, f)
3439- else:
3440- f = self._open(file, 'rb')
3441- self.handles[file] = f
3442- if self.lock_while_reading:
3443- self.lock_file(file, f)
3444- except (IOError, OSError), e:
3445- if DEBUG:
3446- print_exc()
3447- raise IOError('unable to open '+file+': '+str(e))
3448-
3449- if self.handlebuffer is not None:
3450- self.handlebuffer.append(file)
3451- if len(self.handlebuffer) > self.max_files_open:
3452- self._close(self.handlebuffer.pop(0))
3453-
3454- return self.handles[file]
3455-
3456-
3457- def _reset_ranges(self):
3458- self.ranges = []
3459- for l in self.working_ranges:
3460- self.ranges.extend(l)
3461- self.begins = [i[0] for i in self.ranges]
3462-
3463- def _intervals(self, pos, amount):
3464- r = []
3465- stop = pos + amount
3466- p = bisect(self.begins, pos) - 1
3467- while p < len(self.ranges):
3468- begin, end, offset, file = self.ranges[p]
3469- if begin >= stop:
3470- break
3471- r.append(( file,
3472- offset + max(pos, begin) - begin,
3473- offset + min(end, stop) - begin ))
3474- p += 1
3475- return r
3476-
3477-
3478- def read(self, pos, amount, flush_first = False):
3479- r = PieceBuffer()
3480- for file, pos, end in self._intervals(pos, amount):
3481- if DEBUG:
3482- print 'reading '+file+' from '+str(pos)+' to '+str(end)
3483- self.lock.acquire()
3484- h = self._get_file_handle(file, False)
3485- if flush_first and self.whandles.has_key(file):
3486- h.flush()
3487- fsync(h)
3488- h.seek(pos)
3489- while pos < end:
3490- length = min(end-pos, MAXREADSIZE)
3491- data = h.read(length)
3492- if len(data) != length:
3493- raise IOError('error reading data from '+file)
3494- r.append(data)
3495- pos += length
3496- self.lock.release()
3497- return r
3498-
3499- def write(self, pos, s):
3500- # might raise an IOError
3501- total = 0
3502- for file, begin, end in self._intervals(pos, len(s)):
3503- if DEBUG:
3504- print 'writing '+file+' from '+str(pos)+' to '+str(end)
3505- self.lock.acquire()
3506- h = self._get_file_handle(file, True)
3507- h.seek(begin)
3508- h.write(s[total: total + end - begin])
3509- self.lock.release()
3510- total += end - begin
3511-
3512- def top_off(self):
3513- for begin, end, offset, file in self.ranges:
3514- l = offset + end - begin
3515- if l > self.tops.get(file, 0):
3516- self.lock.acquire()
3517- h = self._get_file_handle(file, True)
3518- h.seek(l-1)
3519- h.write(chr(0xFF))
3520- self.lock.release()
3521-
3522- def flush(self):
3523- # may raise IOError or OSError
3524- for file in self.whandles.keys():
3525- self.lock.acquire()
3526- self.handles[file].flush()
3527- self.lock.release()
3528-
3529- def close(self):
3530- for file, f in self.handles.items():
3531- try:
3532- self.unlock_file(file, f)
3533- except:
3534- pass
3535- try:
3536- f.close()
3537- except:
3538- pass
3539- self.handles = {}
3540- self.whandles = {}
3541- self.handlebuffer = None
3542-
3543-
3544- def _get_disabled_ranges(self, f):
3545- if not self.file_ranges[f]:
3546- return ((),(),())
3547- r = self.disabled_ranges[f]
3548- if r:
3549- return r
3550- start, end, offset, file = self.file_ranges[f]
3551- if DEBUG:
3552- print 'calculating disabled range for '+self.files[f][0]
3553- print 'bytes: '+str(start)+'-'+str(end)
3554- print 'file spans pieces '+str(int(start/self.piece_length))+'-'+str(int((end-1)/self.piece_length)+1)
3555- pieces = range( int(start/self.piece_length),
3556- int((end-1)/self.piece_length)+1 )
3557- offset = 0
3558- disabled_files = []
3559- if len(pieces) == 1:
3560- if ( start % self.piece_length == 0
3561- and end % self.piece_length == 0 ): # happens to be a single,
3562- # perfect piece
3563- working_range = [(start, end, offset, file)]
3564- update_pieces = []
3565- else:
3566- midfile = os.path.join(self.bufferdir,str(f))
3567- working_range = [(start, end, 0, midfile)]
3568- disabled_files.append((midfile, start, end))
3569- length = end - start
3570- self.sizes[midfile] = length
3571- piece = pieces[0]
3572- update_pieces = [(piece, start-(piece*self.piece_length), length)]
3573- else:
3574- update_pieces = []
3575- if start % self.piece_length != 0: # doesn't begin on an even piece boundary
3576- end_b = pieces[1]*self.piece_length
3577- startfile = os.path.join(self.bufferdir,str(f)+'b')
3578- working_range_b = [ ( start, end_b, 0, startfile ) ]
3579- disabled_files.append((startfile, start, end_b))
3580- length = end_b - start
3581- self.sizes[startfile] = length
3582- offset = length
3583- piece = pieces.pop(0)
3584- update_pieces.append((piece, start-(piece*self.piece_length), length))
3585- else:
3586- working_range_b = []
3587- if f != len(self.files)-1 and end % self.piece_length != 0:
3588- # doesn't end on an even piece boundary
3589- start_e = pieces[-1] * self.piece_length
3590- endfile = os.path.join(self.bufferdir,str(f)+'e')
3591- working_range_e = [ ( start_e, end, 0, endfile ) ]
3592- disabled_files.append((endfile, start_e, end))
3593- length = end - start_e
3594- self.sizes[endfile] = length
3595- piece = pieces.pop(-1)
3596- update_pieces.append((piece, 0, length))
3597- else:
3598- working_range_e = []
3599- if pieces:
3600- working_range_m = [ ( pieces[0]*self.piece_length,
3601- (pieces[-1]+1)*self.piece_length,
3602- offset, file ) ]
3603- else:
3604- working_range_m = []
3605- working_range = working_range_b + working_range_m + working_range_e
3606-
3607- if DEBUG:
3608- print str(working_range)
3609- print str(update_pieces)
3610- r = (tuple(working_range), tuple(update_pieces), tuple(disabled_files))
3611- self.disabled_ranges[f] = r
3612- return r
3613-
3614-
3615- def set_bufferdir(self, dir):
3616- self.bufferdir = dir
3617-
3618- def enable_file(self, f):
3619- if not self.disabled[f]:
3620- return
3621- self.disabled[f] = False
3622- r = self.file_ranges[f]
3623- if not r:
3624- return
3625- file = r[3]
3626- if not exists(file):
3627- h = open(file, 'wb+')
3628- h.flush()
3629- h.close()
3630- if not self.tops.has_key(file):
3631- self.tops[file] = getsize(file)
3632- if not self.mtimes.has_key(file):
3633- self.mtimes[file] = getmtime(file)
3634- self.working_ranges[f] = [r]
3635-
3636- def disable_file(self, f):
3637- if self.disabled[f]:
3638- return
3639- self.disabled[f] = True
3640- r = self._get_disabled_ranges(f)
3641- if not r:
3642- return
3643- for file, begin, end in r[2]:
3644- if not os.path.isdir(self.bufferdir):
3645- os.makedirs(self.bufferdir)
3646- if not exists(file):
3647- h = open(file, 'wb+')
3648- h.flush()
3649- h.close()
3650- if not self.tops.has_key(file):
3651- self.tops[file] = getsize(file)
3652- if not self.mtimes.has_key(file):
3653- self.mtimes[file] = getmtime(file)
3654- self.working_ranges[f] = r[0]
3655-
3656- reset_file_status = _reset_ranges
3657-
3658-
3659- def get_piece_update_list(self, f):
3660- return self._get_disabled_ranges(f)[1]
3661-
3662-
3663- def delete_file(self, f):
3664- try:
3665- os.remove(self.files[f][0])
3666- except:
3667- pass
3668-
3669-
3670- '''
3671- Pickled data format:
3672-
3673- d['files'] = [ file #, size, mtime {, file #, size, mtime...} ]
3674- file # in torrent, and the size and last modification
3675- time for those files. Missing files are either empty
3676- or disabled.
3677- d['partial files'] = [ name, size, mtime... ]
3678- Names, sizes and last modification times of files containing
3679- partial piece data. Filenames go by the following convention:
3680- {file #, 0-based}{nothing, "b" or "e"}
3681- eg: "0e" "3" "4b" "4e"
3682- Where "b" specifies the partial data for the first piece in
3683- the file, "e" the last piece, and no letter signifying that
3684- the file is disabled but is smaller than one piece, and that
3685- all the data is cached inside so adjacent files may be
3686- verified.
3687- '''
3688- def pickle(self):
3689- files = []
3690- pfiles = []
3691- for i in xrange(len(self.files)):
3692- if not self.files[i][1]: # length == 0
3693- continue
3694- if self.disabled[i]:
3695- for file, start, end in self._get_disabled_ranges(i)[2]:
3696- pfiles.extend([basename(file),getsize(file),int(getmtime(file))])
3697- continue
3698- file = self.files[i][0]
3699- files.extend([i,getsize(file),int(getmtime(file))])
3700- return {'files': files, 'partial files': pfiles}
3701-
3702-
3703- def unpickle(self, data):
3704- # assume all previously-disabled files have already been disabled
3705- try:
3706- files = {}
3707- pfiles = {}
3708- l = data['files']
3709- assert len(l) % 3 == 0
3710- l = [l[x:x+3] for x in xrange(0,len(l),3)]
3711- for f, size, mtime in l:
3712- files[f] = (size, mtime)
3713- l = data.get('partial files',[])
3714- assert len(l) % 3 == 0
3715- l = [l[x:x+3] for x in xrange(0,len(l),3)]
3716- for file, size, mtime in l:
3717- pfiles[file] = (size, mtime)
3718-
3719- valid_pieces = {}
3720- for i in xrange(len(self.files)):
3721- if self.disabled[i]:
3722- continue
3723- r = self.file_ranges[i]
3724- if not r:
3725- continue
3726- start, end, offset, file =r
3727- if DEBUG:
3728- print 'adding '+file
3729- for p in xrange( int(start/self.piece_length),
3730- int((end-1)/self.piece_length)+1 ):
3731- valid_pieces[p] = 1
3732-
3733- if DEBUG:
3734- print valid_pieces.keys()
3735-
3736- def test(old, size, mtime):
3737- oldsize, oldmtime = old
3738- if size != oldsize:
3739- return False
3740- if mtime > oldmtime+1:
3741- return False
3742- if mtime < oldmtime-1:
3743- return False
3744- return True
3745-
3746- for i in xrange(len(self.files)):
3747- if self.disabled[i]:
3748- for file, start, end in self._get_disabled_ranges(i)[2]:
3749- f1 = basename(file)
3750- if ( not pfiles.has_key(f1)
3751- or not test(pfiles[f1],getsize(file),getmtime(file)) ):
3752- if DEBUG:
3753- print 'removing '+file
3754- for p in xrange( int(start/self.piece_length),
3755- int((end-1)/self.piece_length)+1 ):
3756- if valid_pieces.has_key(p):
3757- del valid_pieces[p]
3758- continue
3759- file, size = self.files[i]
3760- if not size:
3761- continue
3762- if ( not files.has_key(i)
3763- or not test(files[i],getsize(file),getmtime(file)) ):
3764- start, end, offset, file = self.file_ranges[i]
3765- if DEBUG:
3766- print 'removing '+file
3767- for p in xrange( int(start/self.piece_length),
3768- int((end-1)/self.piece_length)+1 ):
3769- if valid_pieces.has_key(p):
3770- del valid_pieces[p]
3771- except:
3772- if DEBUG:
3773- print_exc()
3774- return []
3775-
3776- if DEBUG:
3777- print valid_pieces.keys()
3778- return valid_pieces.keys()
3779-
3780
3781=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/BT1/StreamCheck.py'
3782--- .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/StreamCheck.py 2010-03-21 14:36:30 +0000
3783+++ .pc/09_timtuckerfixes.dpatch/BitTornado/BT1/StreamCheck.py 1970-01-01 00:00:00 +0000
3784@@ -1,135 +0,0 @@
3785-# Written by Bram Cohen
3786-# see LICENSE.txt for license information
3787-
3788-from cStringIO import StringIO
3789-from binascii import b2a_hex
3790-from socket import error as socketerror
3791-from urllib import quote
3792-from traceback import print_exc
3793-import Connecter
3794-try:
3795- True
3796-except:
3797- True = 1
3798- False = 0
3799-
3800-DEBUG = False
3801-
3802-
3803-protocol_name = 'BitTorrent protocol'
3804-option_pattern = chr(0)*8
3805-
3806-def toint(s):
3807- return long(b2a_hex(s), 16)
3808-
3809-def tobinary(i):
3810- return (chr(i >> 24) + chr((i >> 16) & 0xFF) +
3811- chr((i >> 8) & 0xFF) + chr(i & 0xFF))
3812-
3813-hexchars = '0123456789ABCDEF'
3814-hexmap = []
3815-for i in xrange(256):
3816- hexmap.append(hexchars[(i&0xF0)/16]+hexchars[i&0x0F])
3817-
3818-def tohex(s):
3819- r = []
3820- for c in s:
3821- r.append(hexmap[ord(c)])
3822- return ''.join(r)
3823-
3824-def make_readable(s):
3825- if not s:
3826- return ''
3827- if quote(s).find('%') >= 0:
3828- return tohex(s)
3829- return '"'+s+'"'
3830-
3831-def toint(s):
3832- return long(b2a_hex(s), 16)
3833-
3834-# header, reserved, download id, my id, [length, message]
3835-
3836-streamno = 0
3837-
3838-
3839-class StreamCheck:
3840- def __init__(self):
3841- global streamno
3842- self.no = streamno
3843- streamno += 1
3844- self.buffer = StringIO()
3845- self.next_len, self.next_func = 1, self.read_header_len
3846-
3847- def read_header_len(self, s):
3848- if ord(s) != len(protocol_name):
3849- print self.no, 'BAD HEADER LENGTH'
3850- return len(protocol_name), self.read_header
3851-
3852- def read_header(self, s):
3853- if s != protocol_name:
3854- print self.no, 'BAD HEADER'
3855- return 8, self.read_reserved
3856-
3857- def read_reserved(self, s):
3858- return 20, self.read_download_id
3859-
3860- def read_download_id(self, s):
3861- if DEBUG:
3862- print self.no, 'download ID ' + tohex(s)
3863- return 20, self.read_peer_id
3864-
3865- def read_peer_id(self, s):
3866- if DEBUG:
3867- print self.no, 'peer ID' + make_readable(s)
3868- return 4, self.read_len
3869-
3870- def read_len(self, s):
3871- l = toint(s)
3872- if l > 2 ** 23:
3873- print self.no, 'BAD LENGTH: '+str(l)+' ('+s+')'
3874- return l, self.read_message
3875-
3876- def read_message(self, s):
3877- if not s:
3878- return 4, self.read_len
3879- m = s[0]
3880- if ord(m) > 8:
3881- print self.no, 'BAD MESSAGE: '+str(ord(m))
3882- if m == Connecter.REQUEST:
3883- if len(s) != 13:
3884- print self.no, 'BAD REQUEST SIZE: '+str(len(s))
3885- return 4, self.read_len
3886- index = toint(s[1:5])
3887- begin = toint(s[5:9])
3888- length = toint(s[9:])
3889- print self.no, 'Request: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
3890- elif m == Connecter.CANCEL:
3891- if len(s) != 13:
3892- print self.no, 'BAD CANCEL SIZE: '+str(len(s))
3893- return 4, self.read_len
3894- index = toint(s[1:5])
3895- begin = toint(s[5:9])
3896- length = toint(s[9:])
3897- print self.no, 'Cancel: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
3898- elif m == Connecter.PIECE:
3899- index = toint(s[1:5])
3900- begin = toint(s[5:9])
3901- length = len(s)-9
3902- print self.no, 'Piece: '+str(index)+': '+str(begin)+'-'+str(begin)+'+'+str(length)
3903- else:
3904- print self.no, 'Message '+str(ord(m))+' (length '+str(len(s))+')'
3905- return 4, self.read_len
3906-
3907- def write(self, s):
3908- while True:
3909- i = self.next_len - self.buffer.tell()
3910- if i > len(s):
3911- self.buffer.write(s)
3912- return
3913- self.buffer.write(s[:i])
3914- s = s[i:]
3915- m = self.buffer.getvalue()
3916- self.buffer.reset()
3917- self.buffer.truncate()
3918- x = self.next_func(m)
3919- self.next_len, self.next_func = x
3920
3921=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/ConfigDir.py'
3922--- .pc/09_timtuckerfixes.dpatch/BitTornado/ConfigDir.py 2010-03-21 14:36:30 +0000
3923+++ .pc/09_timtuckerfixes.dpatch/BitTornado/ConfigDir.py 1970-01-01 00:00:00 +0000
3924@@ -1,401 +0,0 @@
3925-#written by John Hoffman
3926-
3927-from inifile import ini_write, ini_read
3928-from bencode import bencode, bdecode
3929-from types import IntType, LongType, StringType, FloatType
3930-from CreateIcons import GetIcons, CreateIcon
3931-from parseargs import defaultargs
3932-from __init__ import product_name, version_short
3933-import sys,os
3934-from time import time, strftime
3935-
3936-try:
3937- True
3938-except:
3939- True = 1
3940- False = 0
3941-
3942-try:
3943- realpath = os.path.realpath
3944-except:
3945- realpath = lambda x:x
3946-OLDICONPATH = os.path.abspath(os.path.dirname(realpath(sys.argv[0])))
3947-
3948-DIRNAME = '.'+product_name
3949-
3950-hexchars = '0123456789abcdef'
3951-hexmap = []
3952-revmap = {}
3953-for i in xrange(256):
3954- x = hexchars[(i&0xF0)/16]+hexchars[i&0x0F]
3955- hexmap.append(x)
3956- revmap[x] = chr(i)
3957-
3958-def tohex(s):
3959- r = []
3960- for c in s:
3961- r.append(hexmap[ord(c)])
3962- return ''.join(r)
3963-
3964-def unhex(s):
3965- r = [ revmap[s[x:x+2]] for x in xrange(0, len(s), 2) ]
3966- return ''.join(r)
3967-
3968-def copyfile(oldpath, newpath): # simple file copy, all in RAM
3969- try:
3970- f = open(oldpath,'rb')
3971- r = f.read()
3972- success = True
3973- except:
3974- success = False
3975- try:
3976- f.close()
3977- except:
3978- pass
3979- if not success:
3980- return False
3981- try:
3982- f = open(newpath,'wb')
3983- f.write(r)
3984- except:
3985- success = False
3986- try:
3987- f.close()
3988- except:
3989- pass
3990- return success
3991-
3992-
3993-class ConfigDir:
3994-
3995- ###### INITIALIZATION TASKS ######
3996-
3997- def __init__(self, config_type = None):
3998- self.config_type = config_type
3999- if config_type:
4000- config_ext = '.'+config_type
4001- else:
4002- config_ext = ''
4003-
4004- def check_sysvars(x):
4005- y = os.path.expandvars(x)
4006- if y != x and os.path.isdir(y):
4007- return y
4008- return None
4009-
4010- for d in ['${APPDATA}', '${HOME}', '${HOMEPATH}', '${USERPROFILE}']:
4011- dir_root = check_sysvars(d)
4012- if dir_root:
4013- break
4014- else:
4015- dir_root = os.path.expanduser('~')
4016- if not os.path.isdir(dir_root):
4017- dir_root = os.path.abspath(os.path.dirname(sys.argv[0]))
4018-
4019- dir_root = os.path.join(dir_root,DIRNAME)
4020- self.dir_root = dir_root
4021-
4022- if not os.path.isdir(self.dir_root):
4023- os.mkdir(self.dir_root,0700) # exception if failed
4024-
4025- self.dir_icons = os.path.join(dir_root,'icons')
4026- if not os.path.isdir(self.dir_icons):
4027- os.mkdir(self.dir_icons)
4028- for icon in GetIcons():
4029- i = os.path.join(self.dir_icons,icon)
4030- if not os.path.exists(i):
4031- if not copyfile(os.path.join(OLDICONPATH,icon),i):
4032- CreateIcon(icon,self.dir_icons)
4033-
4034- self.dir_torrentcache = os.path.join(dir_root,'torrentcache')
4035- if not os.path.isdir(self.dir_torrentcache):
4036- os.mkdir(self.dir_torrentcache)
4037-
4038- self.dir_datacache = os.path.join(dir_root,'datacache')
4039- if not os.path.isdir(self.dir_datacache):
4040- os.mkdir(self.dir_datacache)
4041-
4042- self.dir_piececache = os.path.join(dir_root,'piececache')
4043- if not os.path.isdir(self.dir_piececache):
4044- os.mkdir(self.dir_piececache)
4045-
4046- self.configfile = os.path.join(dir_root,'config'+config_ext+'.ini')
4047- self.statefile = os.path.join(dir_root,'state'+config_ext)
4048-
4049- self.TorrentDataBuffer = {}
4050-
4051-
4052- ###### CONFIG HANDLING ######
4053-
4054- def setDefaults(self, defaults, ignore=[]):
4055- self.config = defaultargs(defaults)
4056- for k in ignore:
4057- if self.config.has_key(k):
4058- del self.config[k]
4059-
4060- def checkConfig(self):
4061- return os.path.exists(self.configfile)
4062-
4063- def loadConfig(self):
4064- try:
4065- r = ini_read(self.configfile)['']
4066- except:
4067- return self.config
4068- l = self.config.keys()
4069- for k,v in r.items():
4070- if self.config.has_key(k):
4071- t = type(self.config[k])
4072- try:
4073- if t == StringType:
4074- self.config[k] = v
4075- elif t == IntType or t == LongType:
4076- self.config[k] = long(v)
4077- elif t == FloatType:
4078- self.config[k] = float(v)
4079- l.remove(k)
4080- except:
4081- pass
4082- if l: # new default values since last save
4083- self.saveConfig()
4084- return self.config
4085-
4086- def saveConfig(self, new_config = None):
4087- if new_config:
4088- for k,v in new_config.items():
4089- if self.config.has_key(k):
4090- self.config[k] = v
4091- try:
4092- ini_write( self.configfile, self.config,
4093- 'Generated by '+product_name+'/'+version_short+'\n'
4094- + strftime('%x %X') )
4095- return True
4096- except:
4097- return False
4098-
4099- def getConfig(self):
4100- return self.config
4101-
4102-
4103- ###### STATE HANDLING ######
4104-
4105- def getState(self):
4106- try:
4107- f = open(self.statefile,'rb')
4108- r = f.read()
4109- except:
4110- r = None
4111- try:
4112- f.close()
4113- except:
4114- pass
4115- try:
4116- r = bdecode(r)
4117- except:
4118- r = None
4119- return r
4120-
4121- def saveState(self, state):
4122- try:
4123- f = open(self.statefile,'wb')
4124- f.write(bencode(state))
4125- success = True
4126- except:
4127- success = False
4128- try:
4129- f.close()
4130- except:
4131- pass
4132- return success
4133-
4134-
4135- ###### TORRENT HANDLING ######
4136-
4137- def getTorrents(self):
4138- d = {}
4139- for f in os.listdir(self.dir_torrentcache):
4140- f = os.path.basename(f)
4141- try:
4142- f, garbage = f.split('.')
4143- except:
4144- pass
4145- d[unhex(f)] = 1
4146- return d.keys()
4147-
4148- def getTorrentVariations(self, t):
4149- t = tohex(t)
4150- d = []
4151- for f in os.listdir(self.dir_torrentcache):
4152- f = os.path.basename(f)
4153- if f[:len(t)] == t:
4154- try:
4155- garbage, ver = f.split('.')
4156- except:
4157- ver = '0'
4158- d.append(int(ver))
4159- d.sort()
4160- return d
4161-
4162- def getTorrent(self, t, v = -1):
4163- t = tohex(t)
4164- if v == -1:
4165- v = max(self.getTorrentVariations(t)) # potential exception
4166- if v:
4167- t += '.'+str(v)
4168- try:
4169- f = open(os.path.join(self.dir_torrentcache,t),'rb')
4170- r = bdecode(f.read())
4171- except:
4172- r = None
4173- try:
4174- f.close()
4175- except:
4176- pass
4177- return r
4178-
4179- def writeTorrent(self, data, t, v = -1):
4180- t = tohex(t)
4181- if v == -1:
4182- try:
4183- v = max(self.getTorrentVariations(t))+1
4184- except:
4185- v = 0
4186- if v:
4187- t += '.'+str(v)
4188- try:
4189- f = open(os.path.join(self.dir_torrentcache,t),'wb')
4190- f.write(bencode(data))
4191- except:
4192- v = None
4193- try:
4194- f.close()
4195- except:
4196- pass
4197- return v
4198-
4199-
4200- ###### TORRENT DATA HANDLING ######
4201-
4202- def getTorrentData(self, t):
4203- if self.TorrentDataBuffer.has_key(t):
4204- return self.TorrentDataBuffer[t]
4205- t = os.path.join(self.dir_datacache,tohex(t))
4206- if not os.path.exists(t):
4207- return None
4208- try:
4209- f = open(t,'rb')
4210- r = bdecode(f.read())
4211- except:
4212- r = None
4213- try:
4214- f.close()
4215- except:
4216- pass
4217- self.TorrentDataBuffer[t] = r
4218- return r
4219-
4220- def writeTorrentData(self, t, data):
4221- self.TorrentDataBuffer[t] = data
4222- try:
4223- f = open(os.path.join(self.dir_datacache,tohex(t)),'wb')
4224- f.write(bencode(data))
4225- success = True
4226- except:
4227- success = False
4228- try:
4229- f.close()
4230- except:
4231- pass
4232- if not success:
4233- self.deleteTorrentData(t)
4234- return success
4235-
4236- def deleteTorrentData(self, t):
4237- try:
4238- os.remove(os.path.join(self.dir_datacache,tohex(t)))
4239- except:
4240- pass
4241-
4242- def getPieceDir(self, t):
4243- return os.path.join(self.dir_piececache,tohex(t))
4244-
4245-
4246- ###### EXPIRATION HANDLING ######
4247-
4248- def deleteOldCacheData(self, days, still_active = [], delete_torrents = False):
4249- if not days:
4250- return
4251- exptime = time() - (days*24*3600)
4252- names = {}
4253- times = {}
4254-
4255- for f in os.listdir(self.dir_torrentcache):
4256- p = os.path.join(self.dir_torrentcache,f)
4257- f = os.path.basename(f)
4258- try:
4259- f, garbage = f.split('.')
4260- except:
4261- pass
4262- try:
4263- f = unhex(f)
4264- assert len(f) == 20
4265- except:
4266- continue
4267- if delete_torrents:
4268- names.setdefault(f,[]).append(p)
4269- try:
4270- t = os.path.getmtime(p)
4271- except:
4272- t = time()
4273- times.setdefault(f,[]).append(t)
4274-
4275- for f in os.listdir(self.dir_datacache):
4276- p = os.path.join(self.dir_datacache,f)
4277- try:
4278- f = unhex(os.path.basename(f))
4279- assert len(f) == 20
4280- except:
4281- continue
4282- names.setdefault(f,[]).append(p)
4283- try:
4284- t = os.path.getmtime(p)
4285- except:
4286- t = time()
4287- times.setdefault(f,[]).append(t)
4288-
4289- for f in os.listdir(self.dir_piececache):
4290- p = os.path.join(self.dir_piececache,f)
4291- try:
4292- f = unhex(os.path.basename(f))
4293- assert len(f) == 20
4294- except:
4295- continue
4296- for f2 in os.listdir(p):
4297- p2 = os.path.join(p,f2)
4298- names.setdefault(f,[]).append(p2)
4299- try:
4300- t = os.path.getmtime(p2)
4301- except:
4302- t = time()
4303- times.setdefault(f,[]).append(t)
4304- names.setdefault(f,[]).append(p)
4305-
4306- for k,v in times.items():
4307- if max(v) < exptime and not k in still_active:
4308- for f in names[k]:
4309- try:
4310- os.remove(f)
4311- except:
4312- try:
4313- os.removedirs(f)
4314- except:
4315- pass
4316-
4317-
4318- def deleteOldTorrents(self, days, still_active = []):
4319- self.deleteOldCacheData(days, still_active, True)
4320-
4321-
4322- ###### OTHER ######
4323-
4324- def getIconDir(self):
4325- return self.dir_icons
4326
4327=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/RawServer.py'
4328--- .pc/09_timtuckerfixes.dpatch/BitTornado/RawServer.py 2010-03-21 14:36:30 +0000
4329+++ .pc/09_timtuckerfixes.dpatch/BitTornado/RawServer.py 1970-01-01 00:00:00 +0000
4330@@ -1,195 +0,0 @@
4331-# Written by Bram Cohen
4332-# see LICENSE.txt for license information
4333-
4334-from bisect import insort
4335-from SocketHandler import SocketHandler, UPnP_ERROR
4336-import socket
4337-from cStringIO import StringIO
4338-from traceback import print_exc
4339-from select import error
4340-from threading import Thread, Event
4341-from time import sleep
4342-from clock import clock
4343-import sys
4344-try:
4345- True
4346-except:
4347- True = 1
4348- False = 0
4349-
4350-
4351-def autodetect_ipv6():
4352- try:
4353- assert sys.version_info >= (2,3)
4354- assert socket.has_ipv6
4355- socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
4356- except:
4357- return 0
4358- return 1
4359-
4360-def autodetect_socket_style():
4361- if sys.platform.find('linux') < 0:
4362- return 1
4363- else:
4364- try:
4365- f = open('/proc/sys/net/ipv6/bindv6only','r')
4366- dual_socket_style = int(f.read())
4367- f.close()
4368- return int(not dual_socket_style)
4369- except:
4370- return 0
4371-
4372-
4373-READSIZE = 32768
4374-
4375-class RawServer:
4376- def __init__(self, doneflag, timeout_check_interval, timeout, noisy = True,
4377- ipv6_enable = True, failfunc = lambda x: None, errorfunc = None,
4378- sockethandler = None, excflag = Event()):
4379- self.timeout_check_interval = timeout_check_interval
4380- self.timeout = timeout
4381- self.servers = {}
4382- self.single_sockets = {}
4383- self.dead_from_write = []
4384- self.doneflag = doneflag
4385- self.noisy = noisy
4386- self.failfunc = failfunc
4387- self.errorfunc = errorfunc
4388- self.exccount = 0
4389- self.funcs = []
4390- self.externally_added = []
4391- self.finished = Event()
4392- self.tasks_to_kill = []
4393- self.excflag = excflag
4394-
4395- if sockethandler is None:
4396- sockethandler = SocketHandler(timeout, ipv6_enable, READSIZE)
4397- self.sockethandler = sockethandler
4398- self.add_task(self.scan_for_timeouts, timeout_check_interval)
4399-
4400- def get_exception_flag(self):
4401- return self.excflag
4402-
4403- def _add_task(self, func, delay, id = None):
4404- assert float(delay) >= 0
4405- insort(self.funcs, (clock() + delay, func, id))
4406-
4407- def add_task(self, func, delay = 0, id = None):
4408- assert float(delay) >= 0
4409- self.externally_added.append((func, delay, id))
4410-
4411- def scan_for_timeouts(self):
4412- self.add_task(self.scan_for_timeouts, self.timeout_check_interval)
4413- self.sockethandler.scan_for_timeouts()
4414-
4415- def bind(self, port, bind = '', reuse = False,
4416- ipv6_socket_style = 1, upnp = False):
4417- self.sockethandler.bind(port, bind, reuse, ipv6_socket_style, upnp)
4418-
4419- def find_and_bind(self, minport, maxport, bind = '', reuse = False,
4420- ipv6_socket_style = 1, upnp = 0, randomizer = False):
4421- return self.sockethandler.find_and_bind(minport, maxport, bind, reuse,
4422- ipv6_socket_style, upnp, randomizer)
4423-
4424- def start_connection_raw(self, dns, socktype, handler = None):
4425- return self.sockethandler.start_connection_raw(dns, socktype, handler)
4426-
4427- def start_connection(self, dns, handler = None, randomize = False):
4428- return self.sockethandler.start_connection(dns, handler, randomize)
4429-
4430- def get_stats(self):
4431- return self.sockethandler.get_stats()
4432-
4433- def pop_external(self):
4434- while self.externally_added:
4435- (a, b, c) = self.externally_added.pop(0)
4436- self._add_task(a, b, c)
4437-
4438-
4439- def listen_forever(self, handler):
4440- self.sockethandler.set_handler(handler)
4441- try:
4442- while not self.doneflag.isSet():
4443- try:
4444- self.pop_external()
4445- self._kill_tasks()
4446- if self.funcs:
4447- period = self.funcs[0][0] + 0.001 - clock()
4448- else:
4449- period = 2 ** 30
4450- if period < 0:
4451- period = 0
4452- events = self.sockethandler.do_poll(period)
4453- if self.doneflag.isSet():
4454- return
4455- while self.funcs and self.funcs[0][0] <= clock():
4456- garbage1, func, id = self.funcs.pop(0)
4457- if id in self.tasks_to_kill:
4458- pass
4459- try:
4460-# print func.func_name
4461- func()
4462- except (SystemError, MemoryError), e:
4463- self.failfunc(str(e))
4464- return
4465- except KeyboardInterrupt:
4466-# self.exception(True)
4467- return
4468- except:
4469- if self.noisy:
4470- self.exception()
4471- self.sockethandler.close_dead()
4472- self.sockethandler.handle_events(events)
4473- if self.doneflag.isSet():
4474- return
4475- self.sockethandler.close_dead()
4476- except (SystemError, MemoryError), e:
4477- self.failfunc(str(e))
4478- return
4479- except error:
4480- if self.doneflag.isSet():
4481- return
4482- except KeyboardInterrupt:
4483-# self.exception(True)
4484- return
4485- except:
4486- self.exception()
4487- if self.exccount > 10:
4488- return
4489- finally:
4490-# self.sockethandler.shutdown()
4491- self.finished.set()
4492-
4493- def is_finished(self):
4494- return self.finished.isSet()
4495-
4496- def wait_until_finished(self):
4497- self.finished.wait()
4498-
4499- def _kill_tasks(self):
4500- if self.tasks_to_kill:
4501- new_funcs = []
4502- for (t, func, id) in self.funcs:
4503- if id not in self.tasks_to_kill:
4504- new_funcs.append((t, func, id))
4505- self.funcs = new_funcs
4506- self.tasks_to_kill = []
4507-
4508- def kill_tasks(self, id):
4509- self.tasks_to_kill.append(id)
4510-
4511- def exception(self, kbint = False):
4512- if not kbint:
4513- self.excflag.set()
4514- self.exccount += 1
4515- if self.errorfunc is None:
4516- print_exc()
4517- else:
4518- data = StringIO()
4519- print_exc(file = data)
4520-# print data.getvalue() # report exception here too
4521- if not kbint: # don't report here if it's a keyboard interrupt
4522- self.errorfunc(data.getvalue())
4523-
4524- def shutdown(self):
4525- self.sockethandler.shutdown()
4526
4527=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/clock.py'
4528--- .pc/09_timtuckerfixes.dpatch/BitTornado/clock.py 2010-03-21 14:36:30 +0000
4529+++ .pc/09_timtuckerfixes.dpatch/BitTornado/clock.py 1970-01-01 00:00:00 +0000
4530@@ -1,27 +0,0 @@
4531-# Written by John Hoffman
4532-# see LICENSE.txt for license information
4533-
4534-from time import *
4535-import sys
4536-
4537-_MAXFORWARD = 100
4538-_FUDGE = 1
4539-
4540-class RelativeTime:
4541- def __init__(self):
4542- self.time = time()
4543- self.offset = 0
4544-
4545- def get_time(self):
4546- t = time() + self.offset
4547- if t < self.time or t > self.time + _MAXFORWARD:
4548- self.time += _FUDGE
4549- self.offset += self.time - t
4550- return self.time
4551- self.time = t
4552- return t
4553-
4554-if sys.platform != 'win32':
4555- _RTIME = RelativeTime()
4556- def clock():
4557- return _RTIME.get_time()
4558\ No newline at end of file
4559
4560=== removed file '.pc/09_timtuckerfixes.dpatch/BitTornado/download_bt1.py'
4561--- .pc/09_timtuckerfixes.dpatch/BitTornado/download_bt1.py 2010-03-21 14:36:30 +0000
4562+++ .pc/09_timtuckerfixes.dpatch/BitTornado/download_bt1.py 1970-01-01 00:00:00 +0000
4563@@ -1,877 +0,0 @@
4564-# Written by Bram Cohen
4565-# see LICENSE.txt for license information
4566-
4567-from zurllib import urlopen
4568-from urlparse import urlparse
4569-from BT1.btformats import check_message
4570-from BT1.Choker import Choker
4571-from BT1.Storage import Storage
4572-from BT1.StorageWrapper import StorageWrapper
4573-from BT1.FileSelector import FileSelector
4574-from BT1.Uploader import Upload
4575-from BT1.Downloader import Downloader
4576-from BT1.HTTPDownloader import HTTPDownloader
4577-from BT1.Connecter import Connecter
4578-from RateLimiter import RateLimiter
4579-from BT1.Encrypter import Encoder
4580-from RawServer import RawServer, autodetect_ipv6, autodetect_socket_style
4581-from BT1.Rerequester import Rerequester
4582-from BT1.DownloaderFeedback import DownloaderFeedback
4583-from RateMeasure import RateMeasure
4584-from CurrentRateMeasure import Measure
4585-from BT1.PiecePicker import PiecePicker
4586-from BT1.Statistics import Statistics
4587-from ConfigDir import ConfigDir
4588-from bencode import bencode, bdecode
4589-from natpunch import UPnP_test
4590-from sha import sha
4591-from os import path, makedirs, listdir
4592-from parseargs import parseargs, formatDefinitions, defaultargs
4593-from socket import error as socketerror
4594-from random import seed
4595-from threading import Thread, Event
4596-from clock import clock
4597-from BTcrypto import CRYPTO_OK
4598-from __init__ import createPeerID
4599-
4600-try:
4601- True
4602-except:
4603- True = 1
4604- False = 0
4605-
4606-defaults = [
4607- ('max_uploads', 7,
4608- "the maximum number of uploads to allow at once."),
4609- ('keepalive_interval', 120.0,
4610- 'number of seconds to pause between sending keepalives'),
4611- ('download_slice_size', 2 ** 14,
4612- "How many bytes to query for per request."),
4613- ('upload_unit_size', 1460,
4614- "when limiting upload rate, how many bytes to send at a time"),
4615- ('request_backlog', 10,
4616- "maximum number of requests to keep in a single pipe at once."),
4617- ('max_message_length', 2 ** 23,
4618- "maximum length prefix encoding you'll accept over the wire - larger values get the connection dropped."),
4619- ('ip', '',
4620- "ip to report you have to the tracker."),
4621- ('minport', 10000, 'minimum port to listen on, counts up if unavailable'),
4622- ('maxport', 60000, 'maximum port to listen on'),
4623- ('random_port', 1, 'whether to choose randomly inside the port range ' +
4624- 'instead of counting up linearly'),
4625- ('responsefile', '',
4626- 'file the server response was stored in, alternative to url'),
4627- ('url', '',
4628- 'url to get file from, alternative to responsefile'),
4629- ('crypto_allowed', int(CRYPTO_OK),
4630- 'whether to allow the client to accept encrypted connections'),
4631- ('crypto_only', 0,
4632- 'whether to only create or allow encrypted connections'),
4633- ('crypto_stealth', 0,
4634- 'whether to prevent all non-encrypted connection attempts; ' +
4635- 'will result in an effectively firewalled state on older trackers'),
4636- ('selector_enabled', 1,
4637- 'whether to enable the file selector and fast resume function'),
4638- ('expire_cache_data', 10,
4639- 'the number of days after which you wish to expire old cache data ' +
4640- '(0 = disabled)'),
4641- ('priority', '',
4642- 'a list of file priorities separated by commas, must be one per file, ' +
4643- '0 = highest, 1 = normal, 2 = lowest, -1 = download disabled'),
4644- ('saveas', '',
4645- 'local file name to save the file as, null indicates query user'),
4646- ('timeout', 300.0,
4647- 'time to wait between closing sockets which nothing has been received on'),
4648- ('timeout_check_interval', 60.0,
4649- 'time to wait between checking if any connections have timed out'),
4650- ('max_slice_length', 2 ** 17,
4651- "maximum length slice to send to peers, larger requests are ignored"),
4652- ('max_rate_period', 20.0,
4653- "maximum amount of time to guess the current rate estimate represents"),
4654- ('bind', '',
4655- 'comma-separated list of ips/hostnames to bind to locally'),
4656-# ('ipv6_enabled', autodetect_ipv6(),
4657- ('ipv6_enabled', 0,
4658- 'allow the client to connect to peers via IPv6'),
4659- ('ipv6_binds_v4', autodetect_socket_style(),
4660- "set if an IPv6 server socket won't also field IPv4 connections"),
4661- ('upnp_nat_access', 1,
4662- 'attempt to autoconfigure a UPnP router to forward a server port ' +
4663- '(0 = disabled, 1 = mode 1 [fast], 2 = mode 2 [slow])'),
4664- ('upload_rate_fudge', 5.0,
4665- 'time equivalent of writing to kernel-level TCP buffer, for rate adjustment'),
4666- ('tcp_ack_fudge', 0.03,
4667- 'how much TCP ACK download overhead to add to upload rate calculations ' +
4668- '(0 = disabled)'),
4669- ('display_interval', .5,
4670- 'time between updates of displayed information'),
4671- ('rerequest_interval', 5 * 60,
4672- 'time to wait between requesting more peers'),
4673- ('min_peers', 20,
4674- 'minimum number of peers to not do rerequesting'),
4675- ('http_timeout', 60,
4676- 'number of seconds to wait before assuming that an http connection has timed out'),
4677- ('max_initiate', 40,
4678- 'number of peers at which to stop initiating new connections'),
4679- ('check_hashes', 1,
4680- 'whether to check hashes on disk'),
4681- ('max_upload_rate', 0,
4682- 'maximum kB/s to upload at (0 = no limit, -1 = automatic)'),
4683- ('max_download_rate', 0,
4684- 'maximum kB/s to download at (0 = no limit)'),
4685- ('alloc_type', 'normal',
4686- 'allocation type (may be normal, background, pre-allocate or sparse)'),
4687- ('alloc_rate', 2.0,
4688- 'rate (in MiB/s) to allocate space at using background allocation'),
4689- ('buffer_reads', 1,
4690- 'whether to buffer disk reads'),
4691- ('write_buffer_size', 4,
4692- 'the maximum amount of space to use for buffering disk writes ' +
4693- '(in megabytes, 0 = disabled)'),
4694- ('breakup_seed_bitfield', 1,
4695- 'sends an incomplete bitfield and then fills with have messages, '
4696- 'in order to get around stupid ISP manipulation'),
4697- ('snub_time', 30.0,
4698- "seconds to wait for data to come in over a connection before assuming it's semi-permanently choked"),
4699- ('spew', 0,
4700- "whether to display diagnostic info to stdout"),
4701- ('rarest_first_cutoff', 2,
4702- "number of downloads at which to switch from random to rarest first"),
4703- ('rarest_first_priority_cutoff', 5,
4704- 'the number of peers which need to have a piece before other partials take priority over rarest first'),
4705- ('min_uploads', 4,
4706- "the number of uploads to fill out to with extra optimistic unchokes"),
4707- ('max_files_open', 50,
4708- 'the maximum number of files to keep open at a time, 0 means no limit'),
4709- ('round_robin_period', 30,
4710- "the number of seconds between the client's switching upload targets"),
4711- ('super_seeder', 0,
4712- "whether to use special upload-efficiency-maximizing routines (only for dedicated seeds)"),
4713- ('security', 1,
4714- "whether to enable extra security features intended to prevent abuse"),
4715- ('max_connections', 0,
4716- "the absolute maximum number of peers to connect with (0 = no limit)"),
4717- ('auto_kick', 1,
4718- "whether to allow the client to automatically kick/ban peers that send bad data"),
4719- ('double_check', 1,
4720- "whether to double-check data being written to the disk for errors (may increase CPU load)"),
4721- ('triple_check', 0,
4722- "whether to thoroughly check data being written to the disk (may slow disk access)"),
4723- ('lock_files', 1,
4724- "whether to lock files the client is working with"),
4725- ('lock_while_reading', 0,
4726- "whether to lock access to files being read"),
4727- ('auto_flush', 0,
4728- "minutes between automatic flushes to disk (0 = disabled)"),
4729- ('dedicated_seed_id', '',
4730- "code to send to tracker identifying as a dedicated seed"),
4731- ]
4732-
4733-argslistheader = 'Arguments are:\n\n'
4734-
4735-
4736-def _failfunc(x):
4737- print x
4738-
4739-# old-style downloader
4740-def download(params, filefunc, statusfunc, finfunc, errorfunc, doneflag, cols,
4741- pathFunc = None, presets = {}, exchandler = None,
4742- failed = _failfunc, paramfunc = None):
4743-
4744- try:
4745- config = parse_params(params, presets)
4746- except ValueError, e:
4747- failed('error: ' + str(e) + '\nrun with no args for parameter explanations')
4748- return
4749- if not config:
4750- errorfunc(get_usage())
4751- return
4752-
4753- myid = createPeerID()
4754- seed(myid)
4755-
4756- rawserver = RawServer(doneflag, config['timeout_check_interval'],
4757- config['timeout'], ipv6_enable = config['ipv6_enabled'],
4758- failfunc = failed, errorfunc = exchandler)
4759-
4760- upnp_type = UPnP_test(config['upnp_nat_access'])
4761- try:
4762- listen_port = rawserver.find_and_bind(config['minport'], config['maxport'],
4763- config['bind'], ipv6_socket_style = config['ipv6_binds_v4'],
4764- upnp = upnp_type, randomizer = config['random_port'])
4765- except socketerror, e:
4766- failed("Couldn't listen - " + str(e))
4767- return
4768-
4769- response = get_response(config['responsefile'], config['url'], failed)
4770- if not response:
4771- return
4772-
4773- infohash = sha(bencode(response['info'])).digest()
4774-
4775- d = BT1Download(statusfunc, finfunc, errorfunc, exchandler, doneflag,
4776- config, response, infohash, myid, rawserver, listen_port)
4777-
4778- if not d.saveAs(filefunc):
4779- return
4780-
4781- if pathFunc:
4782- pathFunc(d.getFilename())
4783-
4784- hashcheck = d.initFiles(old_style = True)
4785- if not hashcheck:
4786- return
4787- if not hashcheck():
4788- return
4789- if not d.startEngine():
4790- return
4791- d.startRerequester()
4792- d.autoStats()
4793-
4794- statusfunc(activity = 'connecting to peers')
4795-
4796- if paramfunc:
4797- paramfunc({ 'max_upload_rate' : d.setUploadRate, # change_max_upload_rate(<int KiB/sec>)
4798- 'max_uploads': d.setConns, # change_max_uploads(<int max uploads>)
4799- 'listen_port' : listen_port, # int
4800- 'peer_id' : myid, # string
4801- 'info_hash' : infohash, # string
4802- 'start_connection' : d._startConnection, # start_connection((<string ip>, <int port>), <peer id>)
4803- })
4804-
4805- rawserver.listen_forever(d.getPortHandler())
4806-
4807- d.shutdown()
4808-
4809-
4810-def parse_params(params, presets = {}):
4811- if len(params) == 0:
4812- return None
4813- config, args = parseargs(params, defaults, 0, 1, presets = presets)
4814- if args:
4815- if config['responsefile'] or config['url']:
4816- raise ValueError,'must have responsefile or url as arg or parameter, not both'
4817- if path.isfile(args[0]):
4818- config['responsefile'] = args[0]
4819- else:
4820- try:
4821- urlparse(args[0])
4822- except:
4823- raise ValueError, 'bad filename or url'
4824- config['url'] = args[0]
4825- elif (config['responsefile'] == '') == (config['url'] == ''):
4826- raise ValueError, 'need responsefile or url, must have one, cannot have both'
4827- return config
4828-
4829-
4830-def get_usage(defaults = defaults, cols = 100, presets = {}):
4831- return (argslistheader + formatDefinitions(defaults, cols, presets))
4832-
4833-
4834-def get_response(file, url, errorfunc):
4835- try:
4836- if file:
4837- h = open(file, 'rb')
4838- try:
4839- line = h.read(10) # quick test to see if responsefile contains a dict
4840- front,garbage = line.split(':',1)
4841- assert front[0] == 'd'
4842- int(front[1:])
4843- except:
4844- errorfunc(file+' is not a valid responsefile')
4845- return None
4846- try:
4847- h.seek(0)
4848- except:
4849- try:
4850- h.close()
4851- except:
4852- pass
4853- h = open(file, 'rb')
4854- else:
4855- try:
4856- h = urlopen(url)
4857- except:
4858- errorfunc(url+' bad url')
4859- return None
4860- response = h.read()
4861-
4862- except IOError, e:
4863- errorfunc('problem getting response info - ' + str(e))
4864- return None
4865- try:
4866- h.close()
4867- except:
4868- pass
4869- try:
4870- try:
4871- response = bdecode(response)
4872- except:
4873- errorfunc("warning: bad data in responsefile")
4874- response = bdecode(response, sloppy=1)
4875- check_message(response)
4876- except ValueError, e:
4877- errorfunc("got bad file info - " + str(e))
4878- return None
4879-
4880- return response
4881-
4882-
4883-class BT1Download:
4884- def __init__(self, statusfunc, finfunc, errorfunc, excfunc, doneflag,
4885- config, response, infohash, id, rawserver, port,
4886- appdataobj = None):
4887- self.statusfunc = statusfunc
4888- self.finfunc = finfunc
4889- self.errorfunc = errorfunc
4890- self.excfunc = excfunc
4891- self.doneflag = doneflag
4892- self.config = config
4893- self.response = response
4894- self.infohash = infohash
4895- self.myid = id
4896- self.rawserver = rawserver
4897- self.port = port
4898-
4899- self.info = self.response['info']
4900- self.pieces = [self.info['pieces'][x:x+20]
4901- for x in xrange(0, len(self.info['pieces']), 20)]
4902- self.len_pieces = len(self.pieces)
4903- self.argslistheader = argslistheader
4904- self.unpauseflag = Event()
4905- self.unpauseflag.set()
4906- self.downloader = None
4907- self.storagewrapper = None
4908- self.fileselector = None
4909- self.super_seeding_active = False
4910- self.filedatflag = Event()
4911- self.spewflag = Event()
4912- self.superseedflag = Event()
4913- self.whenpaused = None
4914- self.finflag = Event()
4915- self.rerequest = None
4916- self.tcp_ack_fudge = config['tcp_ack_fudge']
4917-
4918- self.selector_enabled = config['selector_enabled']
4919- if appdataobj:
4920- self.appdataobj = appdataobj
4921- elif self.selector_enabled:
4922- self.appdataobj = ConfigDir()
4923- self.appdataobj.deleteOldCacheData( config['expire_cache_data'],
4924- [self.infohash] )
4925-
4926- self.excflag = self.rawserver.get_exception_flag()
4927- self.failed = False
4928- self.checking = False
4929- self.started = False
4930-
4931- self.picker = PiecePicker(self.len_pieces, config['rarest_first_cutoff'],
4932- config['rarest_first_priority_cutoff'])
4933- self.choker = Choker(config, rawserver.add_task,
4934- self.picker, self.finflag.isSet)
4935-
4936-
4937- def checkSaveLocation(self, loc):
4938- if self.info.has_key('length'):
4939- return path.exists(loc)
4940- for x in self.info['files']:
4941- if path.exists(path.join(loc, x['path'][0])):
4942- return True
4943- return False
4944-
4945-
4946- def saveAs(self, filefunc, pathfunc = None):
4947- try:
4948- def make(f, forcedir = False):
4949- if not forcedir:
4950- f = path.split(f)[0]
4951- if f != '' and not path.exists(f):
4952- makedirs(f)
4953-
4954- if self.info.has_key('length'):
4955- file_length = self.info['length']
4956- file = filefunc(self.info['name'], file_length,
4957- self.config['saveas'], False)
4958- if file is None:
4959- return None
4960- make(file)
4961- files = [(file, file_length)]
4962- else:
4963- file_length = 0L
4964- for x in self.info['files']:
4965- file_length += x['length']
4966- file = filefunc(self.info['name'], file_length,
4967- self.config['saveas'], True)
4968- if file is None:
4969- return None
4970-
4971- # if this path exists, and no files from the info dict exist, we assume it's a new download and
4972- # the user wants to create a new directory with the default name
4973- existing = 0
4974- if path.exists(file):
4975- if not path.isdir(file):
4976- self.errorfunc(file + 'is not a dir')
4977- return None
4978- if len(listdir(file)) > 0: # if it's not empty
4979- for x in self.info['files']:
4980- if path.exists(path.join(file, x['path'][0])):
4981- existing = 1
4982- if not existing:
4983- file = path.join(file, self.info['name'])
4984- if path.exists(file) and not path.isdir(file):
4985- if file[-8:] == '.torrent':
4986- file = file[:-8]
4987- if path.exists(file) and not path.isdir(file):
4988- self.errorfunc("Can't create dir - " + self.info['name'])
4989- return None
4990- make(file, True)
4991-
4992- # alert the UI to any possible change in path
4993- if pathfunc != None:
4994- pathfunc(file)
4995-
4996- files = []
4997- for x in self.info['files']:
4998- n = file
4999- for i in x['path']:
5000- n = path.join(n, i)
The diff has been truncated for viewing.

Subscribers

People subscribed via source and target branches

to all changes: