/mercurial/keepalive.py

https://bitbucket.org/mirror/mercurial/ · Python · 761 lines · 585 code · 62 blank · 114 comment · 104 complexity · a87aafa79ed1525eca2909b6c56584d7 MD5 · raw file

  1. # This library is free software; you can redistribute it and/or
  2. # modify it under the terms of the GNU Lesser General Public
  3. # License as published by the Free Software Foundation; either
  4. # version 2.1 of the License, or (at your option) any later version.
  5. #
  6. # This library is distributed in the hope that it will be useful,
  7. # but WITHOUT ANY WARRANTY; without even the implied warranty of
  8. # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  9. # Lesser General Public License for more details.
  10. #
  11. # You should have received a copy of the GNU Lesser General Public
  12. # License along with this library; if not, see
  13. # <http://www.gnu.org/licenses/>.
  14. # This file is part of urlgrabber, a high-level cross-protocol url-grabber
  15. # Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
  16. # Modified by Benoit Boissinot:
  17. # - fix for digest auth (inspired from urllib2.py @ Python v2.4)
  18. # Modified by Dirkjan Ochtman:
  19. # - import md5 function from a local util module
  20. # Modified by Martin Geisler:
  21. # - moved md5 function from local util module to this module
  22. # Modified by Augie Fackler:
  23. # - add safesend method and use it to prevent broken pipe errors
  24. # on large POST requests
  25. """An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive.
  26. >>> import urllib2
  27. >>> from keepalive import HTTPHandler
  28. >>> keepalive_handler = HTTPHandler()
  29. >>> opener = urllib2.build_opener(keepalive_handler)
  30. >>> urllib2.install_opener(opener)
  31. >>>
  32. >>> fo = urllib2.urlopen('http://www.python.org')
  33. If a connection to a given host is requested, and all of the existing
  34. connections are still in use, another connection will be opened. If
  35. the handler tries to use an existing connection but it fails in some
  36. way, it will be closed and removed from the pool.
  37. To remove the handler, simply re-run build_opener with no arguments, and
  38. install that opener.
  39. You can explicitly close connections by using the close_connection()
  40. method of the returned file-like object (described below) or you can
  41. use the handler methods:
  42. close_connection(host)
  43. close_all()
  44. open_connections()
  45. NOTE: using the close_connection and close_all methods of the handler
  46. should be done with care when using multiple threads.
  47. * there is nothing that prevents another thread from creating new
  48. connections immediately after connections are closed
  49. * no checks are done to prevent in-use connections from being closed
  50. >>> keepalive_handler.close_all()
  51. EXTRA ATTRIBUTES AND METHODS
  52. Upon a status of 200, the object returned has a few additional
  53. attributes and methods, which should not be used if you want to
  54. remain consistent with the normal urllib2-returned objects:
  55. close_connection() - close the connection to the host
  56. readlines() - you know, readlines()
  57. status - the return status (i.e. 404)
  58. reason - english translation of status (i.e. 'File not found')
  59. If you want the best of both worlds, use this inside an
  60. AttributeError-catching try:
  61. >>> try: status = fo.status
  62. >>> except AttributeError: status = None
  63. Unfortunately, these are ONLY there if status == 200, so it's not
  64. easy to distinguish between non-200 responses. The reason is that
  65. urllib2 tries to do clever things with error codes 301, 302, 401,
  66. and 407, and it wraps the object upon return.
  67. For python versions earlier than 2.4, you can avoid this fancy error
  68. handling by setting the module-level global HANDLE_ERRORS to zero.
  69. You see, prior to 2.4, it's the HTTP Handler's job to determine what
  70. to handle specially, and what to just pass up. HANDLE_ERRORS == 0
  71. means "pass everything up". In python 2.4, however, this job no
  72. longer belongs to the HTTP Handler and is now done by a NEW handler,
  73. HTTPErrorProcessor. Here's the bottom line:
  74. python version < 2.4
  75. HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as
  76. errors
  77. HANDLE_ERRORS == 0 pass everything up, error processing is
  78. left to the calling code
  79. python version >= 2.4
  80. HANDLE_ERRORS == 1 pass up 200, treat the rest as errors
  81. HANDLE_ERRORS == 0 (default) pass everything up, let the
  82. other handlers (specifically,
  83. HTTPErrorProcessor) decide what to do
  84. In practice, setting the variable either way makes little difference
  85. in python 2.4, so for the most consistent behavior across versions,
  86. you probably just want to use the defaults, which will give you
  87. exceptions on errors.
  88. """
  89. # $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $
  90. import errno
  91. import httplib
  92. import socket
  93. import thread
  94. import urllib2
  95. DEBUG = None
  96. import sys
  97. if sys.version_info < (2, 4):
  98. HANDLE_ERRORS = 1
  99. else: HANDLE_ERRORS = 0
  100. class ConnectionManager(object):
  101. """
  102. The connection manager must be able to:
  103. * keep track of all existing
  104. """
  105. def __init__(self):
  106. self._lock = thread.allocate_lock()
  107. self._hostmap = {} # map hosts to a list of connections
  108. self._connmap = {} # map connections to host
  109. self._readymap = {} # map connection to ready state
  110. def add(self, host, connection, ready):
  111. self._lock.acquire()
  112. try:
  113. if host not in self._hostmap:
  114. self._hostmap[host] = []
  115. self._hostmap[host].append(connection)
  116. self._connmap[connection] = host
  117. self._readymap[connection] = ready
  118. finally:
  119. self._lock.release()
  120. def remove(self, connection):
  121. self._lock.acquire()
  122. try:
  123. try:
  124. host = self._connmap[connection]
  125. except KeyError:
  126. pass
  127. else:
  128. del self._connmap[connection]
  129. del self._readymap[connection]
  130. self._hostmap[host].remove(connection)
  131. if not self._hostmap[host]: del self._hostmap[host]
  132. finally:
  133. self._lock.release()
  134. def set_ready(self, connection, ready):
  135. try:
  136. self._readymap[connection] = ready
  137. except KeyError:
  138. pass
  139. def get_ready_conn(self, host):
  140. conn = None
  141. self._lock.acquire()
  142. try:
  143. if host in self._hostmap:
  144. for c in self._hostmap[host]:
  145. if self._readymap[c]:
  146. self._readymap[c] = 0
  147. conn = c
  148. break
  149. finally:
  150. self._lock.release()
  151. return conn
  152. def get_all(self, host=None):
  153. if host:
  154. return list(self._hostmap.get(host, []))
  155. else:
  156. return dict(self._hostmap)
class KeepAliveHandler(object):
    """urllib2 handler mixin that keeps HTTP connections alive and
    re-uses them for subsequent requests (see the module docstring).

    NOTE(review): self.parent is assumed to be set by
    urllib2.OpenerDirector.add_handler(), as for any urllib2 handler.
    """
    def __init__(self):
        self._cm = ConnectionManager()

    #### Connection Management
    def open_connections(self):
        """return a list of connected hosts and the number of connections
        to each. [('foo.com:80', 2), ('bar.org', 1)]"""
        return [(host, len(li)) for (host, li) in self._cm.get_all().items()]

    def close_connection(self, host):
        """close connection(s) to <host>
        host is the host:port spec, as in 'www.cnn.com:8080' as passed in.
        no error occurs if there is no connection to that host."""
        for h in self._cm.get_all(host):
            self._cm.remove(h)
            h.close()

    def close_all(self):
        """close all open connections"""
        for host, conns in self._cm.get_all().iteritems():
            for h in conns:
                self._cm.remove(h)
                h.close()

    def _request_closed(self, request, host, connection):
        """tells us that this request is now closed and that the
        connection is ready for another request"""
        self._cm.set_ready(connection, 1)

    def _remove_connection(self, host, connection, close=0):
        # Drop connection from the pool; optionally close its socket too.
        if close:
            connection.close()
        self._cm.remove(connection)

    #### Transaction Execution
    def http_open(self, req):
        # urllib2 protocol entry point for http:// URLs.
        return self.do_open(HTTPConnection, req)

    def do_open(self, http_class, req):
        """Issue req on a pooled connection if possible, else a new one,
        and return the (annotated) response object."""
        host = req.get_host()
        if not host:
            raise urllib2.URLError('no host given')
        try:
            h = self._cm.get_ready_conn(host)
            while h:
                r = self._reuse_connection(h, req, host)
                # if this response is non-None, then it worked and we're
                # done. Break out, skipping the else block.
                if r:
                    break
                # connection is bad - possibly closed by server
                # discard it and ask for the next free connection
                h.close()
                self._cm.remove(h)
                h = self._cm.get_ready_conn(host)
            else:
                # no (working) free connections were found. Create a new one.
                h = http_class(host)
                if DEBUG:
                    DEBUG.info("creating new connection to %s (%d)",
                               host, id(h))
                # added as not-ready: it stays busy until the response
                # is closed (_request_closed marks it ready again)
                self._cm.add(host, h, 0)
                self._start_transaction(h, req)
                r = h.getresponse()
        except (socket.error, httplib.HTTPException), err:
            raise urllib2.URLError(err)
        # if not a persistent connection, don't try to reuse it
        if r.will_close:
            self._cm.remove(h)
        if DEBUG:
            DEBUG.info("STATUS: %s, %s", r.status, r.reason)
        # annotate the response so its close() can notify us, and give
        # it the urllib2-compatible code/headers/msg attributes
        r._handler = self
        r._host = host
        r._url = req.get_full_url()
        r._connection = h
        r.code = r.status
        r.headers = r.msg
        r.msg = r.reason
        if r.status == 200 or not HANDLE_ERRORS:
            return r
        else:
            return self.parent.error('http', req, r,
                                     r.status, r.msg, r.headers)

    def _reuse_connection(self, h, req, host):
        """start the transaction with a re-used connection
        return a response object (r) upon success or None on failure.
        This DOES not close or remove bad connections in cases where
        it returns. However, if an unexpected exception occurs, it
        will close and remove the connection before re-raising.
        """
        try:
            self._start_transaction(h, req)
            r = h.getresponse()
            # note: just because we got something back doesn't mean it
            # worked. We'll check the version below, too.
        except (socket.error, httplib.HTTPException):
            r = None
        except: # re-raises
            # adding this block just in case we've missed
            # something we will still raise the exception, but
            # lets try and close the connection and remove it
            # first. We previously got into a nasty loop
            # where an exception was uncaught, and so the
            # connection stayed open. On the next try, the
            # same exception was raised, etc. The trade-off is
            # that it's now possible this call will raise
            # a DIFFERENT exception
            if DEBUG:
                DEBUG.error("unexpected exception - closing "
                            "connection to %s (%d)", host, id(h))
            self._cm.remove(h)
            h.close()
            raise
        if r is None or r.version == 9:
            # httplib falls back to assuming HTTP 0.9 if it gets a
            # bad header back. This is most likely to happen if
            # the socket has been closed by the server since we
            # last used the connection.
            if DEBUG:
                DEBUG.info("failed to re-use connection to %s (%d)",
                           host, id(h))
            r = None
        else:
            if DEBUG:
                DEBUG.info("re-using connection to %s (%d)", host, id(h))
        return r

    def _start_transaction(self, h, req):
        # What follows mostly reimplements HTTPConnection.request()
        # except it adds self.parent.addheaders in the mix.
        headers = req.headers.copy()
        if sys.version_info >= (2, 4):
            headers.update(req.unredirected_hdrs)
        headers.update(self.parent.addheaders)
        # normalize names so the skip/presence checks below work
        headers = dict((n.lower(), v) for n, v in headers.items())
        skipheaders = {}
        for n in ('host', 'accept-encoding'):
            if n in headers:
                # tell putrequest not to auto-generate these; we send
                # our own copies in the loop below
                skipheaders['skip_' + n.replace('-', '_')] = 1
        try:
            if req.has_data():
                data = req.get_data()
                h.putrequest('POST', req.get_selector(), **skipheaders)
                if 'content-type' not in headers:
                    h.putheader('Content-type',
                                'application/x-www-form-urlencoded')
                if 'content-length' not in headers:
                    h.putheader('Content-length', '%d' % len(data))
            else:
                h.putrequest('GET', req.get_selector(), **skipheaders)
        except (socket.error), err:
            raise urllib2.URLError(err)
        for k, v in headers.items():
            h.putheader(k, v)
        h.endheaders()
        if req.has_data():
            h.send(data)
class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler):
    # urllib2-compatible keepalive handler: KeepAliveHandler supplies
    # http_open(), urllib2.HTTPHandler supplies the rest of the handler
    # protocol (priority, __lt__, etc.).
    pass
  309. class HTTPResponse(httplib.HTTPResponse):
  310. # we need to subclass HTTPResponse in order to
  311. # 1) add readline() and readlines() methods
  312. # 2) add close_connection() methods
  313. # 3) add info() and geturl() methods
  314. # in order to add readline(), read must be modified to deal with a
  315. # buffer. example: readline must read a buffer and then spit back
  316. # one line at a time. The only real alternative is to read one
  317. # BYTE at a time (ick). Once something has been read, it can't be
  318. # put back (ok, maybe it can, but that's even uglier than this),
  319. # so if you THEN do a normal read, you must first take stuff from
  320. # the buffer.
  321. # the read method wraps the original to accommodate buffering,
  322. # although read() never adds to the buffer.
  323. # Both readline and readlines have been stolen with almost no
  324. # modification from socket.py
  325. def __init__(self, sock, debuglevel=0, strict=0, method=None):
  326. httplib.HTTPResponse.__init__(self, sock, debuglevel, method)
  327. self.fileno = sock.fileno
  328. self.code = None
  329. self._rbuf = ''
  330. self._rbufsize = 8096
  331. self._handler = None # inserted by the handler later
  332. self._host = None # (same)
  333. self._url = None # (same)
  334. self._connection = None # (same)
  335. _raw_read = httplib.HTTPResponse.read
  336. def close(self):
  337. if self.fp:
  338. self.fp.close()
  339. self.fp = None
  340. if self._handler:
  341. self._handler._request_closed(self, self._host,
  342. self._connection)
  343. def close_connection(self):
  344. self._handler._remove_connection(self._host, self._connection, close=1)
  345. self.close()
  346. def info(self):
  347. return self.headers
  348. def geturl(self):
  349. return self._url
  350. def read(self, amt=None):
  351. # the _rbuf test is only in this first if for speed. It's not
  352. # logically necessary
  353. if self._rbuf and not amt is None:
  354. L = len(self._rbuf)
  355. if amt > L:
  356. amt -= L
  357. else:
  358. s = self._rbuf[:amt]
  359. self._rbuf = self._rbuf[amt:]
  360. return s
  361. s = self._rbuf + self._raw_read(amt)
  362. self._rbuf = ''
  363. return s
  364. # stolen from Python SVN #68532 to fix issue1088
  365. def _read_chunked(self, amt):
  366. chunk_left = self.chunk_left
  367. value = ''
  368. # XXX This accumulates chunks by repeated string concatenation,
  369. # which is not efficient as the number or size of chunks gets big.
  370. while True:
  371. if chunk_left is None:
  372. line = self.fp.readline()
  373. i = line.find(';')
  374. if i >= 0:
  375. line = line[:i] # strip chunk-extensions
  376. try:
  377. chunk_left = int(line, 16)
  378. except ValueError:
  379. # close the connection as protocol synchronization is
  380. # probably lost
  381. self.close()
  382. raise httplib.IncompleteRead(value)
  383. if chunk_left == 0:
  384. break
  385. if amt is None:
  386. value += self._safe_read(chunk_left)
  387. elif amt < chunk_left:
  388. value += self._safe_read(amt)
  389. self.chunk_left = chunk_left - amt
  390. return value
  391. elif amt == chunk_left:
  392. value += self._safe_read(amt)
  393. self._safe_read(2) # toss the CRLF at the end of the chunk
  394. self.chunk_left = None
  395. return value
  396. else:
  397. value += self._safe_read(chunk_left)
  398. amt -= chunk_left
  399. # we read the whole chunk, get another
  400. self._safe_read(2) # toss the CRLF at the end of the chunk
  401. chunk_left = None
  402. # read and discard trailer up to the CRLF terminator
  403. ### note: we shouldn't have any trailers!
  404. while True:
  405. line = self.fp.readline()
  406. if not line:
  407. # a vanishingly small number of sites EOF without
  408. # sending the trailer
  409. break
  410. if line == '\r\n':
  411. break
  412. # we read everything; close the "file"
  413. self.close()
  414. return value
  415. def readline(self, limit=-1):
  416. i = self._rbuf.find('\n')
  417. while i < 0 and not (0 < limit <= len(self._rbuf)):
  418. new = self._raw_read(self._rbufsize)
  419. if not new:
  420. break
  421. i = new.find('\n')
  422. if i >= 0:
  423. i = i + len(self._rbuf)
  424. self._rbuf = self._rbuf + new
  425. if i < 0:
  426. i = len(self._rbuf)
  427. else:
  428. i = i + 1
  429. if 0 <= limit < len(self._rbuf):
  430. i = limit
  431. data, self._rbuf = self._rbuf[:i], self._rbuf[i:]
  432. return data
  433. def readlines(self, sizehint=0):
  434. total = 0
  435. list = []
  436. while True:
  437. line = self.readline()
  438. if not line:
  439. break
  440. list.append(line)
  441. total += len(line)
  442. if sizehint and total >= sizehint:
  443. break
  444. return list
def safesend(self, str):
    """Send `str' to the server.
    Shamelessly ripped off from httplib to patch a bad behavior.

    Installed as HTTPConnection.send below; `str' may be a string or a
    read()able file-like object.  (The parameter name shadows the
    builtin, but is kept for fidelity with httplib's signature.)
    """
    # _broken_pipe_resp is an attribute we set in this function
    # if the socket is closed while we're sending data but
    # the server sent us a response before hanging up.
    # In that case, we want to pretend to send the rest of the
    # outgoing data, and then let the user use getresponse()
    # (which we wrap) to get this last response before
    # opening a new socket.
    if getattr(self, '_broken_pipe_resp', None) is not None:
        return
    if self.sock is None:
        if self.auto_open:
            self.connect()
        else:
            raise httplib.NotConnected
    # send the data to the server. if we get a broken pipe, then close
    # the socket. we want to reconnect when somebody tries to send again.
    #
    # NOTE: we DO propagate the error, though, because we cannot simply
    # ignore the error... the caller will know if they can retry.
    if self.debuglevel > 0:
        print "send:", repr(str)
    try:
        blocksize = 8192
        read = getattr(str, 'read', None)
        if read is not None:
            # file-like payload: stream it out in blocks
            if self.debuglevel > 0:
                print "sending a read()able"
            data = read(blocksize)
            while data:
                self.sock.sendall(data)
                data = read(blocksize)
        else:
            self.sock.sendall(str)
    except socket.error, v:
        reraise = True
        if v[0] == errno.EPIPE: # Broken pipe
            # the server hung up mid-send; if the request was fully
            # sent it may still have answered, so fetch that response
            # now for the wrapped getresponse() to hand back later
            if self._HTTPConnection__state == httplib._CS_REQ_SENT:
                self._broken_pipe_resp = None
                self._broken_pipe_resp = self.getresponse()
                reraise = False
            self.close()
        if reraise:
            raise
  492. def wrapgetresponse(cls):
  493. """Wraps getresponse in cls with a broken-pipe sane version.
  494. """
  495. def safegetresponse(self):
  496. # In safesend() we might set the _broken_pipe_resp
  497. # attribute, in which case the socket has already
  498. # been closed and we just need to give them the response
  499. # back. Otherwise, we use the normal response path.
  500. r = getattr(self, '_broken_pipe_resp', None)
  501. if r is not None:
  502. return r
  503. return cls.getresponse(self)
  504. safegetresponse.__doc__ = cls.getresponse.__doc__
  505. return safegetresponse
class HTTPConnection(httplib.HTTPConnection):
    # use the modified response class
    response_class = HTTPResponse
    # send() that survives EPIPE when the server already answered
    send = safesend
    # getresponse() that returns the response safesend() saved, if any
    getresponse = wrapgetresponse(httplib.HTTPConnection)
  511. #########################################################################
  512. ##### TEST FUNCTIONS
  513. #########################################################################
def error_handler(url):
    """Exercise both HANDLE_ERRORS modes against url and print the
    status/reason seen in each; best run against a non-200 URL."""
    global HANDLE_ERRORS
    orig = HANDLE_ERRORS
    keepalive_handler = HTTPHandler()
    opener = urllib2.build_opener(keepalive_handler)
    urllib2.install_opener(opener)
    pos = {0: 'off', 1: 'on'}
    for i in (0, 1):
        print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i)
        HANDLE_ERRORS = i
        try:
            fo = urllib2.urlopen(url)
            fo.read()
            fo.close()
            try:
                # these attributes only exist on keepalive responses
                # (status 200); see the module docstring
                status, reason = fo.status, fo.reason
            except AttributeError:
                status, reason = None, None
        except IOError, e:
            print " EXCEPTION: %s" % e
            raise
        else:
            print " status = %s, reason = %s" % (status, reason)
    # restore the caller's error-handling mode
    HANDLE_ERRORS = orig
    hosts = keepalive_handler.open_connections()
    print "open connections:", hosts
    keepalive_handler.close_all()
  541. def md5(s):
  542. try:
  543. from hashlib import md5 as _md5
  544. except ImportError:
  545. from md5 import md5 as _md5
  546. global md5
  547. md5 = _md5
  548. return _md5(s)
  549. def continuity(url):
  550. format = '%25s: %s'
  551. # first fetch the file with the normal http handler
  552. opener = urllib2.build_opener()
  553. urllib2.install_opener(opener)
  554. fo = urllib2.urlopen(url)
  555. foo = fo.read()
  556. fo.close()
  557. m = md5.new(foo)
  558. print format % ('normal urllib', m.hexdigest())
  559. # now install the keepalive handler and try again
  560. opener = urllib2.build_opener(HTTPHandler())
  561. urllib2.install_opener(opener)
  562. fo = urllib2.urlopen(url)
  563. foo = fo.read()
  564. fo.close()
  565. m = md5.new(foo)
  566. print format % ('keepalive read', m.hexdigest())
  567. fo = urllib2.urlopen(url)
  568. foo = ''
  569. while True:
  570. f = fo.readline()
  571. if f:
  572. foo = foo + f
  573. else: break
  574. fo.close()
  575. m = md5.new(foo)
  576. print format % ('keepalive readline', m.hexdigest())
def comp(N, url):
    """Time N fetches of url with the plain urllib2 handlers and again
    with the keepalive handler, and print the speed-up factor."""
    print ' making %i connections to:\n %s' % (N, url)
    sys.stdout.write(' first using the normal urllib handlers')
    # first use normal opener
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    t1 = fetch(N, url)
    print ' TIME: %.3f s' % t1
    sys.stdout.write(' now using the keepalive handler ')
    # now install the keepalive handler and try again
    opener = urllib2.build_opener(HTTPHandler())
    urllib2.install_opener(opener)
    t2 = fetch(N, url)
    print ' TIME: %.3f s' % t2
    print ' improvement factor: %.2f' % (t1 / t2)
def fetch(N, url, delay=0):
    """Fetch url N times (sleeping delay seconds between fetches) and
    return the total elapsed time; warns if response lengths differ
    between reads."""
    import time
    lens = []
    starttime = time.time()
    for i in range(N):
        if delay and i > 0:
            time.sleep(delay)
        fo = urllib2.urlopen(url)
        foo = fo.read()
        fo.close()
        lens.append(len(foo))
    diff = time.time() - starttime
    # compare every later read's length to the first one
    j = 0
    for i in lens[1:]:
        j = j + 1
        if not i == lens[0]:
            print "WARNING: inconsistent length on read %i: %i" % (j, i)
    return diff
  610. def test_timeout(url):
  611. global DEBUG
  612. dbbackup = DEBUG
  613. class FakeLogger(object):
  614. def debug(self, msg, *args):
  615. print msg % args
  616. info = warning = error = debug
  617. DEBUG = FakeLogger()
  618. print " fetching the file to establish a connection"
  619. fo = urllib2.urlopen(url)
  620. data1 = fo.read()
  621. fo.close()
  622. i = 20
  623. print " waiting %i seconds for the server to close the connection" % i
  624. while i > 0:
  625. sys.stdout.write('\r %2i' % i)
  626. sys.stdout.flush()
  627. time.sleep(1)
  628. i -= 1
  629. sys.stderr.write('\r')
  630. print " fetching the file a second time"
  631. fo = urllib2.urlopen(url)
  632. data2 = fo.read()
  633. fo.close()
  634. if data1 == data2:
  635. print ' data are identical'
  636. else:
  637. print ' ERROR: DATA DIFFER'
  638. DEBUG = dbbackup
def test(url, N=10):
    """Run the whole manual test suite against url: error handling,
    continuity, speed comparison and dropped-connection recovery."""
    print "checking error handler (do this on a non-200)"
    try: error_handler(url)
    except IOError:
        print "exiting - exception will prevent further tests"
        sys.exit()
    print
    print "performing continuity test (making sure stuff isn't corrupted)"
    continuity(url)
    print
    print "performing speed comparison"
    comp(N, url)
    print
    print "performing dropped-connection check"
    test_timeout(url)
if __name__ == '__main__':
    # command line: keepalive.py <number-of-fetches> <url>
    import time
    import sys
    try:
        N = int(sys.argv[1])
        url = sys.argv[2]
    except (IndexError, ValueError):
        # wrong/missing arguments: print usage and exit quietly
        print "%s <integer> <url>" % sys.argv[0]
    else:
        test(url, N)