/dTubes.py

http://damnvid.googlecode.com/
# -*- coding: utf-8 -*-
from dCore import *
from dLog import *
from dThread import *
import dSysInfo
import os # Used by DamnYouTubeService.getTempFile
import time
import random
import re
import socket
import socks # SocksiPy, for the SOCKS4/SOCKS5 proxy support below (assumed bundled with DamnVid)
import traceback
import urllib # For urllib.urlencode in DamnBugReporter
import cookielib
import gdata.youtube.service # YouTube service
import gdata.projecthosting.client # Google Code service
import BeautifulSoup
DV.blanksocket = socket.socket
DV.youtube_service = gdata.youtube.service.YouTubeService()
DV.youtube_service.ssl = False
DV.streamTimeout = 30.0
DV.stripHtmlEntities = re.compile('&[^;]+;')
socket.setdefaulttimeout(DV.streamTimeout)
import urllib2
class DamnCookieJar(cookielib.CookieJar):
	def _cookie_from_cookie_tuple(self, tup, request): # Work-around for cookielib bug with non-integer cookie versions (*ahem* @ Apple)
		name, value, standard, rest = tup
		standard["version"] = 1 # Force a sane integer version before handing the tuple back to cookielib
		return cookielib.CookieJar._cookie_from_cookie_tuple(self, tup, request)
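# Usage note (illustrative, not part of the original file): this jar is what
# DamnURLOpener() below installs globally, so every request DamnVid makes
# tolerates servers that send a non-integer cookie "version" attribute:
#   jar = DamnCookieJar()
#   opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))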
def DamnURLOpener():
	global urllib2
	Damnlog('Reloading proxy settings.')
	if DV.prefs is None:
		Damnlog('Prefs uninitialized, reloading them.')
		DV.prefs = DamnPrefs()
	Damnlog('Building damn cookie jar.')
	DV.cookiejar = DamnCookieJar()
	newSocket = DV.blanksocket
	proxy = DV.prefs.gets('damnvid-proxy', 'proxy')
	Damnlog('Proxy preference is', proxy)
	if proxy == 'none':
		proxyhandle = urllib2.ProxyHandler({}) # Empty dictionary = no proxy
		Damnlog('Proxy is none, set handle to', proxyhandle)
	elif proxy == 'http':
		proxy = {
			'http': DV.prefs.gets('damnvid-proxy', 'http_proxy'),
			'https': DV.prefs.gets('damnvid-proxy', 'https_proxy')
		}
		Damnlog('HTTP/HTTPS proxy addresses are', proxy)
		try:
			proxy['http'] += ':' + str(int(DV.prefs.gets('damnvid-proxy', 'http_port'))) # int() validates the port; str() makes it appendable
			Damnlog('HTTP Proxy port is a valid integer.')
		except:
			Damnlog('HTTP Proxy port is not a valid integer.')
		try:
			proxy['https'] += ':' + str(int(DV.prefs.gets('damnvid-proxy', 'https_port')))
			Damnlog('HTTPS Proxy port is a valid integer.')
		except:
			Damnlog('HTTPS Proxy port is not a valid integer.')
		proxyhandle = urllib2.ProxyHandler(proxy)
		Damnlog('Proxy is', proxy, '; set handle to', proxyhandle)
	elif proxy == 'socks4' or proxy == 'socks5':
		Damnlog('Proxy is SOCKS-type.')
		proxyhandle = urllib2.ProxyHandler({})
		if proxy == 'socks4':
			Damnlog('It\'s a SOCKS4 proxy.')
			proxytype = socks.PROXY_TYPE_SOCKS4
		else:
			proxytype = socks.PROXY_TYPE_SOCKS5
			Damnlog('It\'s a SOCKS5 proxy.')
		address = DV.prefs.gets('damnvid-proxy', 'socks_proxy')
		Damnlog('SOCKS proxy address is', address)
		try:
			socks.setdefaultproxy(proxytype, address, int(DV.prefs.gets('damnvid-proxy', 'socks_port')))
			Damnlog('SOCKS proxy port is a valid integer.')
		except:
			socks.setdefaultproxy(proxytype, address)
			Damnlog('SOCKS proxy port is not a valid integer.')
		newSocket = socks.socksocket
	else:
		proxyhandle = urllib2.ProxyHandler()
		Damnlog('Using system settings, set proxy handle to', proxyhandle)
	Damnlog('Reloading urllib2, setting socket to', newSocket)
	del urllib2
	socket.socket = newSocket
	import urllib2
	Damnlog('Building new URL opener.')
	DV.urllib2_urlopener = urllib2.build_opener(proxyhandle, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPCookieProcessor(DV.cookiejar))
	DV.urllib2_urlopener.addheaders = [('User-agent', DV.safeUpperProduct + u'/' + DV.version)]
	Damnlog('URL opener built with user-agent', DV.safeUpperProduct + u'/' + DV.version, '; installing as default.')
	urllib2.install_opener(DV.urllib2_urlopener)
	Damnlog('URL opener installed, proxy settings loaded.')
	return DV.urllib2_urlopener
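# Hedged usage sketch (not part of the original file): DamnURLOpener() is meant
# to be re-run after the proxy preferences change, rebuilding and reinstalling
# the module-wide opener; the setter name below is hypothetical.
#   DV.prefs.sets('damnvid-proxy', 'proxy', 'http') # hypothetical setter
#   opener = DamnURLOpener() # also installed as the urllib2 default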
from dConfig import *
class DamnURLRequest(urllib2.Request):
	def __str__(self):
		return u'<DamnURLRequest of: ' + DamnUnicode(urllib2.Request.get_full_url(self)) + u'>'
	def __repr__(self):
		return self.__str__()
def DamnTimeoutStreamRead(stream, bytes=-1, timeout=None):
	if timeout is None:
		timeout = DV.streamTimeout
	t = DamnThreadedFunction(DamnCurry(stream.read, bytes), autostart=True, log=False)
	t.join(timeout)
	try:
		return t.getResult()
	except DamnThreadedFunctionNotDoneException:
		return None
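# Illustrative example (not part of the original file): a read capped at 5
# seconds; None means the read did not finish in time, not an empty stream.
#   data = DamnTimeoutStreamRead(stream, 4096, timeout=5.0)
#   if data is None:
#       pass # caller should treat this as a timeout and reopen/retry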
class DamnResumableDownload:
	def __init__(self, req, data=None, resumeat=None, resumable=True, buffer=32768, autoopen=True, autofetch=True):
		Damnlog('DamnResumableDownload initiated with request', req, 'and data', data, '; Resume at:', resumeat, '; Resumable:', resumable, '; Buffer size:', buffer, '; Auto-open:', autoopen, '; Auto-fetch:', autofetch)
		self.req = req
		self.url = req
		self.data = data
		self.resumable = resumable
		self.buffersize = buffer
		self.buffer = ''
		if type(req) in (type(''), type(u'')):
			self.req = DamnURLRequest(DamnUnicode(req))
		self.stream = None
		self.progress = 0
		self.totalSize = None
		self.resumeat = 0
		if resumeat is not None:
			self.resumeat = resumeat
		self.fetchThread = None
		self.autofetch = autofetch
		self.hasTimeout = False
		if autoopen:
			self.open()
	def __str__(self):
		return u'<DamnResumableDownload of: ' + DamnUnicode(self.url) + u'>'
	def __repr__(self):
		return self.__str__()
	def open(self, autostart=True):
		Damnlog('DamnResumableDownload opening', self.url)
		if self.stream is not None:
			self.close()
		if self.resumeat > 0:
			self.req.add_header('Range', 'bytes=' + str(self.resumeat) + '-')
		try:
			self.stream = urllib2.urlopen(self.req)
			Damnlog('DamnResumableDownload successfully opened', self.url, 'with info', self.info().keys())
			try:
				self.totalSize = int(self.info('Content-length'))
			except: # Probably Content-length isn't specified, so .info() returns None
				self.totalSize = None
			if self.autofetch and autostart:
				self.spawnFetcher()
			return self
		except urllib2.URLError, e:
			Damnlog('! DamnResumableDownload failed opening with URLError', e, 'on', self.url)
			if hasattr(e, 'code'):
				Damnlog('!! DamnResumableDownload opening URLError has code', e.code, 'on', self.url, '; raising exception.')
				raise e
		except socket.timeout, e:
			Damnlog('! DamnResumableDownload failed opening with socket timeout error', e, 'on', self.url)
		except socket.error, e:
			Damnlog('! DamnResumableDownload failed opening with socket error', e, 'on', self.url)
		except Exception, e:
			Damnlog('! DamnResumableDownload failed opening with unknown error', e, 'on', self.url)
		time.sleep(DV.streamTimeout / 2)
		return self.open(autostart=autostart)
	def info(self, key=None):
		self.ensureOpen()
		info2 = self.stream.info()
		info = {}
		for i in info2:
			info[i.lower()] = info2[i]
		Damnlog('DamnResumableDownload info for', self.url, 'is', info)
		if key is None:
			return info
		key = key.lower()
		if key in info:
			return info[key]
		return None
	def ensureOpen(self, autostart=False):
		if self.stream is None:
			self.open(autostart=autostart)
	def timeout(self):
		Damnlog('DamnResumableDownload timeout error for', self.url)
		self.hasTimeout = True
		self.close()
	def fetchBuffer(self):
		if self.hasTimeout:
			time.sleep(DV.streamTimeout / 2) # Sleep for a bit before giving it another shot
			self.hasTimeout = False
		self.ensureOpen()
		try:
			c = DamnTimeoutStreamRead(self.stream, self.buffersize * 4)
			if c is None:
				Damnlog('! DamnResumableDownload got None while trying to read stream of', self.url)
				self.hasTimeout = True
		except urllib2.URLError, e:
			Damnlog('! DamnResumableDownload failed reading with URLError', e, 'on', self.url)
			if hasattr(e, 'code'):
				Damnlog('!! DamnResumableDownload reading URLError has code', e.code, 'on', self.url, '; raising exception.')
				raise e
			self.hasTimeout = True
		except socket.timeout, e:
			Damnlog('! DamnResumableDownload failed reading with socket timeout error', e, 'on', self.url)
			self.hasTimeout = True
		except socket.error, e:
			Damnlog('! DamnResumableDownload failed reading with socket error', e, 'on', self.url)
			self.hasTimeout = True
		except Exception, e:
			Damnlog('! DamnResumableDownload failed reading with unknown error', e, 'on', self.url)
			self.hasTimeout = True
		if self.hasTimeout:
			self.close()
			return self.fetchBuffer()
		self.buffer += c
		l = len(c)
		self.resumeat += l
		self.progress += l
		if not l:
			Damnlog('! DamnResumableDownload fetchBuffer got no data.')
	def checkProgress(self): # True if there is stuff left to download
		Damnlog('Total size is', self.totalSize, '; Progress is', self.progress)
		if self.totalSize is None:
			return self.progress == 0 # Assume done if some stuff has been downloaded
		return self.totalSize > self.progress
	def spawnFetcher(self, join=False):
		if self.fetchThread is None:
			self.fetchThread = DamnThreadedFunction(self.fetchBuffer, autostart=True, log=False)
		if join:
			self.joinFetcher()
	def joinFetcher(self):
		if self.fetchThread is not None:
			self.fetchThread.join()
			self.fetchThread = None
	def readBuffer(self):
		self.spawnFetcher(join=True)
		if self.autofetch and len(self.buffer) <= self.buffersize * 2:
			self.spawnFetcher()
	def read(self, bytes=None):
		Damnlog('DamnResumableDownload reading', bytes, '; buffer size', len(self.buffer))
		if bytes is None:
			bytes = -1
		if bytes == -1:
			buffer = ''
			i = self.read(self.buffersize)
			while len(i):
				buffer += i
				i = self.read(self.buffersize)
			Damnlog('DamnResumableDownload returning0 ', len(buffer))
			return buffer
		while True:
			if len(self.buffer) >= bytes and bytes > 0: # If the buffer is rich enough
				buffer = self.buffer[:bytes]
				self.buffer = self.buffer[bytes:]
				if self.autofetch and len(self.buffer) <= self.buffersize * 2:
					self.spawnFetcher()
				Damnlog('DamnResumableDownload returning1 ', len(buffer))
				return buffer
			oldBuffer = len(self.buffer)
			self.readBuffer()
			if oldBuffer < len(self.buffer): # There is still stuff left
				return self.read(bytes=bytes) # Then keep downloading
			if self.checkProgress():
				self.readBuffer()
			else:
				break
		buffer = self.buffer # Otherwise stream ended, just flush the buffer
		self.buffer = '' # Empty buffer
		Damnlog('DamnResumableDownload returning2 ', len(buffer))
		return buffer
	def readAll(self):
		Damnlog('DamnResumableDownload reading all for', self.url)
		return self.read(-1)
	def close(self):
		Damnlog('DamnResumableDownload closing', self.url)
		if self.stream is not None:
			try:
				self.stream.close()
			except:
				Damnlog('! DamnResumableDownload failed closing', self.url)
		self.stream = None
		self.progress = 0
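# Hedged usage sketch (not part of the original file): the class acts like a
# file object that transparently reopens the connection with an HTTP Range
# header when the stream stalls. The URL below is made up.
#   dl = DamnResumableDownload('http://example.com/video.flv')
#   first = dl.read(4096) # served from the background-fetched buffer
#   rest = dl.readAll()
#   dl.close()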
def DamnURLOpen(req, data=None, throwerror=False, autoresume=True, resumeat=None):
	Damnlog('DamnURLOpen called with request', req, '; data', data, '; Throw error?', throwerror, '; Autoresume?', autoresume)
	try:
		if data is not None:
			pipe = DamnResumableDownload(req, data, resumable=autoresume, resumeat=resumeat)
		else:
			pipe = DamnResumableDownload(req, resumable=autoresume, resumeat=resumeat)
		Damnlog('DamnURLOpen successful, returning stream.')
		return pipe
	except IOError, err:
		if not hasattr(err, 'reason') and not hasattr(err, 'code'):
			Damnlog('DamnURLOpen on', req, 'failed with IOError but without reason or code.')
		else:
			try:
				Damnlog('DamnURLOpen on', req, 'failed with code', err.code, 'and reason', err.reason)
			except:
				try:
					Damnlog('DamnURLOpen on', req, 'failed with code', err.code, 'and no reason.')
				except:
					Damnlog('DamnURLOpen on', req, 'failed pretty badly.')
		if throwerror:
			raise err
		return None
	except Exception, e:
		if throwerror:
			Damnlog('DamnURLOpen failed on request', req, 'with exception', e, '; throwing error because throwerror is True.')
			raise e
		Damnlog('DamnURLOpen failed on request', req, 'with exception', e, '; returning None because throwerror is False.')
		return None
def DamnURLGetAll(req, data=None, onerror=None):
	Damnlog('DamnURLGetAll called with request', req, 'and data', data, '; on error =', onerror)
	url = DamnURLOpen(req, data=data, throwerror=False)
	if url is None:
		Damnlog('DamnURLGetAll got None; returning onerror =', onerror)
		return DamnUnicode(onerror)
	content = DamnUnicode(url.read(-1))
	Damnlog('DamnURLGetAll successful; returning', len(content), 'characters of content.')
	return content
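# Illustrative call (not part of the original file; the URL is made up):
#   html = DamnURLGetAll('http://example.com/', onerror=u'')
#   # returns the page body as unicode, or u'' if the request failed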
def DamnRTMPDump(req):
	pass # Todo
def DamnURLPicker(urls, urlonly=False, resumeat=None):
	tried = []
	if resumeat == 0:
		resumeat = None
	Damnlog('DamnURLPicker summoned. URLs:', urls, 'Resume at:', resumeat)
	for i in urls:
		i = DamnUnicode(i)
		if i not in tried:
			tried.append(i)
			try:
				pipe = DamnURLOpen(i, throwerror=True, resumeat=resumeat)
				if urlonly:
					try:
						pipe.close()
					except:
						pass
					return i
				Damnlog('DamnURLPicker returning pipe stream for', i)
				return pipe
			except IOError, err:
				if not hasattr(err, 'reason') and not hasattr(err, 'code'):
					Damnlog('DamnURLPicker returning None because of an IOError without reason or code')
					return None
	Damnlog('DamnURLPicker returning None because no URLs are valid')
	return None
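# Illustrative call (not part of the original file; mirror1/mirror2 are
# hypothetical): try each URL in order, returning an open stream, or with
# urlonly=True just the first URL that answers.
#   stream = DamnURLPicker([mirror1, mirror2], resumeat=102400)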
def DamnURLPickerBySize(urls, array=False):
	Damnlog('URL picker by size summoned. URLs:', urls)
	if len(urls) == 1:
		if array:
			return urls
		return urls[0]
	tried = []
	maxlen = []
	maxurl = []
	trycount = 0
	for i in urls:
		if i not in tried:
			tried.append(i)
			trycount += 1
			try:
				handle = DamnURLOpen(i)
				size = int(handle.info('Content-Length')) # .info() lowercases header names, so look the key up through it
				handle.close()
				maxlen.append(size)
				maxurl.append(i)
			except:
				maxlen.append(-trycount)
				maxurl.append(i)
	if not len(maxurl):
		return urls[0]
	maxlen2 = maxlen[:] # Sort a copy, so the index association with maxurl survives
	maxlen2.sort()
	maxlen2.reverse()
	assoc = []
	finalurls = []
	for i in maxlen2:
		for f in range(len(maxlen)):
			if i == maxlen[f] and f not in assoc:
				assoc.append(f)
				finalurls.append(maxurl[f])
	for i in tried:
		if i not in finalurls:
			finalurls.append(i)
	if array:
		return finalurls
	return finalurls[0]
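# Illustrative call (not part of the original file; the URLs are hypothetical):
# order candidates by their advertised Content-Length, biggest first, on the
# assumption that the largest file is the highest-quality copy.
#   best = DamnURLPickerBySize([sd_url, hd_url])
#   ordered = DamnURLPickerBySize([sd_url, hd_url], array=True)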
class DamnDownloader(DamnThread): # Retrieves video by HTTP and feeds it back to ffmpeg via stdin
	def __init__(self, uri, pipe, copy=None):
		Damnlog('DamnDownloader spawned. URI:', uri, '; Pipe:', pipe)
		self.uri = uri
		self.pipe = pipe
		self.copy = copy
		DamnThread.__init__(self)
	def timeouterror(self):
		Damnlog('DamnDownloader timeout detection timer fired!')
		self.timeouted = True
		self.http.close()
	def go(self):
		self.amountwritten = 0
		self.timeouted = True
		Damnlog('DamnDownloader starting download for first time.')
		while self.timeouted:
			self.timeouted = False
			self.goDownload()
		Damnlog('DamnDownloader goDownload subroutine done. Total written is', self.amountwritten, 'bytes. Timeout?', self.timeouted)
	def goDownload(self):
		self.http = DamnURLPicker(self.uri, resumeat=self.amountwritten)
		if self.http is None:
			try:
				self.pipe.close() # This tells ffmpeg that it's the end of the stream
			except:
				pass
			return None
		writing = ''
		direct = False
		if self.copy is not None:
			copystream = DamnOpenFile(self.copy, 'wb')
		i = 'letsgo'
		while len(i):
			i = self.http.read(1024)
			if direct:
				self.pipe.write(i)
				if self.copy is not None:
					copystream.write(i)
			else:
				writing += i
				if len(writing) > 102400: # Cache the first 100 KB and write them all at once (solves ffmpeg's "moov atom not found" problem)
					self.pipe.write(writing)
					if self.copy is not None:
						copystream.write(writing)
					direct = True
					del writing
			self.amountwritten += len(i)
		if not direct: # Video weighs less than 100 KB (!)
			try:
				self.pipe.write(writing)
				if self.copy is not None:
					copystream.write(writing)
			except:
				pass
		try:
			self.http.close()
		except:
			pass
		try:
			self.pipe.close() # This tells ffmpeg that it's the end of the stream
		except:
			pass
		try:
			copystream.close() # Might not be defined, but doesn't matter
		except:
			pass
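# Hedged usage sketch (not part of the original file): per the class comment,
# the pipe is ffmpeg's stdin, so the video is transcoded while it downloads;
# ffmpeg_command and video_url are hypothetical.
#   proc = subprocess.Popen(ffmpeg_command, stdin=subprocess.PIPE)
#   DamnDownloader([video_url], proc.stdin, copy=None).start()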
class DamnStreamCopy(DamnThread):
	def __init__(self, s1, s2, buffer=1048576, background=True, closes1=True, closes2=True):
		Damnlog('DamnStreamCopy spawned, will rip', s1, 'to', s2, 'when started. Background?', background)
		self.s1 = s1
		if type(s1) in (type(u''), type('')):
			self.s1 = DamnOpenFile(DamnUnicode(s1), 'rb')
		self.s2 = s2
		if type(s2) in (type(u''), type('')):
			self.s2 = DamnOpenFile(DamnUnicode(s2), 'wb')
		self.background = background
		self.buffer = buffer
		self.closes1 = closes1
		self.closes2 = closes2
		DamnThread.__init__(self)
	def start(self):
		if self.background:
			Damnlog('Starting stream copy in background thread.')
			DamnThread.start(self)
		else:
			Damnlog('Starting stream copy in current thread.')
			self.run()
	def go(self):
		firstread = True
		firstwrite = True
		Damnlog('Stream copy: Begin')
		i = 'Let\'s go'
		while len(i):
			try:
				i = self.s1.read(self.buffer)
				if firstread:
					Damnlog('Stream copy: first read successful, read', len(i), 'bytes.')
					firstread = False
				try:
					self.s2.write(i)
					if firstwrite:
						Damnlog('Stream copy: first write successful, wrote', len(i), 'bytes.')
						firstwrite = False
				except Exception, e:
					Damnlog('Stream copy: failed to write', len(i), 'bytes to output stream:', e)
			except:
				Damnlog('Stream copy: Failed to read from input stream.')
				break # Bail out; looping again on the same unread buffer would spin forever
		Damnlog('Stream copying done.')
		if self.closes1:
			try:
				self.s1.close()
			except:
				Damnlog('Stream copy: closing input stream failed.')
		if self.closes2:
			try:
				self.s2.close()
			except:
				Damnlog('Stream copy: closing output stream failed.')
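# Illustrative call (not part of the original file; the paths are made up):
# strings are opened as file paths automatically, and background=False makes
# start() block until the copy finishes.
#   DamnStreamCopy(u'/tmp/in.flv', u'/tmp/out.flv', background=False).start()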
class DamnYouTubeService(DamnThread):
	def __init__(self, parent, query=None):
		self.parent = parent
		DamnThread.__init__(self)
		if query is None:
			self.queries = None
		else:
			self.queries = [query]
	def query(self, query):
		if self.queries is None:
			self.queries = [query]
		else:
			self.queries.append(query)
	def stillAlive(self):
		return True
	def postEvent(self, info):
		info['self'] = self
		try:
			DV.postEvent(self.parent, (DV.evt_loading, -1, info))
		except:
			pass # Window might have been closed
	def returnResult(self, result, index=0):
		return self.postEvent({'index': index, 'query': self.queries[index], 'result': result})
	def getTempFile(self):
		name = DV.tmp_path + str(random.random()) + '.tmp'
		while os.path.exists(name):
			name = DV.tmp_path + str(random.random()) + '.tmp'
		return name
	def go(self):
		while self.queries is None:
			time.sleep(.025)
		try:
			self.parent.loadlevel += 1
		except:
			pass # Window might have been closed
		while len(self.queries):
			query = self.queries[0]
			if query[0] == 'feed':
				self.returnResult(DV.youtube_service.GetYouTubeVideoFeed(DamnUnicode(query[1]).encode('utf8')))
			elif query[0] == 'image':
				http = DamnURLOpen(query[1])
				tmpf = self.getTempFile()
				tmpfstream = DamnOpenFile(tmpf, 'wb')
				tmpfstream.write(http.read(-1)) # DamnResumableDownload has no readlines(); read everything instead
				http.close()
				tmpfstream.close()
				self.returnResult(tmpf)
			self.queries.pop(0)
			if not len(self.queries):
				time.sleep(.5) # Hang around for a moment, wait for more work
		self.postEvent({'query': ('done',)}) # All done, service will be respawned if needed later
		try:
			self.parent.loadlevel -= 1
		except:
			pass # Window might have been closed
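# Hedged usage sketch (not part of the original file): queries are ('feed', url)
# or ('image', url) tuples, and results come back to the parent window as
# DV.evt_loading events; parentWindow and the feed URL are hypothetical.
#   service = DamnYouTubeService(parentWindow)
#   service.start()
#   service.query(('feed', 'http://gdata.youtube.com/feeds/api/videos'))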
class DamnBugReporter(DamnThread):
	def __init__(self, desc, steps='', sysinfo=None, email='', parent=None):
		self.desc = desc
		self.steps = steps
		self.sysinfo = sysinfo
		if self.sysinfo is None:
			self.sysinfo = dSysInfo.DamnSysInfo()
		self.email = email
		self.parent = parent
		DamnThread.__init__(self)
	def postEvent(self, title=None, dialog=None, error=False, closedialog=True):
		info = {'title': title, 'dialog': dialog, 'error': error, 'closedialog': closedialog}
		Damnlog('Posting a bug report update event with info', info)
		if self.parent is None:
			Damnlog('Not posting anything, parent is none.')
			return
		try:
			DV.postEvent(self.parent, (DV.evt_bugreporting, -1, info))
		except:
			Damnlog('Posting event failed - Window was closed?')
	def go(self):
		Damnlog('Bug reporter thread launched.')
		if not len(self.desc):
			Damnlog('Bug reporter not sending anything - bug description is empty.')
			self.postEvent(DV.l('Empty bug description field'), DV.l('You must enter a bug description.'), error=True, closedialog=False)
			return
		Damnlog('Ready to submit bug.')
		Damnlog('Initiating Google Code API object.')
		api = gdata.projecthosting.client.ProjectHostingClient()
		Damnlog('Logging in with damnvid.user@gmail.com credentials')
		try:
			api.client_login('damnvid.user@gmail.com', 'damnviduser', source='DamnVid ' + DV.version, service='code') # OMG! Raw password!
		except:
			Damnlog('Could not log in to Google Code (Invalid connection?)', traceback.format_exc())
			self.postEvent(DV.l('Error while connecting'), DV.l('Could not connect to Google Code. Please make sure that your Internet connection is active and that no firewall is blocking DamnVid.'), error=True, closedialog=False)
			return
		summary = u'Bug: ' + self.desc + u'\n\nSteps:\n' + self.steps + u'\n\n' + self.sysinfo + u'\n\n'
		if len(self.email):
			summary += u'Email: ' + self.email.replace(u'@', u' (at) ').replace(u'.', u' (dot) ').replace(u'+', u' (plus) ').replace(u'-', u' (minus) ') + u'\n\n'
		try:
			Damnlog('Starting log dump, flushing.')
			DV.log.flush()
			Damnlog('Flushed, dumping...')
			logdump = ''
			f = DamnOpenFile(DV.log_file, 'r')
			for i in f:
				logdump += i
			f.close()
			logdump = DamnUnicode(logdump.strip())
			Damnlog('Log dump done, uploading to pastebin.')
			http = DamnURLOpen(DamnURLRequest('http://pastehtml.com/upload/create?input_type=txt&result=address', urllib.urlencode({'txt': logdump.encode('utf8')})))
			pasteurl = http.read(-1)
			http.close()
			Damnlog('Uploaded to', pasteurl)
			summary += u'damnvid.log: ' + DamnUnicode(pasteurl)
		except:
			summary += u'(Could not upload the contents of damnvid.log)'
			Damnlog('Impossible to send contents of damnvid.log!')
		Damnlog('Login successful, submitting issue...')
		try:
			api.add_issue('damnvid', self.desc.encode('utf8', 'ignore'), summary.encode('utf8', 'ignore'), 'windypower', status='New', labels=['Type-Defect', 'Priority-Medium'])
		except:
			Damnlog('Issue submission failed.', traceback.format_exc())
			self.postEvent(DV.l('Error while submitting issue'), DV.l('Could not submit bug report to Google Code. Please make sure that your Internet connection is active and that no firewall is blocking DamnVid.'), error=True, closedialog=False)
			return
		Damnlog('Issue submission successful.')
		self.postEvent(DV.l('Success'), DV.l('Bug report submitted successfully. Thanks!'), error=False, closedialog=True)
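# Hedged usage sketch (not part of the original file; dialog is a hypothetical
# parent window): the reporter runs in its own thread and reports progress back
# via DV.evt_bugreporting events.
#   DamnBugReporter(u'Crash on startup', steps=u'1. Launch DamnVid', parent=dialog).start()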
def DamnHtmlEntities(html):
	try:
		return DamnUnicode(BeautifulSoup.BeautifulStoneSoup(html, convertEntities=BeautifulSoup.BeautifulStoneSoup.HTML_ENTITIES)).replace(u'&amp;', u'&') # Because BeautifulSoup, as good as it is, leaves &amp;badentity where &badentity; was. Gotta convert that back.
	except:
		Damnlog('Error while parsing HTML entities:', html)
		return DamnUnicode(DV.stripHtmlEntities.sub(u'', DamnUnicode(html)))
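# Illustrative call (not part of the original file): decode known HTML entities,
# falling back to stripping anything entity-shaped if parsing blows up.
#   DamnHtmlEntities(u'Tom &amp; Jerry &eacute;') # -> u'Tom & Jerry \xe9'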