
/addons/plugin.video.tvisio.tv/default.py

http://seppius-xbmc-repo.googlecode.com/
Possible License(s): GPL-3.0, AGPL-1.0
#!/usr/bin/python
# -*- coding: utf-8 -*-
#/*
# * Copyright (C) 2011 Silen
# *
# *
# * This Program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License as published by
# * the Free Software Foundation; either version 2, or (at your option)
# * any later version.
# *
# * This Program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; see the file COPYING. If not, write to
# * the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# * http://www.gnu.org/copyleft/gpl.html
# */
import re, os, urllib, urllib2, cookielib, time, sys, urlparse
from time import gmtime, strftime
# xbmc must be imported before the json fallback below so that xbmc.log()
# is available if every json module is missing
import xbmc, xbmcgui, xbmcplugin, xbmcaddon

addon_id = 'plugin.video.tvisio.tv'

try:
    import json
except ImportError:
    try:
        import simplejson as json
        #xbmc.log( '[%s]: Error import json. Uses module simplejson' % addon_id, 2 )
    except ImportError:
        try:
            import demjson3 as json
            #xbmc.log( '[%s]: Error import simplejson. Uses module demjson3' % addon_id, 3 )
        except ImportError:
            xbmc.log( '[%s]: Error import demjson3. Sorry.' % addon_id, 4 )

Addon = xbmcaddon.Addon(id=addon_id)
icon     = xbmc.translatePath(os.path.join(Addon.getAddonInfo('path'), 'icon.png'))
fcookies = xbmc.translatePath(os.path.join(Addon.getAddonInfo('path'), r'cookies.txt'))

# load the BeautifulSoup HTML parser from the addon's resources/lib folder
try:
    sys.path.append(os.path.join(Addon.getAddonInfo('path'), r'resources', r'lib'))
    from BeautifulSoup import BeautifulSoup
except:
    try:
        sys.path.insert(0, os.path.join(Addon.getAddonInfo('path'), r'resources', r'lib'))
        from BeautifulSoup import BeautifulSoup
    except:
        sys.path.append(os.path.join(os.getcwd(), r'resources', r'lib'))
        from BeautifulSoup import BeautifulSoup
        icon = xbmc.translatePath(os.path.join(os.getcwd().replace(';', ''), 'icon.png'))
import HTMLParser
hpar = HTMLParser.HTMLParser()

h = int(sys.argv[1])

def showMessage(heading, message, times = 3000):
    xbmc.executebuiltin('XBMC.Notification("%s", "%s", %s, "%s")'%(heading, message, times, icon))
#---------- get web page -------------------------------------------------------
def get_HTML(url, post = None, ref = None):
    request = urllib2.Request(url, post)

    host = urlparse.urlsplit(url).hostname
    if ref == None:
        ref = 'http://'+host

    request.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET4.0C)')
    request.add_header('Host', host)
    request.add_header('Accept', '*/*')
    request.add_header('Accept-Language', 'ru-RU')
    request.add_header('Referer', ref)

    try:
        f = urllib2.urlopen(request)
    except IOError, e:
        if hasattr(e, 'reason'):
            xbmc.log('We failed to reach a server.')
        elif hasattr(e, 'code'):
            xbmc.log('The server couldn\'t fulfill the request.')
        return ''  # nothing was fetched; return an empty page instead of failing on f.read()

    html = f.read()
    return html
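# get_HTML() is the only fetch helper used below.  It goes through the urllib2
# opener installed at the bottom of the file, so the tvisio.tv session cookie
# loaded from cookies.txt is sent automatically; passing `post` turns the
# request into a POST (used only for the login form in Get_TV_Channels()).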
#---------- get Moscow Time ----------------------------------------------------
def MSK_time():
    try:
        #-- get MSK time page from Time&Date server
        url = 'http://www.timeanddate.com/worldclock/city.html?n=166'
        html = get_HTML(url)

        MSK_TOff = re.compile('<td>Current time zone offset:<\/td><td><strong>UTC\/GMT \+(.+?) hours<\/strong>', re.MULTILINE|re.DOTALL).findall(html)
        TOff = int(MSK_TOff[0])
    except:
        TOff = 3

    #--- return MSK time
    return gmtime(time.time()+TOff*60*60)
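# MSK_time() returns a time.struct_time shifted to Moscow time; when the
# timeanddate.com scrape fails (no network, page layout changed) it falls
# back to a fixed UTC+3 offset.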
#---------- get list of TV channels --------------------------------------------
def Get_TV_Channels():
    url = 'http://tvisio.tv'
    html = get_HTML(url)

    try:
        #-- get authenticity token
        token = re.compile('<input name="authenticity_token" type="hidden" value="(.+?)" \/>', re.MULTILINE|re.DOTALL).findall(html)[0]

        #-- login to tvisio.tv
        login    = Addon.getSetting('Login')
        password = Addon.getSetting('Password')

        values = {
            'user[email]'        : login,
            'user[password]'     : password,
            'user[remember_me]'  : 1,
            'authenticity_token' : token,
            'commit'             : 'Войти',  # submit-button caption, 'Sign in' (assumed)
            'utf8'               : '✓'       # standard Rails UTF-8 form marker (assumed)
        }
        post = urllib.urlencode(values)

        url  = 'http://tvisio.tv/users/login'
        html = get_HTML(url, post)
    except:
        pass

    # -- parsing web page ------------------------------------------------------
    html = re.compile('<body>(.+?)<\/body>', re.MULTILINE|re.DOTALL).findall(html)[0]
    soup = BeautifulSoup(html)

    nav = soup.find('div', { 'id':"channels_list"})
    for ch in nav.findAll("a"):
        name   = unescape(ch.find('img')['alt']).encode('utf-8')
        img    = 'http://tvisio.tv'+ch.find('img')['src']
        ch_url = 'http://tvisio.tv'+ch['href']

        i = xbmcgui.ListItem(name, iconImage=img, thumbnailImage=img)
        u = sys.argv[0] + '?mode=EPG_DATE'
        u += '&name=%s'%urllib.quote_plus(name)
        u += '&url=%s'%urllib.quote_plus(ch_url)
        u += '&img=%s'%urllib.quote_plus(img)
        xbmcplugin.addDirectoryItem(h, u, i, True)

    xbmcplugin.endOfDirectory(h)
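# Each channel becomes a folder item whose URL re-enters this plugin with
# mode=EPG_DATE; the dispatcher at the bottom of the file routes it to
# Get_EPG_Date().  Login is best-effort: if the token scrape or the POST
# fails, the channel list is still parsed from whichever page was fetched.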
#-------------------------------------------------------------------------------
def Get_EPG_Date(params):
    # -- parameters
    url = urllib.unquote_plus(params['url'])
    img = urllib.unquote_plus(params['img'])

    #-- get online TV link
    print url
    html = get_HTML(url)
    html = re.compile('<body>(.+?)<\/body>', re.MULTILINE|re.DOTALL).findall(html)[0]
    soup = BeautifulSoup(html)
    print html

    for online in soup.find('div',{'class':'head'}).findAll("a"):
        prg = 'http://tvisio.tv'+online['href']
        name = '[COLOR FF3BB9FF]Смотреть онлайн[/COLOR]'  # 'Watch online' (assumed label)
        prg_name = '<< Смотреть онлайн >>'

        i = xbmcgui.ListItem(name, iconImage=img, thumbnailImage=img)
        u = sys.argv[0] + '?mode=PLAY'
        u += '&name=%s'%urllib.quote_plus(name)
        u += '&url=%s'%urllib.quote_plus(prg)
        u += '&prg=%s'%urllib.quote_plus(prg_name)
        u += '&img=%s'%urllib.quote_plus(img)
        xbmcplugin.addDirectoryItem(h, u, i, False)

    #-- get MSK time
    MSK = time.mktime(MSK_time())

    #-- fill up EPG date list
    for day_off in range(0, 16): #-- tvisio.tv keeps about 2 weeks of TV data
        ETR_date = time.localtime(MSK-day_off*24*60*60)

        name = unescape(strftime("%a, %d %b %Y", ETR_date)).encode('utf-8')
        id   = url + '?date=' + strftime("%Y-%m-%d", ETR_date)

        i = xbmcgui.ListItem(name, iconImage=img, thumbnailImage=img)
        u = sys.argv[0] + '?mode=EPG'
        u += '&name=%s'%urllib.quote_plus(name)
        u += '&url=%s'%urllib.quote_plus(id)
        u += '&img=%s'%urllib.quote_plus(img)
        xbmcplugin.addDirectoryItem(h, u, i, True)

    xbmcplugin.endOfDirectory(h)
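# The first entry plays the live stream (mode=PLAY); the remaining 16 entries
# are one folder per day (mode=EPG), built by appending "?date=YYYY-MM-DD" to
# the channel URL so Get_EPG() can request that day's schedule.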
#---------- get EPG for selected channel ---------------------------------------
def Get_EPG(params):
    # -- parameters
    url = urllib.unquote_plus(params['url'])
    img = urllib.unquote_plus(params['img'])

    html = get_HTML(url)

    # -- parsing web page ------------------------------------------------------
    html = re.compile('<body>(.+?)<\/body>', re.MULTILINE|re.DOTALL).findall(html)[0]
    soup = BeautifulSoup(html)

    nav = soup.findAll("div", { "class" : "broadcast" })
    for ch in nav:
        try:
            prg = 'http://tvisio.tv'+ch.find('a')['href']
        except:
            prg = '*'

        name = '[COLOR FF3BB9FF]'+unescape(ch.find("span", { "class" : "time" }).text).encode('utf-8')+'[/COLOR]'+' '
        if prg == '*':
            name += '[COLOR FFFF0000]'+unescape(ch.find("span", { "class" : "title" }).text).encode('utf-8')+'[/COLOR]'
        else:
            name += '[COLOR FFC3FDB8]'+unescape(ch.find("span", { "class" : "title" }).text).encode('utf-8')+'[/COLOR]'
        prg_name = unescape(ch.find("span", { "class" : "title" }).text).encode('utf-8')

        i = xbmcgui.ListItem(name, iconImage=img, thumbnailImage=img)
        u = sys.argv[0] + '?mode=PLAY'
        u += '&name=%s'%urllib.quote_plus(name)
        u += '&url=%s'%urllib.quote_plus(prg)
        u += '&prg=%s'%urllib.quote_plus(prg_name)
        u += '&img=%s'%urllib.quote_plus(img)
        xbmcplugin.addDirectoryItem(h, u, i, False)

    xbmcplugin.endOfDirectory(h)
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
def PLAY(params):
    # -- parameters
    url  = urllib.unquote_plus(params['url'])
    img  = urllib.unquote_plus(params['img'])
    name = urllib.unquote_plus(params['prg'])

    if url == '*':
        return False

    # -- check if video available
    html = get_HTML(url)

    # -- parsing web page ----------------------------------------------------------
    var = re.compile('flashvars.(.+?) = "(.+?)";', re.MULTILINE|re.DOTALL).findall(html)
    for rec in var:
        if rec[0].find('stream') > -1:
            v_stream = rec[1]
        elif rec[0].find('start') > -1:
            v_start = rec[1]
        elif rec[0].find('server') > -1:
            v_server = rec[1]
        elif rec[0].find('session') > -1:
            v_session = rec[1]

    swf = re.compile('swfobject.embedSWF\("(.+?)"', re.MULTILINE|re.DOTALL).findall(html)
    v_swf = swf[0]

    # -- assemble RTMP link ----------------------------------------------------
    if name != '<< Смотреть онлайн >>':  # archived broadcast: pass the start offset
        video = 'rtmp://%s/rtmp app=rtmp swfUrl=http://tvisio.tv%s pageUrl=%s playpath=%s?start=%s conn=S:%s' % (v_server, v_swf, url, v_stream, v_start, v_session)
    else:                                # live stream
        video = 'rtmp://%s/rtmp app=rtmp swfUrl=http://tvisio.tv%s pageUrl=%s playpath=%s conn=S:%s' % (v_server, v_swf, url, v_stream, v_session)

    i = xbmcgui.ListItem(name, path = urllib.unquote(video), thumbnailImage=img)
    xbmc.Player().play(video, i)
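# The rtmp:// string uses librtmp-style space-separated options (app, swfUrl,
# pageUrl, playpath, conn); every value comes from the flashvars scraped above.
# The label comparison distinguishes the live stream from an archived
# broadcast, which needs the extra "?start=<offset>" on its playpath.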
#-------------------------------------------------------------------------------
def unescape(text):
    try:
        text = hpar.unescape(text)
    except:
        text = hpar.unescape(text.decode('utf8'))

    try:
        text = unicode(text, 'utf-8')
    except:
        text = text

    return text

def get_url(url):
    return "http:"+urllib.quote(url.replace('http:', ''))
#-------------------------------------------------------------------------------
def get_params(paramstring):
    param=[]
    if len(paramstring)>=2:
        params=paramstring
        cleanedparams=params.replace('?','')
        if (params[len(params)-1]=='/'):
            params=params[0:len(params)-2]
        pairsofparams=cleanedparams.split('&')
        param={}
        for i in range(len(pairsofparams)):
            splitparams={}
            splitparams=pairsofparams[i].split('=')
            if (len(splitparams))==2:
                param[splitparams[0]]=splitparams[1]
    return param
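# Example: an invocation such as
#   plugin://plugin.video.tvisio.tv/?mode=EPG&url=http%3A%2F%2Ftvisio.tv%2Fchannels%2F1
# arrives here as sys.argv[2] = '?mode=EPG&url=http%3A%2F%2Ftvisio.tv%2Fchannels%2F1'
# and get_params() returns {'mode': 'EPG', 'url': 'http%3A%2F%2Ftvisio.tv%2Fchannels%2F1'}.
# Values stay URL-encoded, so callers decode them with urllib.unquote_plus().
# (The channel path above is only an illustration, not a real tvisio.tv URL.)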
#-------------------------------------------------------------------------------
params = get_params(sys.argv[2])

# get cookies from last session
cj = cookielib.MozillaCookieJar(fcookies)
try:
    cj.load()
except:
    pass
hr = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(hr)
urllib2.install_opener(opener)

mode = None

try:
    mode = urllib.unquote_plus(params['mode'])
except:
    Get_TV_Channels()

if mode == 'EPG_DATE':
    Get_EPG_Date(params)
elif mode == 'EPG':
    Get_EPG(params)
elif mode == 'PLAY':
    PLAY(params)

#-- store cookies
cj.save()
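# Cookies collected during this run (including the tvisio.tv login session)
# are written back to cookies.txt in the addon folder and reloaded by
# cj.load() on the next invocation of the plugin.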