
/r2/r2/controllers/reddit_base.py

https://github.com/stevewilber/reddit
# The contents of this file are subject to the Common Public Attribution
# License Version 1.0. (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
# License Version 1.1, but Sections 14 and 15 have been added to cover use of
# software over a computer network and provide for limited attribution for the
# Original Developer. In addition, Exhibit A has been modified to be consistent
# with Exhibit B.
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
# the specific language governing rights and limitations under the License.
#
# The Original Code is reddit.
#
# The Original Developer is the Initial Developer. The Initial Developer of
# the Original Code is reddit Inc.
#
# All portions of the code written by reddit are Copyright (c) 2006-2012 reddit
# Inc. All Rights Reserved.
###############################################################################
from mako.filters import url_escape
from pylons import c, g, request, response
from pylons.controllers.util import redirect_to
from pylons.i18n import _
from pylons.i18n.translation import LanguageError

from r2.lib import pages, utils, filters, amqp, stats
from r2.lib.utils import http_utils, is_subdomain, UniqueIterator, is_throttled
from r2.lib.cache import LocalCache, make_key, MemcachedError
import random as rand

from r2.models.account import (FakeAccount, valid_feed, valid_admin_cookie,
                               valid_otp_cookie)
from r2.models.subreddit import Subreddit, Frontpage
from r2.models import *
from errors import ErrorSet, ForbiddenError, errors
from validator import *
from r2.lib.template_helpers import add_sr
from r2.config.extensions import is_api
from r2.lib.translation import set_lang
from r2.lib.contrib import ipaddress
from r2.lib.base import BaseController, proxyurl, abort
from r2.lib.authentication import authenticate_user

from Cookie import CookieError
from copy import copy
from datetime import datetime, timedelta
from hashlib import sha1, md5
from urllib import quote, unquote
from urlparse import urlparse

import re
import simplejson
import locale, socket

import babel.core

from r2.lib.tracking import encrypt, decrypt
from pylons import Response

NEVER = 'Thu, 31 Dec 2037 23:59:59 GMT'
DELETE = 'Thu, 01-Jan-1970 00:00:01 GMT'

cache_affecting_cookies = ('reddit_first', 'over18', '_options')
class Cookies(dict):
    def add(self, name, value, *k, **kw):
        name = name.encode('utf-8')
        self[name] = Cookie(value, *k, **kw)


class Cookie(object):
    def __init__(self, value, expires=None, domain=None,
                 dirty=True, secure=False, httponly=False):
        self.value = value
        self.expires = expires
        self.dirty = dirty
        self.secure = secure
        self.httponly = httponly
        if domain:
            self.domain = domain
        elif c.authorized_cname and not c.default_sr:
            self.domain = utils.common_subdomain(request.host, c.site.domain)
        else:
            self.domain = g.domain

    def __repr__(self):
        return ("Cookie(value=%r, expires=%r, domain=%r, dirty=%r)"
                % (self.value, self.expires, self.domain, self.dirty))
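
# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition, not part of the original file):
# Cookies.add() utf-8-encodes the name and wraps the value in a Cookie.  The
# domain below is hypothetical and is passed explicitly so the sketch does
# not touch the pylons `c`/`g` request globals that Cookie.__init__ would
# otherwise consult.
def _example_cookie_usage():
    jar = Cookies()
    jar.add(u'reddit_first', '{"firsttime": "first"}',
            expires=NEVER, domain='example.com')
    # every new Cookie starts dirty, so post() would emit a Set-Cookie for it
    assert jar['reddit_first'].dirty
    return jar
# ---------------------------------------------------------------------------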
class UnloggedUser(FakeAccount):
    _cookie = 'options'
    allowed_prefs = ('pref_content_langs', 'pref_lang',
                     'pref_frame_commentspanel')

    def __init__(self, browser_langs, *a, **kw):
        FakeAccount.__init__(self, *a, **kw)
        if browser_langs:
            lang = browser_langs[0]
            content_langs = list(browser_langs)
            # try to coerce the default language
            if g.lang not in content_langs:
                content_langs.append(g.lang)
            content_langs.sort()
        else:
            lang = g.lang
            content_langs = 'all'
        self._defaults = self._defaults.copy()
        self._defaults['pref_lang'] = lang
        self._defaults['pref_content_langs'] = content_langs
        self._defaults['pref_frame_commentspanel'] = False
        self._load()

    @property
    def name(self):
        raise NotImplementedError

    def _from_cookie(self):
        z = read_user_cookie(self._cookie)
        try:
            d = simplejson.loads(decrypt(z))
            return dict((k, v) for k, v in d.iteritems()
                        if k in self.allowed_prefs)
        except ValueError:
            return {}

    def _to_cookie(self, data):
        data = data.copy()
        for k in data.keys():
            if k not in self.allowed_prefs:
                del data[k]
        set_user_cookie(self._cookie, encrypt(simplejson.dumps(data)))

    def _subscribe(self, sr):
        pass

    def _unsubscribe(self, sr):
        pass

    def valid_hash(self, hash):
        return False

    def _commit(self):
        if self._dirty:
            for k, (oldv, newv) in self._dirties.iteritems():
                self._t[k] = newv
            self._to_cookie(self._t)

    def _load(self):
        self._t.update(self._from_cookie())
        self._loaded = True
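
# Illustrative sketch (editor's addition): the whitelist filter that
# _from_cookie()/_to_cookie() apply, shown standalone.  The sample dict is
# hypothetical; anything outside allowed_prefs is dropped before the data is
# encrypted into the "<username>_options" cookie.
def _example_pref_filter():
    raw = {'pref_lang': 'en',
           'pref_content_langs': ['en', 'fr'],
           'pref_not_whitelisted': True}   # hypothetical, not an allowed pref
    kept = dict((k, v) for k, v in raw.iteritems()
                if k in UnloggedUser.allowed_prefs)
    assert 'pref_not_whitelisted' not in kept
    return kept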
def read_user_cookie(name):
    uname = c.user.name if c.user_is_loggedin else ""
    cookie_name = uname + '_' + name
    if cookie_name in c.cookies:
        return c.cookies[cookie_name].value
    else:
        return ''


def set_user_cookie(name, val, **kwargs):
    uname = c.user.name if c.user_is_loggedin else ""
    c.cookies[uname + '_' + name] = Cookie(value=val,
                                           **kwargs)
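
# Illustrative sketch (editor's addition): per-user cookies are namespaced as
# "<username>_<name>" (just "_<name>" for logged-out users).  The username is
# hypothetical.
def _example_user_cookie_name(uname='someuser', name='recentclicks2'):
    return (uname or '') + '_' + name   # -> 'someuser_recentclicks2'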
valid_click_cookie = fullname_regex(Link, True).match
def set_recent_clicks():
    c.recent_clicks = []
    if not c.user_is_loggedin:
        return

    click_cookie = read_user_cookie('recentclicks2')
    if click_cookie:
        if valid_click_cookie(click_cookie):
            names = [x for x in UniqueIterator(click_cookie.split(',')) if x]

            if len(names) > 5:
                names = names[:5]
                set_user_cookie('recentclicks2', ','.join(names))
            # eventually this will look at the user preference
            names = names[:5]

            try:
                c.recent_clicks = Link._by_fullname(names, data=True,
                                                    return_dict=False)
            except NotFound:
                # clear their cookie because it's got bad links in it
                set_user_cookie('recentclicks2', '')
        else:
            # if the cookie wasn't valid, clear it
            set_user_cookie('recentclicks2', '')
def read_mod_cookie():
    cook = [s.split('=')[0:2] for s in read_user_cookie('mod').split(':') if s]
    if cook:
        set_user_cookie('mod', '')
def firsttime():
    if (request.user_agent and
        ('iphone' in request.user_agent.lower() or
         'android' in request.user_agent.lower()) and
        not get_redditfirst('mobile_suggest')):
        set_redditfirst('mobile_suggest', 'first')
        return 'mobile_suggest'
    elif get_redditfirst('firsttime'):
        return False
    else:
        set_redditfirst('firsttime', 'first')
        return True

def get_redditfirst(key, default=None):
    try:
        val = c.cookies['reddit_first'].value
        # on cookie presence, return as much
        if default is None:
            default = True
        cookie = simplejson.loads(val)
        return cookie[key]
    except (ValueError, TypeError, KeyError), e:
        # it's not a proper json dict, or the cookie isn't present, or
        # the key isn't part of the cookie; we don't really want a
        # broken cookie to propagate an exception up
        return default

def set_redditfirst(key, val):
    try:
        cookie = simplejson.loads(c.cookies['reddit_first'].value)
        cookie[key] = val
    except (ValueError, TypeError, KeyError), e:
        # invalid JSON data; we'll just construct a new cookie
        cookie = {key: val}

    c.cookies['reddit_first'] = Cookie(simplejson.dumps(cookie),
                                       expires=NEVER)
# this cookie is also accessed by organic.js, so changes to the format
# will have to be made there as well
organic_pos_key = 'organic_pos'

def organic_pos():
    "organic_pos() -> pos = int()"
    pos = get_redditfirst(organic_pos_key, 0)
    if not isinstance(pos, int):
        pos = 0
    return pos

def set_organic_pos(pos):
    "set_organic_pos(int()) -> None"
    set_redditfirst(organic_pos_key, pos)
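
# Illustrative sketch (editor's addition): the reddit_first cookie is one flat
# JSON object, so firsttime(), get_redditfirst()/set_redditfirst() and
# organic_pos() all read and write the same value; the payload below is
# hypothetical.
def _example_redditfirst_payload():
    payload = {'firsttime': 'first',
               'mobile_suggest': 'first',
               organic_pos_key: 3}
    blob = simplejson.dumps(payload)
    assert simplejson.loads(blob)[organic_pos_key] == 3
    return blob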
def over18():
    if c.user.pref_over_18 or c.user_is_admin:
        return True
    else:
        if 'over18' in c.cookies:
            cookie = c.cookies['over18'].value
            if cookie == sha1(request.ip).hexdigest():
                return True

def set_obey_over18():
    "querystring parameter for API to obey over18 filtering rules"
    c.obey_over18 = request.GET.get("obey_over18") == "true"
def set_subreddit():
    # the r parameter gets added by javascript for POST requests so we
    # can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        # check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        # reddits
        c.site = Sub
    elif '+' in sr_name:
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    # if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
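
# Illustrative sketch (editor's addition): how a "+"-joined path such as
# /r/pics+funny+pics collapses to the distinct subreddits that set_subreddit()
# resolves via Subreddit._by_name(); plain set() shows the same dedup without
# a database, and the names themselves are hypothetical.
def _example_split_multireddit(sr_name='pics+funny+pics'):
    sr_names = sr_name.split('+')
    return sorted(set(sr_names))   # -> ['funny', 'pics']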
def set_content_type():
    e = request.environ
    c.render_style = e['render_style']
    c.response_content_type = e['content_type']

    if e.has_key('extension'):
        c.extension = ext = e['extension']
        if ext in ('embed', 'wired', 'widget'):
            def to_js(content):
                return utils.to_js(content, callback=request.params.get(
                    "callback", "document.write"))
            c.response_wrappers.append(to_js)
        if ext in ("rss", "api", "json") and request.method.upper() == "GET":
            user = valid_feed(request.GET.get("user"),
                              request.GET.get("feed"),
                              request.path)
            if user and not g.read_only_mode:
                c.user = user
                c.user_is_loggedin = True
        if ext in ("mobile", "m") and not request.GET.get("keep_extension"):
            try:
                if request.cookies['reddit_mobility'] == "compact":
                    c.extension = "compact"
                    c.render_style = "compact"
            except (ValueError, KeyError):
                c.suggest_compact = True
        if ext in ("mobile", "m", "compact"):
            if request.GET.get("keep_extension"):
                c.cookies['reddit_mobility'] = Cookie(ext, expires=NEVER)

    # allow JSONP requests to generate callbacks, but do not allow
    # the user to be logged in for these
    if (is_api() and request.method.upper() == "GET" and
        request.GET.get("jsonp")):
        c.allowed_callback = request.GET['jsonp']
        c.user = UnloggedUser(get_browser_langs())
        c.user_is_loggedin = False
def get_browser_langs():
    browser_langs = []
    langs = request.environ.get('HTTP_ACCEPT_LANGUAGE')
    if langs:
        langs = langs.split(',')
        browser_langs = []
        seen_langs = set()
        # extract languages from browser string
        for l in langs:
            if ';' in l:
                l = l.split(';')[0]
            if l not in seen_langs and l in g.languages:
                browser_langs.append(l)
                seen_langs.add(l)
            if '-' in l:
                l = l.split('-')[0]
            if l not in seen_langs and l in g.languages:
                browser_langs.append(l)
                seen_langs.add(l)
    return browser_langs
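
# Illustrative sketch (editor's addition): the same Accept-Language scan as
# get_browser_langs(), but over an explicit header string and a hypothetical
# supported-language set instead of request.environ and g.languages.
def _example_parse_accept_language(header='en-US,en;q=0.8,fr;q=0.5',
                                   supported=('en', 'en-US', 'fr')):
    found, seen = [], set()
    for l in header.split(','):
        if ';' in l:
            l = l.split(';')[0]          # drop the ;q= weight
        if l not in seen and l in supported:
            found.append(l)
            seen.add(l)
        if '-' in l:
            l = l.split('-')[0]          # en-US also offers plain en
        if l not in seen and l in supported:
            found.append(l)
            seen.add(l)
    return found   # -> ['en-US', 'en', 'fr']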
def set_host_lang():
    # try to grab the language from the domain
    host_lang = request.environ.get('reddit-prefer-lang')
    if host_lang:
        c.host_lang = host_lang
def set_iface_lang():
    locale.setlocale(locale.LC_ALL, g.locale)
    lang = [g.lang]

    # GET param wins
    if c.host_lang:
        lang = [c.host_lang]
    else:
        lang = [c.user.pref_lang]

    if getattr(g, "lang_override") and lang[0] == "en":
        lang.insert(0, g.lang_override)

    # choose the first language
    c.lang = lang[0]

    # then try to overwrite it if we have the translation for another
    # one
    for l in lang:
        try:
            set_lang(l, fallback_lang=g.lang)
            c.lang = l
            break
        except LanguageError:
            # we don't have a translation for that language
            set_lang(g.lang, graceful_fail=True)

    try:
        c.locale = babel.core.Locale.parse(c.lang, sep='-')
    except (babel.core.UnknownLocaleError, ValueError):
        c.locale = babel.core.Locale.parse(g.lang, sep='-')

    # TODO: add exceptions here for rtl languages
    if c.lang in ('ar', 'he', 'fa'):
        c.lang_rtl = True
def set_content_lang():
    if c.user.pref_content_langs != 'all':
        c.content_langs = list(c.user.pref_content_langs)
        c.content_langs.sort()
    else:
        c.content_langs = c.user.pref_content_langs
def set_cnameframe():
    if (bool(request.params.get(utils.UrlParser.cname_get))
        or not request.host.split(":")[0].endswith(g.domain)):
        c.cname = True
        request.environ['REDDIT_CNAME'] = 1
        if request.params.has_key(utils.UrlParser.cname_get):
            del request.params[utils.UrlParser.cname_get]
        if request.get.has_key(utils.UrlParser.cname_get):
            del request.get[utils.UrlParser.cname_get]
    c.frameless_cname = request.environ.get('frameless_cname', False)
    if hasattr(c.site, 'domain'):
        c.authorized_cname = request.environ.get('authorized_cname', False)
def set_colors():
    theme_rx = re.compile(r'')
    color_rx = re.compile(r'\A([a-fA-F0-9]){3}(([a-fA-F0-9]){3})?\Z')
    c.theme = None
    if color_rx.match(request.get.get('bgcolor') or ''):
        c.bgcolor = request.get.get('bgcolor')
    if color_rx.match(request.get.get('bordercolor') or ''):
        c.bordercolor = request.get.get('bordercolor')
def ratelimit_agent(agent):
    key = 'rate_agent_' + agent
    if g.cache.get(key):
        request.environ['retry_after'] = 1
        abort(429)
    else:
        g.cache.set(key, 't', time=1)

appengine_re = re.compile(r'AppEngine-Google; \(\+http://code.google.com/appengine; appid: s~([a-z0-9-]{6,30})\)\Z')
def ratelimit_agents():
    user_agent = request.user_agent

    if not user_agent:
        return

    # parse out the appid for appengine apps
    appengine_match = appengine_re.match(user_agent)
    if appengine_match:
        appid = appengine_match.group(1)
        ratelimit_agent(appid)
        return

    user_agent = user_agent.lower()
    for s in g.agents:
        if s and user_agent and s in user_agent:
            ratelimit_agent(s)
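
# Illustrative sketch (editor's addition): a hypothetical App Engine
# user-agent that appengine_re reduces to its appid, which ratelimit_agent()
# then uses as the per-agent throttle key.
def _example_appengine_appid():
    ua = ('AppEngine-Google; (+http://code.google.com/appengine; '
          'appid: s~my-feed-app)')
    match = appengine_re.match(ua)
    return match and match.group(1)   # -> 'my-feed-app'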
def ratelimit_throttled():
    ip = request.ip.strip()
    if is_throttled(ip):
        abort(429)
def paginated_listing(default_page_size=25, max_page_size=100, backend='sql'):
    def decorator(fn):
        @validate(num=VLimit('limit', default=default_page_size,
                             max_limit=max_page_size),
                  after=VByName('after', backend=backend),
                  before=VByName('before', backend=backend),
                  count=VCount('count'),
                  target=VTarget("target"),
                  show=VLength('show', 3))
        @utils.wraps_api(fn)
        def new_fn(self, before, **env):
            if c.render_style == "htmllite":
                c.link_target = env.get("target")
            elif "target" in env:
                del env["target"]

            if "show" in env and env['show'] == 'all':
                c.ignore_hide_rules = True
            kw = build_arg_list(fn, env)

            # turn before into after/reverse
            kw['reverse'] = False
            if before:
                kw['after'] = before
                kw['reverse'] = True

            return fn(self, **kw)
        return new_fn
    return decorator
# TODO: I want to get rid of this function. once the listings in front.py are
# moved into listingcontroller, we shouldn't have a need for this
# anymore
def base_listing(fn):
    return paginated_listing()(fn)
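
# Illustrative sketch (editor's addition): a hypothetical handler wrapped the
# way listing endpoints use paginated_listing().  This assumes build_arg_list()
# forwards only the validated params the handler names; by the time the body
# runs, paging "before" an item has been rewritten to after=<item>,
# reverse=True.
class _ExampleListingController(object):
    @paginated_listing(default_page_size=25, max_page_size=100)
    def GET_example_listing(self, num, after, reverse, count, **kw):
        return (num, after, reverse, count)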
def is_trusted_origin(origin):
    try:
        origin = urlparse(origin)
    except ValueError:
        return False

    return any(is_subdomain(origin.hostname, domain)
               for domain in g.trusted_domains)
def cross_domain(origin_check=is_trusted_origin, **options):
    """Set up cross domain validation and hoisting for a request handler."""
    def cross_domain_wrap(fn):
        cors_perms = {
            "origin_check": origin_check,
            "allow_credentials": bool(options.get("allow_credentials")),
        }

        def cross_domain_handler(self, *args, **kwargs):
            if request.params.get("hoist") == "cookie":
                # Cookie polling response
                if cors_perms["origin_check"](g.origin):
                    name = request.environ["pylons.routes_dict"]["action_name"]
                    resp = fn(self, *args, **kwargs)
                    c.cookies.add('hoist_%s' % name, ''.join(resp.content))
                    c.response_content_type = 'text/html'
                    resp.content = ''
                    return resp
                else:
                    abort(403)
            else:
                self.check_cors()
                return fn(self, *args, **kwargs)

        cross_domain_handler.cors_perms = cors_perms
        return cross_domain_handler
    return cross_domain_wrap
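
# Illustrative sketch (editor's addition): how a handler opts into CORS via
# cross_domain(); the method and its body are hypothetical.  With
# ?hoist=cookie the wrapper instead stashes the response body in a
# "hoist_<action>" cookie for iframe-based cross-domain polling.
class _ExampleCrossDomainController(object):
    @cross_domain(allow_credentials=True)
    def POST_example_action(self, *a, **kw):
        return None   # a real handler would return the usual response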
def require_https():
    if not c.secure:
        abort(ForbiddenError(errors.HTTPS_REQUIRED))

def prevent_framing_and_css(allow_cname_frame=False):
    def wrap(f):
        @utils.wraps_api(f)
        def no_funny_business(*args, **kwargs):
            c.allow_styles = False
            if not (allow_cname_frame and c.cname and not c.authorized_cname):
                c.deny_frames = True
            return f(*args, **kwargs)
        return no_funny_business
    return wrap
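
# Illustrative sketch (editor's addition): a hypothetical sensitive endpoint
# locked down with prevent_framing_and_css(), which turns off subreddit
# stylesheets and, unless the cname exemption applies, makes post() send
# X-Frame-Options: DENY.
class _ExampleSensitiveController(object):
    @prevent_framing_and_css()
    def GET_example_prefs(self, *a, **kw):
        return None   # the rendered preferences page would go here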
class MinimalController(BaseController):

    allow_stylesheets = False

    def request_key(self):
        # note that this references the cookie at request time, not
        # the current value of it
        try:
            cookies_key = [(x, request.cookies.get(x, ''))
                           for x in cache_affecting_cookies]
        except CookieError:
            cookies_key = ''

        return make_key('request_key_',
                        c.lang,
                        c.content_langs,
                        request.host,
                        c.secure,
                        c.cname,
                        request.fullpath,
                        c.over18,
                        c.firsttime,
                        c.extension,
                        c.render_style,
                        cookies_key)
    def cached_response(self):
        return c.response

    def pre(self):
        c.response_wrappers = []
        c.start_time = datetime.now(g.tz)
        g.reset_caches()

        c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                              g.domain_prefix)
        c.secure = request.host in g.secure_domains

        # check if user-agent needs a dose of rate-limiting
        if not c.error_page:
            ratelimit_throttled()
            ratelimit_agents()

        c.allow_loggedin_cache = False
        c.show_wiki_actions = False

        # the domain has to be set before Cookies get initialized
        set_subreddit()
        c.errors = ErrorSet()
        c.cookies = Cookies()
        # if an rss feed, this will also log the user in if a feed=
        # GET param is included
        set_content_type()
    def try_pagecache(self):
        # check content cache
        if request.method.upper() == 'GET' and not c.user_is_loggedin:
            r = g.rendercache.get(self.request_key())
            if r:
                r, c.cookies = r
                response = c.response
                response.headers = r.headers
                response.content = r.content

                for x in r.cookies.keys():
                    if x in cache_affecting_cookies:
                        cookie = r.cookies[x]
                        response.set_cookie(key=x,
                                            value=cookie.value,
                                            domain=cookie.get('domain', None),
                                            expires=cookie.get('expires', None),
                                            path=cookie.get('path', None),
                                            secure=cookie.get('secure', False),
                                            httponly=cookie.get('httponly',
                                                                False))

                response.status_code = r.status_code
                request.environ['pylons.routes_dict']['action'] = 'cached_response'
                # make sure to carry over the content type
                c.response_content_type = r.headers['content-type']
                c.used_cache = True
                # response wrappers have already been applied before cache write
                c.response_wrappers = []
    def post(self):
        response = c.response
        content = filter(None, response.content)
        if isinstance(content, (list, tuple)):
            content = ''.join(content)
        for w in c.response_wrappers:
            content = w(content)
        response.content = content
        if c.response_content_type:
            response.headers['Content-Type'] = c.response_content_type

        if c.user_is_loggedin and not c.allow_loggedin_cache:
            response.headers['Cache-Control'] = 'no-cache'
            response.headers['Pragma'] = 'no-cache'

        if c.deny_frames:
            response.headers["X-Frame-Options"] = "DENY"

        # set content cache
        if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_code not in (429, 503)
            and response.content and response.content[0]):
            try:
                g.rendercache.set(self.request_key(),
                                  (response, c.cookies),
                                  g.page_cache_time)
            except MemcachedError as e:
                # this codepath will actually never be hit as long as
                # the pagecache memcached client is in no_reply mode.
                g.log.warning("Ignored exception (%r) on pagecache "
                              "write for %r", e, request.path)

        # send cookies
        for k, v in c.cookies.iteritems():
            if v.dirty:
                response.set_cookie(key=k,
                                    value=quote(v.value),
                                    domain=v.domain,
                                    expires=v.expires,
                                    secure=getattr(v, 'secure', False),
                                    httponly=getattr(v, 'httponly', False))

        end_time = datetime.now(g.tz)

        # update last_visit
        if (c.user_is_loggedin and not g.disallow_db_writes and
            request.path != '/validuser'):
            c.user.update_last_visit(c.start_time)

        if ('pylons.routes_dict' in request.environ and
            'action' in request.environ['pylons.routes_dict']):
            action = str(request.environ['pylons.routes_dict']['action'])
        else:
            action = "unknown"
            log_text("unknown action", "no action for %r" % request.path,
                     "warning")

        if g.usage_sampling >= 1.0 or rand.random() < g.usage_sampling:
            amqp.add_kw("usage_q",
                        start_time=c.start_time,
                        end_time=end_time,
                        sampling_rate=g.usage_sampling,
                        action=action)

        check_request(end_time)

        # this thread is probably going to be reused, but it could be
        # a while before it is. So we might as well dump the cache in
        # the mean time so that we don't have dead objects hanging
        # around taking up memory
        g.reset_caches()

        # push data to statsd
        if 'pylons.action_method' in request.environ:
            # only report web timing data if an action handler was called
            g.stats.transact('web.%s' % action,
                             (end_time - c.start_time).total_seconds())
        g.stats.flush()
    def abort404(self):
        abort(404, "not found")

    def abort403(self):
        abort(403, "forbidden")

    def check_cors(self):
        origin = request.headers.get("Origin")
        if not origin:
            return

        method = request.method
        if method == 'OPTIONS':
            # preflight request
            method = request.headers.get("Access-Control-Request-Method")
            if not method:
                self.abort403()

        action = request.environ["pylons.routes_dict"]["action_name"]
        handler = self._get_action_handler(action, method)
        cors = handler and getattr(handler, "cors_perms", None)

        if cors and cors["origin_check"](origin):
            response.headers["Access-Control-Allow-Origin"] = origin
            if cors.get("allow_credentials"):
                response.headers["Access-Control-Allow-Credentials"] = "true"

    def OPTIONS(self):
        """Return empty responses for CORS preflight requests"""
        self.check_cors()

    def sendpng(self, string):
        c.response_content_type = 'image/png'
        c.response.content = string
        return c.response

    def update_qstring(self, dict):
        merged = copy(request.get)
        merged.update(dict)
        return request.path + utils.query_string(merged)

    def api_wrapper(self, kw):
        data = simplejson.dumps(kw)
        c.response.content = filters.websafe_json(data)
        return c.response

    def iframe_api_wrapper(self, kw):
        data = simplejson.dumps(kw)
        c.response_content_type = 'text/html'
        c.response.content = (
            '<html><head><script type="text/javascript">\n'
            'parent.$.handleResponse().call('
            'parent.$("#" + window.frameElement.id).parent(), %s)\n'
            '</script></head></html>') % filters.websafe_json(data)
        return c.response
class RedditController(MinimalController):

    @staticmethod
    def login(user, rem=False):
        c.cookies[g.login_cookie] = Cookie(value=user.make_cookie(),
                                           expires=NEVER if rem else None)

    @staticmethod
    def logout():
        c.cookies[g.login_cookie] = Cookie(value='', expires=DELETE)

    @staticmethod
    def enable_admin_mode(user, first_login=None):
        # no expiration time so the cookie dies with the browser session
        c.cookies[g.admin_cookie] = Cookie(
            value=user.make_admin_cookie(first_login=first_login))

    @staticmethod
    def remember_otp(user):
        cookie = user.make_otp_cookie()
        expiration = datetime.utcnow() + timedelta(seconds=g.OTP_COOKIE_TTL)
        expiration = expiration.strftime("%a, %d %b %Y %H:%M:%S GMT")
        set_user_cookie(g.otp_cookie,
                        cookie,
                        secure=True,
                        httponly=True,
                        expires=expiration)

    @staticmethod
    def disable_admin_mode(user):
        c.cookies[g.admin_cookie] = Cookie(value='', expires=DELETE)
    def pre(self):
        MinimalController.pre(self)

        set_cnameframe()

        # populate c.cookies unless we're on the unsafe media_domain
        if request.host != g.media_domain or g.media_domain == g.domain:
            try:
                for k, v in request.cookies.iteritems():
                    # minimalcontroller can still set cookies
                    if k not in c.cookies:
                        # we can unquote even if it's not quoted
                        c.cookies[k] = Cookie(value=unquote(v), dirty=False)
            except CookieError:
                # pylons and its associated libraries can't handle
                # broken cookies
                request.environ['HTTP_COOKIE'] = ''

        c.firsttime = firsttime()

        # the user could have been logged in via one of the feeds
        maybe_admin = False
        is_otpcookie_valid = False

        # no logins for RSS feed unless valid_feed has already been called
        if not c.user:
            if c.extension != "rss":
                authenticate_user()

                admin_cookie = c.cookies.get(g.admin_cookie)
                if c.user_is_loggedin and admin_cookie:
                    maybe_admin, first_login = valid_admin_cookie(admin_cookie.value)

                    if maybe_admin:
                        self.enable_admin_mode(c.user, first_login=first_login)
                    else:
                        self.disable_admin_mode(c.user)

                otp_cookie = read_user_cookie(g.otp_cookie)
                if c.user_is_loggedin and otp_cookie:
                    is_otpcookie_valid = valid_otp_cookie(otp_cookie)

            if not c.user:
                c.user = UnloggedUser(get_browser_langs())
                # patch for fixing mangled language preferences
                if (not isinstance(c.user.pref_lang, basestring) or
                    not all(isinstance(x, basestring)
                            for x in c.user.pref_content_langs)):
                    c.user.pref_lang = g.lang
                    c.user.pref_content_langs = [g.lang]
                    c.user._commit()

        if c.user_is_loggedin:
            if not c.user._loaded:
                c.user._load()
            c.modhash = c.user.modhash()
            if request.method.upper() == 'GET':
                read_mod_cookie()
            if hasattr(c.user, 'msgtime') and c.user.msgtime:
                c.have_messages = c.user.msgtime
            c.show_mod_mail = Subreddit.reverse_moderator_ids(c.user)
            c.have_mod_messages = getattr(c.user, "modmsgtime", False)
            c.user_is_admin = maybe_admin and c.user.name in g.admins
            c.user_special_distinguish = c.user.special_distinguish()
            c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors
            c.otp_cached = is_otpcookie_valid
            if not isinstance(c.site, FakeSubreddit) and not g.disallow_db_writes:
                c.user.update_sr_activity(c.site)

        c.over18 = over18()
        set_obey_over18()

        # set_browser_langs()
        set_host_lang()
        set_iface_lang()
        set_content_lang()
        set_recent_clicks()
        # used for HTML-lite templates
        set_colors()

        # set some environmental variables in case we hit an abort
        if not isinstance(c.site, FakeSubreddit):
            request.environ['REDDIT_NAME'] = c.site.name

        # random reddit trickery -- have to do this after the content lang is set
        if c.site == Random:
            c.site = Subreddit.random_reddit()
            redirect_to("/" + c.site.path.strip('/') + request.path)
        elif c.site == RandomNSFW:
            c.site = Subreddit.random_reddit(over18=True)
            redirect_to("/" + c.site.path.strip('/') + request.path)

        if not request.path.startswith("/api/login/"):
            # is the subreddit banned?
            if c.site.spammy() and not c.user_is_admin and not c.error_page:
                ban_info = getattr(c.site, "ban_info", {})
                if "message" in ban_info:
                    message = ban_info['message']
                else:
                    sitelink = url_escape(add_sr("/"))
                    subject = ("/r/%s has been incorrectly banned" %
                               c.site.name)
                    link = ("/r/redditrequest/submit?url=%s&title=%s" %
                            (sitelink, subject))
                    message = strings.banned_subreddit_message % dict(
                        link=link)
                errpage = pages.RedditError(strings.banned_subreddit_title,
                                            message,
                                            image="subreddit-banned.png")
                request.environ['usable_error_content'] = errpage.render()
                self.abort404()

            # check if the user has access to this subreddit
            if not c.site.can_view(c.user) and not c.error_page:
                public_description = c.site.public_description
                errpage = pages.RedditError(strings.private_subreddit_title,
                                            strings.private_subreddit_message,
                                            image="subreddit-private.png",
                                            sr_description=public_description)
                request.environ['usable_error_content'] = errpage.render()
                self.abort403()

            # check over 18
            if (c.site.over_18 and not c.over18 and
                request.path not in ("/frame", "/over18") and
                c.render_style == 'html'):
                return self.intermediate_redirect("/over18")

        # check whether to allow custom styles
        c.allow_styles = True
        c.can_apply_styles = self.allow_stylesheets
        if g.css_killswitch:
            c.can_apply_styles = False
        # if the preference is set and we're not at a cname
        elif not c.user.pref_show_stylesheets and not c.cname:
            c.can_apply_styles = False
        # if the site has a cname, but we're not using it
        elif c.site.domain and c.site.css_on_cname and not c.cname:
            c.can_apply_styles = False
    def check_modified(self, thing, action,
                       private=True, max_age=0, must_revalidate=True):
        if c.user_is_loggedin and not c.allow_loggedin_cache:
            return

        last_modified = utils.last_modified_date(thing, action)
        date_str = http_utils.http_date_str(last_modified)
        c.response.headers['last-modified'] = date_str

        cache_control = []
        if private:
            cache_control.append('private')
        cache_control.append('max-age=%d' % max_age)
        if must_revalidate:
            cache_control.append('must-revalidate')
        c.response.headers['cache-control'] = ', '.join(cache_control)

        modified_since = request.if_modified_since
        if modified_since and modified_since >= last_modified:
            abort(304, 'not modified')
    def search_fail(self, exception):
        from r2.lib.search import SearchException
        if isinstance(exception, SearchException + (socket.error,)):
            g.log.error("Search Error: %s" % repr(exception))

        errpage = pages.RedditError(_("search failed"),
                                    strings.search_failed)
        request.environ['usable_error_content'] = errpage.render()
        request.environ['retry_after'] = 60
        abort(503)
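
# ---------------------------------------------------------------------------
# Illustrative sketch (editor's addition): the shape of a typical controller
# built on this module.  By the time a GET_* handler runs, RedditController
# .pre() has authenticated the user, resolved c.site, set the language and
# populated c.cookies; the handler below is hypothetical.
class _ExamplePageController(RedditController):
    def GET_example_hello(self):
        # api_wrapper() JSON-encodes the payload and websafe-escapes it
        greeting = c.user.name if c.user_is_loggedin else 'world'
        return self.api_wrapper({'hello': greeting})
# ---------------------------------------------------------------------------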