PageRenderTime 56ms CodeModel.GetById 21ms RepoModel.GetById 1ms app.codeStats 0ms

/r2/r2/controllers/front.py

https://github.com/stevewilber/reddit
Python | 1334 lines | 1279 code | 28 blank | 27 comment | 22 complexity | 1497ea3458366797b862ee42bc47426a MD5 | raw file
Possible License(s): MPL-2.0-no-copyleft-exception, Apache-2.0

Large files are truncated, but you can click here to view the full file

  1. # The contents of this file are subject to the Common Public Attribution
  2. # License Version 1.0. (the "License"); you may not use this file except in
  3. # compliance with the License. You may obtain a copy of the License at
  4. # http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
  5. # License Version 1.1, but Sections 14 and 15 have been added to cover use of
  6. # software over a computer network and provide for limited attribution for the
  7. # Original Developer. In addition, Exhibit A has been modified to be consistent
  8. # with Exhibit B.
  9. #
  10. # Software distributed under the License is distributed on an "AS IS" basis,
  11. # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
  12. # the specific language governing rights and limitations under the License.
  13. #
  14. # The Original Code is reddit.
  15. #
  16. # The Original Developer is the Initial Developer. The Initial Developer of
  17. # the Original Code is reddit Inc.
  18. #
  19. # All portions of the code written by reddit are Copyright (c) 2006-2012 reddit
  20. # Inc. All Rights Reserved.
  21. ###############################################################################
  22. from validator import *
  23. from pylons.i18n import _, ungettext
  24. from pylons.controllers.util import redirect_to
  25. from reddit_base import RedditController, base_listing, paginated_listing, prevent_framing_and_css
  26. from r2 import config
  27. from r2.models import *
  28. from r2.config.extensions import is_api
  29. from r2.lib.pages import *
  30. from r2.lib.pages.things import wrap_links
  31. from r2.lib.pages import trafficpages
  32. from r2.lib.menus import *
  33. from r2.lib.utils import to36, sanitize_url, check_cheating, title_to_url
  34. from r2.lib.utils import query_string, UrlParser, link_from_url, link_duplicates
  35. from r2.lib.template_helpers import get_domain
  36. from r2.lib.filters import unsafe, _force_unicode
  37. from r2.lib.emailer import has_opted_out, Email
  38. from r2.lib.db.operators import desc
  39. from r2.lib.db import queries
  40. from r2.lib.db.tdb_cassandra import MultiColumnQuery
  41. from r2.lib.strings import strings
  42. from r2.lib.search import (SearchQuery, SubredditSearchQuery, SearchException,
  43. InvalidQuery)
  44. from r2.lib import jsontemplates
  45. from r2.lib import sup
  46. import r2.lib.db.thing as thing
  47. from errors import errors
  48. from listingcontroller import ListingController
  49. from oauth2 import OAuth2ResourceController, require_oauth2_scope
  50. from api_docs import api_doc, api_section
  51. from pylons import c, request, request, Response
  52. from r2.models.token import EmailVerificationToken
  53. from r2.controllers.ipn import generate_blob
  54. from operator import attrgetter
  55. import string
  56. import random as rand
  57. import re, socket
  58. import time as time_module
  59. from urllib import quote_plus
class FrontController(RedditController, OAuth2ResourceController):
    """Controller for reddit's front-facing pages: comment pages, search,
    submit, subreddit administration, and assorted legacy redirects."""

    # Pages served by this controller may carry subreddit custom stylesheets.
    allow_stylesheets = True
    def pre(self):
        """Per-request setup: accept an OAuth2 bearer token if one was sent,
        then run the standard reddit request preamble."""
        self.check_for_bearer_token()
        RedditController.pre(self)
    @validate(article = VLink('article'),
              comment = VCommentID('comment'))
    def GET_oldinfo(self, article, type, dest, rest=None, comment=''):
        """Legacy: supporting permalink pages from '06,
        and non-search-engine-friendly links

        `type` and `dest` come from the routing table; `type` selects which
        legacy URL scheme is being translated.
        """
        # Unknown destinations fall back to the comments page.
        if not (dest in ('comments','related','details')):
            dest = 'comments'
        if type == 'ancient':
            # '06-era ids counted downward from a fixed ceiling.
            #this could go in config, but it should never change
            max_link_id = 10000000
            new_id = max_link_id - int(article._id)
            return self.redirect('/info/' + to36(new_id) + '/' + rest)
        if type == 'old':
            # Rebuild the modern SEO-friendly URL from the article title.
            new_url = "/%s/%s/%s" % \
                      (dest, article._id36,
                       quote_plus(title_to_url(article.title).encode('utf-8')))
            if not c.default_sr:
                new_url = "/r/%s%s" % (c.site.name, new_url)
            if comment:
                new_url = new_url + "/%s" % comment._id36
            if c.extension:
                new_url = new_url + "/.%s" % c.extension
            # Preserve the original query string on the redirect.
            new_url = new_url + query_string(request.get)
            # redirect should be smarter and handle extensions, etc.
            return self.redirect(new_url, code=301)
  90. @api_doc(api_section.listings)
  91. def GET_random(self):
  92. """The Serendipity button"""
  93. sort = rand.choice(('new','hot'))
  94. links = c.site.get_links(sort, 'all')
  95. if isinstance(links, thing.Query):
  96. links._limit = g.num_serendipity
  97. links = [x._fullname for x in links]
  98. else:
  99. links = list(links)[:g.num_serendipity]
  100. rand.shuffle(links)
  101. builder = IDBuilder(links, skip = True,
  102. keep_fn = lambda x: x.fresh,
  103. num = 1)
  104. links = builder.get_items()[0]
  105. if links:
  106. l = links[0]
  107. return self.redirect(add_sr("/tb/" + l._id36))
  108. else:
  109. return self.redirect(add_sr('/'))
  110. @prevent_framing_and_css()
  111. @validate(VAdmin(),
  112. thing = VByName('article'),
  113. oldid36 = nop('article'),
  114. after=nop('after'),
  115. before=nop('before'),
  116. count=VCount('count'))
  117. def GET_details(self, thing, oldid36, after, before, count):
  118. """The (now deprecated) details page. Content on this page
  119. has been subsubmed by the presence of the LinkInfoBar on the
  120. rightbox, so it is only useful for Admin-only wizardry."""
  121. if not thing:
  122. try:
  123. link = Link._byID36(oldid36)
  124. return self.redirect('/details/' + link._fullname)
  125. except (NotFound, ValueError):
  126. abort(404)
  127. kw = {'count': count}
  128. if before:
  129. kw['after'] = before
  130. kw['reverse'] = True
  131. else:
  132. kw['after'] = after
  133. kw['reverse'] = False
  134. return DetailsPage(thing=thing, expand_children=False, **kw).render()
  135. def GET_selfserviceoatmeal(self):
  136. return BoringPage(_("self service help"),
  137. show_sidebar = False,
  138. content = SelfServiceOatmeal()).render()
    @validate(article = VLink('article'))
    def GET_shirt(self, article):
        """Defunct shirt endpoint: 403s on unviewable links, otherwise 404s."""
        if not can_view_link_comments(article):
            abort(403, 'forbidden')
        # Feature is disabled; always 404 for viewable links.
        return self.abort404()
  144. def _comment_visits(self, article, user, new_visit=None):
  145. hc_key = "comment_visits-%s-%s" % (user.name, article._id36)
  146. old_visits = g.hardcache.get(hc_key, [])
  147. append = False
  148. if new_visit is None:
  149. pass
  150. elif len(old_visits) == 0:
  151. append = True
  152. else:
  153. last_visit = max(old_visits)
  154. time_since_last = new_visit - last_visit
  155. if (time_since_last.days > 0
  156. or time_since_last.seconds > g.comment_visits_period):
  157. append = True
  158. else:
  159. # They were just here a few seconds ago; consider that
  160. # the same "visit" as right now
  161. old_visits.pop()
  162. if append:
  163. copy = list(old_visits) # make a copy
  164. copy.append(new_visit)
  165. if len(copy) > 10:
  166. copy.pop(0)
  167. g.hardcache.set(hc_key, copy, 86400 * 2)
  168. return old_visits
  169. @validate(article = VLink('article'),
  170. comment = VCommentID('comment'),
  171. context = VInt('context', min = 0, max = 8),
  172. sort = VMenu('controller', CommentSortMenu),
  173. limit = VInt('limit'),
  174. depth = VInt('depth'))
  175. def POST_comments(self, article, comment, context, sort, limit, depth):
  176. # VMenu validator will save the value of sort before we reach this
  177. # point. Now just redirect to GET mode.
  178. return self.redirect(request.fullpath + query_string(dict(sort=sort)))
    @require_oauth2_scope("read")
    @validate(article = VLink('article'),
              comment = VCommentID('comment'),
              context = VInt('context', min = 0, max = 8),
              sort = VMenu('controller', CommentSortMenu),
              limit = VInt('limit'),
              depth = VInt('depth'))
    @api_doc(api_section.listings,
             uri='/comments/{article}',
             extensions=['json', 'xml'])
    def GET_comments(self, article, comment, context, sort, limit, depth):
        """Comment page for a given 'article'."""
        # A focal comment must actually belong to this article.
        if comment and comment.link_id != article._id:
            return self.abort404()
        sr = Subreddit._byID(article.sr_id, True)
        # Takedown handling: flag the request and 404.
        if sr.name == g.takedown_sr:
            request.environ['REDDIT_TAKEDOWN'] = article._fullname
            return self.abort404()
        # Don't serve another subreddit's article under this site.
        if not c.default_sr and c.site._id != sr._id:
            return self.abort404()
        if not can_view_link_comments(article):
            abort(403, 'forbidden')
        #check for 304
        self.check_modified(article, 'comments')
        # If there is a focal comment, communicate down to
        # comment_skeleton.html who that will be. Also, skip
        # comment_visits check
        previous_visits = None
        if comment:
            c.focal_comment = comment._id36
        elif (c.user_is_loggedin and c.user.gold and
              c.user.pref_highlight_new_comments):
            #TODO: remove this profiling if load seems okay
            from datetime import datetime
            before = datetime.now(g.tz)
            previous_visits = self._comment_visits(article, c.user, c.start_time)
            after = datetime.now(g.tz)
            delta = (after - before)
            msec = (delta.seconds * 1000 + delta.microseconds / 1000)
            if msec >= 100:
                g.log.warning("previous_visits code took %d msec" % msec)
        # check if we just came from the submit page
        infotext = None
        if request.get.get('already_submitted'):
            infotext = strings.already_submitted % article.resubmit_link()
        check_cheating('comments')
        # Work out how many comments to show: user preference, capped by
        # the (gold or regular) site-wide maximum.
        if not c.user.pref_num_comments:
            num = g.num_comments
        elif c.user.gold:
            num = min(c.user.pref_num_comments, g.max_comments_gold)
        else:
            num = min(c.user.pref_num_comments, g.max_comments)
        kw = {}
        # allow depth to be reset (I suspect I'll turn the VInt into a
        # validator on my next pass of .compact)
        if depth is not None and 0 < depth < MAX_RECURSION:
            kw['max_depth'] = depth
        elif c.render_style == "compact":
            kw['max_depth'] = 5
        displayPane = PaneStack()
        # allow the user's total count preferences to be overwritten
        # (think of .embed as the use case together with depth=1)
        if limit and limit > 0:
            num = limit
        # Clamp the requested count to the applicable maximum, telling the
        # user when their request was reduced.
        if c.user_is_loggedin and c.user.gold:
            if num > g.max_comments_gold:
                displayPane.append(InfoBar(message =
                                           strings.over_comment_limit_gold
                                           % max(0, g.max_comments_gold)))
                num = g.max_comments_gold
        elif num > g.max_comments:
            if limit:
                displayPane.append(InfoBar(message =
                                           strings.over_comment_limit
                                           % dict(max=max(0, g.max_comments),
                                                  goldmax=max(0,
                                                        g.max_comments_gold))))
            num = g.max_comments
        # if permalink page, add that message first to the content
        if comment:
            displayPane.append(PermalinkMessage(article.make_permalink_slow()))
        displayPane.append(LinkCommentSep())
        # insert reply box only for logged in user
        if c.user_is_loggedin and can_comment_link(article) and not is_api():
            #no comment box for permalinks
            display = False
            if not comment:
                age = c.start_time - article._date
                # Old, unpromoted links get a collapsed reply box.
                if article.promoted or age.days < g.REPLY_AGE_LIMIT:
                    display = True
            displayPane.append(UserText(item = article, creating = True,
                                        post_form = 'comment',
                                        display = display,
                                        cloneable = True))
        if previous_visits:
            displayPane.append(CommentVisitsBox(previous_visits))
            # Used in later "more comments" renderings
            pv_hex = md5(repr(previous_visits)).hexdigest()
            g.cache.set(pv_hex, previous_visits, time=g.comment_visits_period)
            c.previous_visits_hex = pv_hex
            # Used in template_helpers
            c.previous_visits = previous_visits
        # finally add the comment listing
        displayPane.append(CommentPane(article, CommentSortMenu.operator(sort),
                                       comment, context, num, **kw))
        # Subtitle ("all N comments" / "top N comments") and any
        # "show more" buttons next to it.
        subtitle_buttons = []
        if c.focal_comment or context is not None:
            subtitle = None
        elif article.num_comments == 0:
            subtitle = _("no comments (yet)")
        elif article.num_comments <= num:
            subtitle = _("all %d comments") % article.num_comments
        else:
            subtitle = _("top %d comments") % num
            if g.max_comments > num:
                self._add_show_comments_link(subtitle_buttons, article, num,
                                             g.max_comments, gold=False)
            if (c.user_is_loggedin and c.user.gold
                and article.num_comments > g.max_comments):
                self._add_show_comments_link(subtitle_buttons, article, num,
                                             g.max_comments_gold, gold=True)
        res = LinkInfoPage(link = article, comment = comment,
                           content = displayPane,
                           page_classes = ['comments-page'],
                           subtitle = subtitle,
                           subtitle_buttons = subtitle_buttons,
                           nav_menus = [CommentSortMenu(default = sort)],
                           infotext = infotext).render()
        return res
  308. def _add_show_comments_link(self, array, article, num, max_comm, gold=False):
  309. if num == max_comm:
  310. return
  311. elif article.num_comments <= max_comm:
  312. link_text = _("show all %d") % article.num_comments
  313. else:
  314. link_text = _("show %d") % max_comm
  315. limit_param = "?limit=%d" % max_comm
  316. if gold:
  317. link_class = "gold"
  318. else:
  319. link_class = ""
  320. more_link = article.make_permalink_slow() + limit_param
  321. array.append( (link_text, more_link, link_class) )
  322. @validate(VUser(),
  323. name = nop('name'))
  324. def GET_newreddit(self, name):
  325. """Create a community form"""
  326. title = _('create a reddit')
  327. content=CreateSubreddit(name = name or '')
  328. res = FormPage(_("create a community"),
  329. content = content,
  330. ).render()
  331. return res
  332. def GET_stylesheet(self):
  333. if g.css_killswitch:
  334. self.abort404()
  335. # de-stale the subreddit object so we don't poison nginx's cache
  336. if not isinstance(c.site, FakeSubreddit):
  337. c.site = Subreddit._byID(c.site._id, data=True, stale=False)
  338. if c.site.stylesheet_is_static:
  339. # TODO: X-Private-Subreddit?
  340. return redirect_to(c.site.stylesheet_url)
  341. else:
  342. stylesheet_contents = c.site.stylesheet_contents
  343. if stylesheet_contents:
  344. c.allow_loggedin_cache = True
  345. c.response_content_type = 'text/css'
  346. c.response.content = stylesheet_contents
  347. if c.site.type == 'private':
  348. c.response.headers['X-Private-Subreddit'] = 'private'
  349. return c.response
  350. else:
  351. return self.abort404()
    def _make_moderationlog(self, srs, num, after, reverse, count, mod=None, action=None):
        """Build the rendered mod-action listing pane for `srs`.

        When BOTH a moderator and an action filter are given, the backing
        query is issued with action=None and the action filter is applied
        in keep_fn instead -- presumably the store can't filter on both at
        once (confirm against Subreddit.get_modactions).
        """
        if mod and action:
            # Query by mod only; filter action client-side via keep_fn.
            query = Subreddit.get_modactions(srs, mod=mod, action=None)
            def keep_fn(ma):
                return ma.action == action
        else:
            query = Subreddit.get_modactions(srs, mod=mod, action=action)
            def keep_fn(ma):
                return True
        builder = QueryBuilder(query, skip=True, num=num, after=after,
                               keep_fn=keep_fn, count=count,
                               reverse=reverse,
                               wrap=default_thing_wrapper())
        listing = ModActionListing(builder)
        pane = listing.listing()
        return pane
    @require_oauth2_scope("modlog")
    @prevent_framing_and_css(allow_cname_frame=True)
    @paginated_listing(max_page_size=500, backend='cassandra')
    @validate(mod=nop('mod'),
              action=VOneOf('type', ModAction.actions))
    @api_doc(api_section.moderation)
    def GET_moderationlog(self, num, after, reverse, count, mod, action):
        """Moderation-log page, filterable by moderator and action type."""
        # Only admins and this subreddit's moderators may view the log.
        if not c.user_is_loggedin or not (c.user_is_admin or
                                          c.site.is_moderator(c.user)):
            return self.abort404()
        # Resolve the ?mod= name to an Account; unknown names drop the filter.
        if mod:
            try:
                mod = Account._by_name(mod, allow_deleted=True)
            except NotFound:
                mod = None
        if isinstance(c.site, (MultiReddit, ModSR)):
            srs = Subreddit._byID(c.site.sr_ids, return_dict=False)
            # grab all moderators
            mod_ids = set(Subreddit.get_all_mod_ids(srs))
            mods = Account._byID(mod_ids, data=True)
            pane = self._make_moderationlog(srs, num, after, reverse, count,
                                            mod=mod, action=action)
        elif isinstance(c.site, FakeSubreddit):
            return self.abort404()
        else:
            mod_ids = c.site.moderators
            mods = Account._byID(mod_ids, data=True)
            pane = self._make_moderationlog(c.site, num, after, reverse, count,
                                            mod=mod, action=action)
        panes = PaneStack()
        panes.append(pane)
        # Build the two filter drop-downs: action type and moderator.
        action_buttons = [NavButton(_('all'), None, opt='type', css_class='primary')]
        for a in ModAction.actions:
            action_buttons.append(NavButton(ModAction._menu[a], a, opt='type'))
        mod_buttons = [NavButton(_('all'), None, opt='mod', css_class='primary')]
        for mod_id in mod_ids:
            mod = mods[mod_id]
            mod_buttons.append(NavButton(mod.name, mod.name, opt='mod'))
        base_path = request.path
        menus = [NavMenu(action_buttons, base_path=base_path,
                         title=_('filter by action'), type='lightdrop', css_class='modaction-drop'),
                 NavMenu(mod_buttons, base_path=base_path,
                         title=_('filter by moderator'), type='lightdrop')]
        return EditReddit(content=panes,
                          nav_menus=menus,
                          location="log",
                          extension_handling=False).render()
  415. def _make_spamlisting(self, location, num, after, reverse, count):
  416. if location == 'reports':
  417. query = c.site.get_reported()
  418. elif location == 'spam':
  419. query = c.site.get_spam()
  420. elif location == 'modqueue':
  421. query = c.site.get_modqueue()
  422. elif location == 'unmoderated':
  423. query = c.site.get_unmoderated()
  424. else:
  425. raise ValueError
  426. if isinstance(query, thing.Query):
  427. builder_cls = QueryBuilder
  428. elif isinstance (query, list):
  429. builder_cls = QueryBuilder
  430. else:
  431. builder_cls = IDBuilder
  432. def keep_fn(x):
  433. # no need to bother mods with banned users, or deleted content
  434. if getattr(x,'hidden',False) or x._deleted:
  435. return False
  436. if location == "reports":
  437. return x.reported > 0 and not x._spam
  438. elif location == "spam":
  439. return x._spam
  440. elif location == "modqueue":
  441. if x.reported > 0 and not x._spam:
  442. return True # reported but not banned
  443. verdict = getattr(x, "verdict", None)
  444. if verdict is None:
  445. return True # anything without a verdict
  446. if x._spam and verdict != 'mod-removed':
  447. return True # spam, unless banned by a moderator
  448. return False
  449. elif location == "unmoderated":
  450. return True
  451. else:
  452. raise ValueError
  453. builder = builder_cls(query,
  454. skip = True,
  455. num = num, after = after,
  456. keep_fn = keep_fn,
  457. count = count, reverse = reverse,
  458. wrap = ListingController.builder_wrapper)
  459. listing = LinkListing(builder)
  460. pane = listing.listing()
  461. # Indicate that the comment tree wasn't built for comments
  462. for i in pane.things:
  463. if hasattr(i, 'body'):
  464. i.child = None
  465. return pane
  466. def _edit_modcontrib_reddit(self, location, num, after, reverse, count, created):
  467. extension_handling = False
  468. if not c.user_is_loggedin:
  469. return self.abort404()
  470. if isinstance(c.site, (ModSR, MultiReddit)):
  471. level = 'mod'
  472. elif isinstance(c.site, ContribSR):
  473. level = 'contrib'
  474. elif isinstance(c.site, AllSR):
  475. level = 'all'
  476. else:
  477. raise ValueError
  478. if (level == 'mod' and
  479. location in ('reports', 'spam', 'modqueue', 'unmoderated')):
  480. pane = self._make_spamlisting(location, num, after, reverse, count)
  481. if c.user.pref_private_feeds:
  482. extension_handling = "private"
  483. else:
  484. return self.abort404()
  485. return EditReddit(content=pane,
  486. location=location,
  487. extension_handling=extension_handling).render()
    def _edit_normal_reddit(self, location, num, after, reverse, count, created,
                            name, user):
        """Dispatch the /about/<location> admin pages for a normal subreddit.

        Each branch both checks permission and builds the pane; falls
        through to 404 for unknown or unauthorized locations.
        """
        # moderator is either reddit's moderator or an admin
        is_moderator = c.user_is_loggedin and c.site.is_moderator(c.user) or c.user_is_admin
        extension_handling = False
        if is_moderator and location == 'edit':
            pane = PaneStack()
            if created == 'true':
                pane.append(InfoBar(message = strings.sr_created))
            c.allow_styles = True
            # Re-fetch fresh so the edit form shows current settings.
            c.site = Subreddit._byID(c.site._id, data=True, stale=False)
            pane.append(CreateSubreddit(site = c.site))
        elif location == 'moderators':
            # Visible to everyone; editable only by moderators.
            pane = ModList(editable = is_moderator)
        elif is_moderator and location == 'banned':
            pane = BannedList(editable = is_moderator)
        elif is_moderator and location == 'wikibanned':
            pane = WikiBannedList(editable = is_moderator)
        elif is_moderator and location == 'wikicontributors':
            pane = WikiMayContributeList(editable = is_moderator)
        elif (location == 'contributors' and
              # On public reddits, only moderators can see the whitelist.
              # On private reddits, all contributors can see each other.
              (c.site.type != 'public' or
               (c.user_is_loggedin and
                (c.site.is_moderator(c.user) or c.user_is_admin)))):
            pane = ContributorList(editable = is_moderator)
        elif (location == 'stylesheet'
              and c.site.can_change_stylesheet(c.user)
              and not g.css_killswitch):
            # Prefer the user's in-progress stylesheet over the live one.
            if hasattr(c.site,'stylesheet_contents_user') and c.site.stylesheet_contents_user:
                stylesheet_contents = c.site.stylesheet_contents_user
            elif hasattr(c.site,'stylesheet_contents') and c.site.stylesheet_contents:
                stylesheet_contents = c.site.stylesheet_contents
            else:
                stylesheet_contents = ''
            c.allow_styles = True
            pane = SubredditStylesheet(site = c.site,
                                       stylesheet_contents = stylesheet_contents)
        elif (location == 'stylesheet'
              and c.site.can_view(c.user)
              and not g.css_killswitch):
            # Read-only stylesheet source for non-editors.
            stylesheet = (c.site.stylesheet_contents_user or
                          c.site.stylesheet_contents)
            pane = SubredditStylesheetSource(stylesheet_contents=stylesheet)
        elif (location in ('reports', 'spam', 'modqueue', 'unmoderated')
              and is_moderator):
            c.allow_styles = True
            pane = self._make_spamlisting(location, num, after, reverse, count)
            if c.user.pref_private_feeds:
                extension_handling = "private"
        elif (is_moderator or c.user_is_sponsor) and location == 'traffic':
            pane = trafficpages.SubredditTraffic()
        elif is_moderator and location == 'flair':
            c.allow_styles = True
            pane = FlairPane(num, after, reverse, name, user)
        elif c.user_is_sponsor and location == 'ads':
            pane = RedditAds()
        elif (location == "about") and is_api():
            return self.redirect(add_sr('about.json'), code=301)
        else:
            return self.abort404()
        return EditReddit(content=pane,
                          location=location,
                          extension_handling=extension_handling).render()
    @base_listing
    @prevent_framing_and_css(allow_cname_frame=True)
    @validate(location = nop('location'),
              created = VOneOf('created', ('true','false'),
                               default = 'false'),
              name = nop('name'))
    def GET_editreddit(self, location, num, after, reverse, count, created,
                       name):
        """Edit reddit form.

        Routes to the aggregate-site handler for mod/contrib/all sites and
        to the normal-subreddit handler otherwise.
        """
        user = None
        if name:
            try:
                user = Account._by_name(name)
            except NotFound:
                c.errors.add(errors.USER_DOESNT_EXIST, field='name')
        c.profilepage = True
        if isinstance(c.site, ModContribSR):
            return self._edit_modcontrib_reddit(location, num, after, reverse,
                                                count, created)
        elif isinstance(c.site, MultiReddit):
            # Multireddit admin pages require admin or moderator rights.
            if not (c.user_is_admin or c.site.is_moderator(c.user)):
                self.abort403()
            return self._edit_modcontrib_reddit(location, num, after, reverse,
                                                count, created)
        elif isinstance(c.site, AllSR) and c.user_is_admin:
            return self._edit_modcontrib_reddit(location, num, after, reverse,
                                                count, created)
        elif isinstance(c.site, FakeSubreddit):
            return self.abort404()
        else:
            return self._edit_normal_reddit(location, num, after, reverse,
                                            count, created, name, user)
  585. @require_oauth2_scope("read")
  586. @api_doc(api_section.subreddits, uri='/r/{subreddit}/about', extensions=['json'])
  587. def GET_about(self):
  588. """Return information about the subreddit.
  589. Data includes the subscriber count, description, and header image."""
  590. if not is_api() or isinstance(c.site, FakeSubreddit):
  591. return self.abort404()
  592. return Reddit(content = Wrapped(c.site)).render()
  593. def GET_awards(self):
  594. """The awards page."""
  595. return BoringPage(_("awards"), content = UserAwards()).render()
    # filter for removing punctuation which could be interpreted as search
    # syntax when GET_related builds a query from an article title
    related_replace_regex = re.compile(r'[?\\&|!{}+~^()"\':*-]+')
    related_replace_with = ' '
  599. @base_listing
  600. @validate(article = VLink('article'))
  601. def GET_related(self, num, article, after, reverse, count):
  602. """Related page: performs a search using title of article as
  603. the search query.
  604. """
  605. if not can_view_link_comments(article):
  606. abort(403, 'forbidden')
  607. query = self.related_replace_regex.sub(self.related_replace_with,
  608. article.title)
  609. query = _force_unicode(query)
  610. query = query[:1024]
  611. query = u"|".join(query.split())
  612. query = u"title:'%s'" % query
  613. rel_range = timedelta(days=3)
  614. start = int(time.mktime((article._date - rel_range).utctimetuple()))
  615. end = int(time.mktime((article._date + rel_range).utctimetuple()))
  616. nsfw = u"nsfw:0" if not (article.over_18 or article._nsfw.findall(article.title)) else u""
  617. query = u"(and %s timestamp:%s..%s %s)" % (query, start, end, nsfw)
  618. q = SearchQuery(query, raw_sort="-text_relevance",
  619. syntax="cloudsearch")
  620. pane = self._search(q, num=num, after=after, reverse=reverse,
  621. count=count)[2]
  622. return LinkInfoPage(link=article, content=pane,
  623. subtitle=_('related')).render()
  624. @base_listing
  625. @validate(article = VLink('article'))
  626. def GET_duplicates(self, article, num, after, reverse, count):
  627. if not can_view_link_comments(article):
  628. abort(403, 'forbidden')
  629. links = link_duplicates(article)
  630. links.sort(key=attrgetter('num_comments'), reverse=True)
  631. builder = IDBuilder([ link._fullname for link in links ],
  632. num = num, after = after, reverse = reverse,
  633. count = count, skip = False)
  634. listing = LinkListing(builder).listing()
  635. res = LinkInfoPage(link = article,
  636. comment = None,
  637. duplicates = links,
  638. content = listing,
  639. subtitle = _('other discussions')).render()
  640. return res
  641. @base_listing
  642. @validate(query = nop('q'))
  643. @api_doc(api_section.subreddits, uri='/reddits/search', extensions=['json', 'xml'])
  644. def GET_search_reddits(self, query, reverse, after, count, num):
  645. """Search reddits by title and description."""
  646. q = SubredditSearchQuery(query)
  647. results, etime, spane = self._search(q, num=num, reverse=reverse,
  648. after=after, count=count,
  649. skip_deleted_authors=False)
  650. res = SubredditsPage(content=spane,
  651. prev_search=query,
  652. elapsed_time=etime,
  653. num_results=results.hits,
  654. # update if we ever add sorts
  655. search_params={},
  656. title=_("search results"),
  657. simple=True).render()
  658. return res
    # Linked from the "invalid search query" infotext in GET_search.
    search_help_page = "/help/search"
    # Matches a comma-separated list of two-letter language codes, e.g. "en,de".
    verify_langs_regex = re.compile(r"\A[a-z][a-z](,[a-z][a-z])*\Z")
    @base_listing
    @validate(query=VLength('q', max_length=512),
              sort=VMenu('sort', SearchSortMenu, remember=False),
              restrict_sr=VBoolean('restrict_sr', default=False),
              syntax=VOneOf('syntax', options=SearchQuery.known_syntaxes))
    @api_doc(api_section.search, extensions=['json', 'xml'])
    def GET_search(self, query, num, reverse, after, count, sort, restrict_sr,
                   syntax):
        """Search links page.

        If the raw query is recognized as a URL, redirects to the submit
        page instead of searching.  On an InvalidQuery from the search
        backend, retries once with a punctuation-stripped query and shows a
        cleanup notice.
        """
        if query and '.' in query:
            url = sanitize_url(query, require_scheme = True)
            if url:
                return self.redirect("/submit" + query_string({'url':url}))
        # Unless restricted, search site-wide rather than the current sr.
        if not restrict_sr:
            site = DefaultSR()
        else:
            site = c.site
        if not syntax:
            syntax = SearchQuery.default_syntax
        try:
            cleanup_message = None
            try:
                q = SearchQuery(query, site, sort, syntax=syntax)
                results, etime, spane = self._search(q, num=num, after=after,
                                                     reverse=reverse,
                                                     count=count)
            except InvalidQuery:
                # Clean the search of characters that might be causing the
                # InvalidQuery exception. If the cleaned search boils down
                # to an empty string, the search code is expected to bail
                # out early with an empty result set.
                cleaned = re.sub("[^\w\s]+", " ", query)
                cleaned = cleaned.lower().strip()
                q = SearchQuery(cleaned, site, sort)
                results, etime, spane = self._search(q, num=num,
                                                     after=after,
                                                     reverse=reverse,
                                                     count=count)
                if cleaned:
                    cleanup_message = strings.invalid_search_query % {
                        "clean_query": cleaned
                    }
                    cleanup_message += " "
                    cleanup_message += strings.search_help % {
                        "search_help": self.search_help_page
                    }
                else:
                    cleanup_message = strings.completely_invalid_search_query
            res = SearchPage(_('search results'), query, etime, results.hits,
                             content=spane,
                             nav_menus=[SearchSortMenu(default=sort)],
                             search_params=dict(sort=sort),
                             infotext=cleanup_message,
                             simple=False, site=c.site,
                             restrict_sr=restrict_sr,
                             syntax=syntax,
                             converted_data=q.converted_data,
                             facets=results.subreddit_facets,
                             sort=sort,
                             ).render()
            return res
        # NOTE(review): SearchException appears to be a tuple of exception
        # classes (it is concatenated with (socket.error,)) -- confirm in
        # r2.lib.search.
        except SearchException + (socket.error,) as e:
            return self.search_fail(e)
    def _search(self, query_obj, num, after, reverse, count=0,
                skip_deleted_authors=True):
        """Helper function for interfacing with search. Basically a
        thin wrapper for SearchBuilder.

        Returns a (results, elapsed_seconds, rendered_listing) tuple.
        """
        builder = SearchBuilder(query_obj,
                                after = after, num = num, reverse = reverse,
                                count = count,
                                wrap = ListingController.builder_wrapper,
                                skip_deleted_authors=skip_deleted_authors)
        listing = LinkListing(builder, show_nums=True)
        # have to do it in two steps since total_num and timing are only
        # computed after fetch_more
        try:
            res = listing.listing()
        # NOTE(review): SearchException is concatenated with a tuple here,
        # so it is expected to be a tuple of exception classes -- confirm
        # in r2.lib.search.
        except SearchException + (socket.error,) as e:
            return self.search_fail(e)
        timing = time_module.time() - builder.start_time
        return builder.results, timing, res
  742. @validate(VAdmin(),
  743. comment = VCommentByID('comment_id'))
  744. def GET_comment_by_id(self, comment):
  745. href = comment.make_permalink_slow(context=5, anchor=True)
  746. return self.redirect(href)
    @validate(url = VRequired('url', None),
              title = VRequired('title', None),
              text = VRequired('text', None),
              selftext = VRequired('selftext', None),
              then = VOneOf('then', ('tb','comments'), default = 'comments'))
    def GET_submit(self, url, title, text, selftext, then):
        """Submit form.

        If the URL was already submitted (and ?resubmit is absent),
        redirects to the existing discussion (one match) or shows a
        "seen it" listing (several matches).
        """
        resubmit = request.get.get('resubmit')
        if url and not resubmit:
            # check to see if the url has already been submitted
            links = link_from_url(url)
            if links and len(links) == 1:
                return self.redirect(links[0].already_submitted_link)
            elif links:
                infotext = (strings.multiple_submitted
                            % links[0].resubmit_link())
                res = BoringPage(_("seen it"),
                                 content = wrap_links(links),
                                 infotext = infotext).render()
                return res
        # The form itself requires a logged-in user with submit rights.
        if not c.user_is_loggedin:
            raise UserRequiredException
        if not (c.default_sr or c.site.can_submit(c.user)):
            abort(403, "forbidden")
        captcha = Captcha() if c.user.needs_captcha() else None
        # Subreddits offered in the picker: the user's, else the defaults.
        sr_names = (Subreddit.submit_sr_names(c.user) or
                    Subreddit.submit_sr_names(None))
        never_show_self = request.get.get('no_self')
        return FormPage(_("submit"),
                        show_sidebar = True,
                        page_classes=['submit-page'],
                        content=NewLink(url=url or '',
                                        title=title or '',
                                        text=text or '',
                                        selftext=selftext or '',
                                        subreddits = sr_names,
                                        captcha=captcha,
                                        resubmit=resubmit,
                                        never_show_self = never_show_self,
                                        then = then)).render()
  787. def GET_frame(self):
  788. """used for cname support. makes a frame and
  789. puts the proper url as the frame source"""
  790. sub_domain = request.environ.get('sub_domain')
  791. original_path = request.environ.get('original_path')
  792. sr = Subreddit._by_domain(sub_domain)
  793. return Cnameframe(original_path, sr, sub_domain).render()
  794. def GET_framebuster(self, what = None, blah = None):
  795. """
  796. renders the contents of the iframe which, on a cname, checks
  797. if the user is currently logged into reddit.
  798. if this page is hit from the primary domain, redirects to the
  799. cnamed domain version of the site. If the user is logged in,
  800. this cnamed version will drop a boolean session cookie on that
  801. domain so that subsequent page reloads will be caught in
  802. middleware and a frame will be inserted around the content.
  803. If the user is not logged in, previous session cookies will be
  804. emptied so that subsequent refreshes will not be rendered in
  805. that pesky frame.
  806. """
  807. if not c.site.domain:
  808. return ""
  809. elif c.cname:
  810. return FrameBuster(login = (what == "login")).render()
  811. else:
  812. path = "/framebuster/"
  813. if c.user_is_loggedin:
  814. path += "login/"
  815. u = UrlParser(path + str(random.random()))
  816. u.mk_cname(require_frame = False, subreddit = c.site,
  817. port = request.port)
  818. return self.redirect(u.unparse())
  819. # the user is not logged in or there is no cname.
  820. return FrameBuster(login = False).render()
    def GET_catchall(self):
        """Fallback route: any unmatched request is a 404."""
        return self.abort404()
  823. @validate(period = VInt('seconds',
  824. min = sup.MIN_PERIOD,
  825. max = sup.MAX_PERIOD,
  826. default = sup.MIN_PERIOD))
  827. def GET_sup(self, period):
  828. #dont cache this, it's memoized elsewhere
  829. c.used_cache = True
  830. sup.set_expires_header()
  831. if c.extension == 'json':
  832. c.response.content = sup.sup_json(period)
  833. return c.response
  834. else:
  835. return self.abort404()
  836. @require_oauth2_scope("modtraffic")
  837. @validate(VTrafficViewer('article'),
  838. article = VLink('article'))
  839. def GET_traffic(self, article):
  840. content = trafficpages.PromotedLinkTraffic(article)
  841. if c.render_style == 'csv':
  842. c.response.content = content.as_csv()
  843. return c.response
  844. return LinkInfoPage(link=article,
  845. page_classes=["promoted-traffic"],
  846. comment=None,
  847. content=content).render()
  848. @validate(VTrafficViewer('link'),
  849. link=VLink('link'))
  850. def GET_promo_traffic(self, link):
  851. if link:
  852. content = trafficpages.PromoTraffic(link)
  853. if c.render_style == 'csv':
  854. c.response.content = content.as_csv()
  855. return c.response
  856. return LinkInfoPage(link=link,
  857. page_classes=["promo-traffic"],
  858. comment=None,
  859. content=content).render()
  860. else:
  861. return self.abort404()
  862. @validate(VSponsorAdmin())
  863. def GET_site_traffic(self):
  864. return trafficpages.SitewideTrafficPage().render()
  865. @validate(VSponsorAdmin())
  866. def GET_lang_traffic(self, langcode):
  867. return trafficpages.LanguageTrafficPage(langcode).render()
  868. @validate(VSponsorAdmin())
  869. def GET_advert_traffic(self, code):
  870. return trafficpages.AdvertTrafficPage(code).render()
  871. @validate(VUser())
  872. def GET_account_activity(self):
  873. return AccountActivityPage().render()
  874. def GET_rules(self):
  875. return BoringPage(_("rules of reddit"), show_sidebar=False,
  876. content=RulesPage(), page_classes=["rulespage-body"]
  877. ).render()
  878. @validate(vendor=VOneOf("v", ("claimed-gold", "claimed-creddits",
  879. "paypal", "google-checkout"),
  880. default="claimed-gold"))
  881. def GET_goldthanks(self, vendor):
  882. vendor_url = None
  883. vendor_claim_msg = _("thanks for buying reddit gold! your transaction "
  884. "has been completed and emailed to you. you can "
  885. "check the details by logging into your account "
  886. "at:")
  887. lounge_md = None
  888. if vendor == "claimed-gold":
  889. claim_msg = _("claimed! enjoy your reddit gold membership.")
  890. elif vendor == "claimed-creddits":
  891. claim_msg = _("your gold creddits have been claimed!")
  892. lounge_md = _("now go to someone's userpage and give "
  893. "them a present!")
  894. elif vendor == "paypal":
  895. claim_msg = vendor_claim_msg
  896. vendor_url = "https://www.paypal.com/us"
  897. elif vendor == "google-checkout":
  898. claim_msg = vendor_claim_msg
  899. vendor_url = "https://wallet.google.com/manage"
  900. else:
  901. abort(404)
  902. if g.lounge_reddit and not lounge_md:
  903. lounge_url = "/r/" + g.lounge_reddit
  904. lounge_md = strings.lounge_msg % {'link': lounge_url}
  905. return BoringPage(_("thanks"), show_sidebar=False,
  906. content=GoldThanks(claim_msg=claim_msg,
  907. vendor_url=vendor_url,
  908. lounge_md=lounge_md)).render()
  909. def GET_gold_info(self):
  910. return GoldInfoPage(_("gold"), show_sidebar=False).render()
  911. class FormsController(RedditController):
  912. def GET_password(self):
  913. """The 'what is my password' page"""
  914. return BoringPage(_("password"), content=Password()).render()
  915. @validate(VUser(),
  916. dest = VDestination(),
  917. reason = nop('reason'))
  918. def GET_verify(self, dest, reason):
  919. if c.user.email_verified:
  920. content = InfoBar(message = strings.email_verified)
  921. if dest:
  922. return self.redirect(dest)
  923. else:
  924. if reason == "submit":
  925. infomsg = strings.verify_email_submit
  926. else:
  927. infomsg = strings.verify_email
  928. content = PaneStack(
  929. [InfoBar(message = infomsg),
  930. PrefUpdate(email = True, verify = True,
  931. password = False)])
  932. return BoringPage(_("verify email"), content = content).render()
  933. @validate(VUser(),
  934. token=VOneTimeToken(EmailVerificationToken, "key"),
  935. dest=VDestination(default="/prefs/update"))
  936. def GET_verify_email(self, token, dest):
  937. if token and token.user_id != c.user._fullname:
  938. # wrong user. log them out and try again.
  939. self.logout()
  940. return self.redirect(request.fullpath)
  941. elif token and c.user.email_verified:
  942. # they've already verified. consume and ignore this token.
  943. token.consume()
  944. return self.redirect(dest)
  945. elif token and token.valid_for_user(c.user):
  946. # successful verification!
  947. token.consume()
  948. c.user.email_verified = True
  949. c.user._commit()
  950. Award.give_if_needed("verified_email", c.user)
  951. return self.redirect(dest)
  952. else:
  953. # failure. let 'em know.
  954. content = PaneStack(
  955. [InfoBar(message=strings.email_verify_failed),
  956. PrefUpdate(email=True,
  957. verify=True,
  958. password=False)])
  959. return BoringPage(_("verify email"), content=content).render()
  960. @validate(token=VOneTimeToken(PasswordResetToken, "key"),
  961. key=nop("key"))
  962. def GET_resetpassword(self, token, key):
  963. """page hit once a user has been sent a password reset email
  964. to verify their identity before allowing them to update their
  965. password."""
  966. #if another user is logged-in, log them out
  967. if c.user_is_loggedin:
  968. self.logout()
  969. return self.redirect(request.path)
  970. done = False
  971. if not key and request.referer:
  972. referer_path = request.referer.split(g.domain)[-1]
  973. done = referer_path.startswith(request.fullpath)
  974. elif not token:
  975. return self.redirect("/password?expired=true")
  976. return BoringPage(_("reset password"),
  977. content=ResetPassword(key=key, done=done)).render()
  978. @validate(VUser(),
  979. location = nop("location"))
  980. def GET_prefs(self, location=''):
  981. """Preference page"""
  982. content = None
  983. infotext = None
  984. if not location or location == 'options':
  985. content = PrefOptions(done=request.get.get('done'))
  986. elif location == 'friends':
  987. content = PaneStack()
  988. infotext = strings.friends % Friends.path
  989. content.append(FriendList())
  990. content.append(EnemyList())
  991. elif location == 'update':
  992. content = PrefUpdate()
  993. elif location == 'apps':
  994. content = PrefApps(my_apps=OAuth2Client._by_user(c.user),
  995. developed_apps=OAuth2Client._by_developer(c.user))
  996. elif location == 'feeds' and c.user.pref_private_feeds:
  997. content = PrefFeeds()
  998. elif location == 'delete':
  999. content = PrefDelete()
  1000. elif location == 'otp':
  1001. content = PrefOTP()
  1002. else:
  1003. return self.abort404()
  1004. return PrefsPage(content = content, infotext=infotext).render()
  1005. @validate(dest = VDestination())
  1006. def GET_login(self, dest):
  1007. """The /login form. No link to this page exists any more on
  1008. the site (all actions invoking it now go through the login
  1009. cover). However, this page is still used for logging the user
  1010. in during submission or voting from the bookmarklets."""
  1011. if (c.user_is_loggedin and
  1012. not request.environ.get('extension') == 'embed'):
  1013. return self.redirect(dest)
  1014. return LoginPage(dest = dest).render()
  1015. @validate(dest = VDestination())
  1016. def GET_register(self, dest):
  1017. if (c.user_is_loggedin and
  1018. not request.environ.get('extension') == 'embed'):
  1019. return self.redirect(dest)
  1020. return RegisterPage(dest = dest).render()
  1021. @validate(VUser(),
  1022. VModhash(),
  1023. dest = VDestination())
  1024. def GET_logout(self, dest):
  1025. return self.redirect(dest)
  1026. @validate(VUser(),
  1027. VModhash(),
  1028. dest = VDestination())
  1029. def POST_logout(self, dest):
  1030. """wipe login cookie and redirect to referer."""
  1031. self.logout()
  1032. return self.redirect(dest)
  1033. @validate(VUser(),
  1034. dest = VDestination())
  1035. def GET_adminon(self, dest):
  1036. """Enable admin interaction with site"""
  1037. #check like this because c.user_is_admin is still false
  1038. if not c.user.name in g.admins:
  1039. return self.abort404()
  1040. c.deny_frames = True
  1041. return AdminModeInterstitial(dest=dest).render()
  1042. @validate(VAdmin(),
  1043. dest = VDestination())
  1044. def GET_adminoff(self, dest):
  1045. """disable admin interaction with site."""
  1046. if not c.user.name in g.admins:
  1047. return self.abort404()
  1048. self.disable_admin_mode(c.user)
  1049. return self.redirect(dest)
  1050. def GET_validuser(self):
  1051. """checks login cookie to verify that a user is logged in and
  1052. returns their user name"""
  1053. c.response_content_type = 'text/plain'
  1054. if c.user_is_loggedin:
  1055. perm = str(c.user.can_wiki())
  1056. c.response.content = c.user.name + "," + perm
  1057. else:
  1058. c.response.con

Large files are truncated, but you can click here to view the full file