PageRenderTime 66ms CodeModel.GetById 28ms RepoModel.GetById 1ms app.codeStats 0ms

/r2/r2/models/link.py

https://github.com/wangmxf/lesswrong
Python | 1234 lines | 997 code | 145 blank | 92 comment | 115 complexity | c2ece57404570ba3e308668a79754ca5 MD5 | raw file
Possible License(s): MPL-2.0-no-copyleft-exception, LGPL-2.1
  1. # The contents of this file are subject to the Common Public Attribution
  2. # License Version 1.0. (the "License"); you may not use this file except in
  3. # compliance with the License. You may obtain a copy of the License at
  4. # http://code.reddit.com/LICENSE. The License is based on the Mozilla Public
  5. # License Version 1.1, but Sections 14 and 15 have been added to cover use of
  6. # software over a computer network and provide for limited attribution for the
  7. # Original Developer. In addition, Exhibit A has been modified to be consistent
  8. # with Exhibit B.
  9. #
  10. # Software distributed under the License is distributed on an "AS IS" basis,
  11. # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for
  12. # the specific language governing rights and limitations under the License.
  13. #
  14. # The Original Code is Reddit.
  15. #
  16. # The Original Developer is the Initial Developer. The Initial Developer of the
  17. # Original Code is CondeNet, Inc.
  18. #
  19. # All portions of the code written by CondeNet are Copyright (c) 2006-2008
  20. # CondeNet, Inc. All Rights Reserved.
  21. ################################################################################
  22. from r2.lib.db.thing import Thing, Relation, NotFound, MultiRelation, \
  23. CreationError
  24. from r2.lib.utils import base_url, tup, domain, worker, title_to_url, \
  25. UrlParser, set_last_modified
  26. from account import Account
  27. from subreddit import Subreddit
  28. from printable import Printable
  29. import thing_changes as tc
  30. from r2.config import cache
  31. from r2.lib.memoize import memoize, clear_memo
  32. from r2.lib import utils
  33. from r2.lib.wiki import Wiki
  34. from mako.filters import url_escape
  35. from r2.lib.strings import strings, Score
  36. from r2.lib.db.operators import lower
  37. from r2.lib.db import operators
  38. from r2.lib.filters import _force_unicode
  39. from r2.models.subreddit import FakeSubreddit
  40. from r2.models.image_holder import ImageHolder
  41. from r2.models.poll import containspolls, parsepolls
  42. from pylons import c, g, request
  43. from pylons.i18n import ungettext
  44. import re
  45. import random
  46. import urllib
  47. from datetime import datetime
  48. class LinkExists(Exception): pass
# defining types
class Link(Thing, Printable, ImageHolder):
    """A user-submitted post (external link or self/article text).

    NOTE(review): Thing/Printable/ImageHolder are project base classes;
    persistence and render behaviour come from them.
    """
    # attributes stored as integers in the Thing data store
    _data_int_props = Thing._data_int_props + ('num_comments', 'reported')
    # defaults applied when an attribute is missing on an older row
    _defaults = dict(is_self = False,
                     reported = 0, num_comments = 0,
                     moderator_banned = False,
                     banned_before_moderator = False,
                     media_object = None,
                     has_thumbnail = False,
                     promoted = False,
                     promoted_subscribersonly = False,
                     promote_until = None,
                     promoted_by = None,
                     disable_comments = False,
                     ip = '0.0.0.0',
                     render_full = False,
                     images = None,
                     blessed = False,
                     comments_enabled = True,
                     notify_on_comment = False)
    # matches tag names consisting solely of whitespace (see add_tag)
    _only_whitespace = re.compile('^\s*$', re.UNICODE)
    # marker splitting an article into summary and "continued" parts
    _more_marker = '<a id="more"></a>'

    def __init__(self, *a, **kw):
        Thing.__init__(self, *a, **kw)
    @classmethod
    def by_url_key(cls, url):
        # Canonical cache key for a URL: base url, lowercased, utf8 bytes.
        return base_url(url.lower()).encode('utf8')

    @classmethod
    def _by_url(cls, url, sr):
        """Look up existing link(s) for `url` via the permacache.

        Returns the single matching Link when `sr` is given and one of the
        cached links belongs to it; otherwise the list of non-deleted links
        for that URL. Raises NotFound when nothing usable is cached.
        """
        from subreddit import Default
        if sr == Default:
            sr = None
        url = cls.by_url_key(url)
        link_ids = g.permacache.get(url)
        if link_ids:
            links = Link._byID(link_ids, data = True, return_dict = False)
            links = [l for l in links if not l._deleted]
            if links and sr:
                for link in links:
                    if sr._id == link.sr_id:
                        return link
            elif links:
                # NOTE(review): returns a *list* on this path, a single Link
                # above — callers must handle both shapes.
                return links
        raise NotFound, 'Link "%s"' % url
  93. def can_submit(self, user):
  94. if c.user_is_admin:
  95. return True
  96. else:
  97. sr = Subreddit._byID(self.sr_id, data=True)
  98. if sr.is_editor(c.user):
  99. return True
  100. elif self.author_id == c.user._id:
  101. # They can submit if they are the author and still have access
  102. # to the subreddit of the article
  103. return sr.can_submit(user)
  104. else:
  105. return False
  106. def is_blessed(self):
  107. return self.blessed
    def set_url_cache(self):
        # Register this link's id under its URL key so _by_url can find it.
        # 'self' posts are skipped: their url is the permalink, not external.
        if self.url != 'self':
            key = self.by_url_key(self.url)
            link_ids = g.permacache.get(key) or []
            if self._id not in link_ids:
                link_ids.append(self._id)
            g.permacache.set(key, link_ids)

    def update_url_cache(self, old_url):
        """Remove the old url from the by_url cache then update the
        cache with the new url."""
        if old_url != 'self':
            key = self.by_url_key(old_url)
            link_ids = g.permacache.get(key) or []
            while self._id in link_ids:
                link_ids.remove(self._id)
            g.permacache.set(key, link_ids)
        self.set_url_cache()
    @property
    def already_submitted_link(self):
        # Permalink flagged so the UI can report "already submitted".
        return self.make_permalink_slow() + '?already_submitted=true'

    def resubmit_link(self, sr_url = False):
        # Build a /submit URL preloaded with this link's URL; rooted at the
        # subreddit's path when sr_url is set, site root otherwise.
        submit_url = self.subreddit_slow.path if sr_url else '/'
        submit_url += 'submit?resubmit=true&url=' + url_escape(self.url)
        return submit_url
    @classmethod
    def _submit(cls, title, article, author, sr, ip, tags, spam = False, date = None, **kwargs):
        """Create, commit and wire up a new post.

        Commits twice: once to obtain an id, then again after rewriting the
        url to the permalink. Also populates the url cache and attaches tags.
        Returns the new Link.
        """
        # Create the Post and commit to db.
        l = cls(title = title,
                url = 'self',
                _spam = spam,
                author_id = author._id,
                sr_id = sr._id,
                lang = sr.lang,
                ip = ip,
                article = article,
                date = date,
                **kwargs
                )
        l._commit()
        # Now that the post id is known update the Post with the correct permalink.
        l.url = l.make_permalink_slow()
        l.is_self = True
        l._commit()
        # Parse and create polls in the article
        l.set_article(article)
        l.set_url_cache()
        # Add tags
        for tag in tags:
            l.add_tag(tag)
        return l
  158. def set_article(self, article):
  159. self.article = article
  160. self._commit()
  161. def _summary(self):
  162. if hasattr(self, 'article'):
  163. return self.article.split(self._more_marker)[0]
  164. def _has_more(self):
  165. if hasattr(self, 'article'):
  166. return self.article.find(self._more_marker) >= 0
  167. def _more(self):
  168. if hasattr(self, 'article'):
  169. return self.article.split(self._more_marker)[1]
    @classmethod
    def _somethinged(cls, rel, user, link, name):
        # Fast relation lookup; returns {(user, link, name): rel_or_None}.
        return rel._fast_query(tup(user), tup(link), name = name)

    def _something(self, rel, user, somethinged, name):
        # Create the named relation (save/hide/click). On a duplicate-row
        # race (CreationError) fall back to fetching the existing one.
        try:
            saved = rel(user, self, name=name)
            saved._commit()
            return saved
        except CreationError, e:
            return somethinged(user, self)[(user, self, name)]

    def _unsomething(self, user, somethinged, name):
        # Delete the named relation if present; returns it (or None).
        saved = somethinged(user, self)[(user, self, name)]
        if saved:
            saved._delete()
        return saved

    @classmethod
    def _saved(cls, user, link):
        return cls._somethinged(SaveHide, user, link, 'save')

    def _save(self, user):
        return self._something(SaveHide, user, self._saved, 'save')

    def _unsave(self, user):
        return self._unsomething(user, self._saved, 'save')

    @classmethod
    def _clicked(cls, user, link):
        return cls._somethinged(Click, user, link, 'click')
  195. def _updateClickFromObj(obj):
  196. obj = c[(user,self,'click')]
  197. obj._date = datetime.now(g.tz)
  198. obj._commit()
    def _tryUpdateClick(self, user):
        # Touch the existing click relation's date; False when none exists.
        obj = Link._clicked(user,self)[(user,self,'click')]
        if obj:
            obj._date = datetime.now(g.tz)
            obj._commit()
            return True
        return False

    def _click(self, user):
        """Record that `user` clicked this link (create or refresh the relation)."""
        if self._tryUpdateClick(user):
            return
        # No click in the db to update, try and create.
        try:
            saved = Click(user, self, name='click')
            saved._commit()
            return
        except CreationError, e:
            # This is for a possible race. It is possible the row in the db
            # has been created but the cache not updated yet. This explicitly
            # clears the cache then re-gets from the db
            g.log.info("Trying cache clear for lookup : "+str((user,self,'click')))
            Click._uncache(user, self, name='click')
            if self._tryUpdateClick(user):
                return
            raise Exception(user,self,e)
    def _getLastClickTime(self, user):
        # Click relation for (user, self), or None if never clicked.
        # NOTE: local `c` shadows the pylons context inside this method.
        c = Link._clicked(user,self)
        return c.get((user, self, 'click'))

    @classmethod
    def _hidden(cls, user, link):
        return cls._somethinged(SaveHide, user, link, 'hide')

    def _hide(self, user):
        return self._something(SaveHide, user, self._hidden, 'hide')

    def _unhide(self, user):
        return self._unsomething(user, self._hidden, 'hide')
    def keep_item(self, wrapped):
        """Listing filter: returning False hides this link from the viewer."""
        user = c.user if c.user_is_loggedin else None
        if not c.user_is_admin:
            # Spam is visible only to its author (and admins).
            if self._spam and (not user or
                               (user and self.author_id != user._id)):
                return False
            #author_karma = wrapped.author.link_karma
            #if author_karma <= 0 and random.randint(author_karma, 0) != 0:
            #return False
        if user:
            # Per-user preferences: hide up/down-voted, low-score, hidden items.
            if user.pref_hide_ups and wrapped.likes == True:
                return False
            if user.pref_hide_downs and wrapped.likes == False:
                return False
            if wrapped._score < user.pref_min_link_score:
                return False
            if wrapped.hidden:
                return False
        return True
    @staticmethod
    def cache_key(wrapped):
        """Render-cache key for a wrapped link; False disables caching."""
        # Admin views and poll-bearing links are never render-cached.
        if c.user_is_admin:
            return False
        if hasattr(wrapped, 'has_polls') and wrapped.has_polls:
            return False
        # Every input that can change the rendered output must be keyed on.
        s = (str(i) for i in (wrapped.render_class.__name__,
                              wrapped._fullname,
                              bool(c.user_is_sponsor),
                              bool(c.user_is_loggedin),
                              wrapped.subreddit == c.site,
                              c.user.pref_newwindow,
                              c.user.pref_frame,
                              c.user.pref_compress,
                              c.user.pref_media,
                              request.host,
                              c.cname,
                              wrapped.author == c.user,
                              wrapped.likes,
                              wrapped.saved,
                              wrapped.clicked,
                              wrapped.hidden,
                              wrapped.friend,
                              wrapped.show_spam,
                              wrapped.show_reports,
                              wrapped.can_ban,
                              wrapped.thumbnail,
                              wrapped.moderator_banned,
                              wrapped.render_full,
                              wrapped.comments_enabled,
                              wrapped.votable))
        # htmllite depends on other get params
        s = ''.join(s)
        if c.render_style == "htmllite":
            s += ''.join(map(str, [request.get.has_key('style'),
                                   request.get.has_key('expanded'),
                                   request.get.has_key('twocolumn'),
                                   c.bgcolor,
                                   c.bordercolor]))
        return s
    def make_permalink(self, sr, force_domain = False, sr_path = False):
        """Build the path/URL for this link.

        Relative when rendering on the same site; absolute (with domain) for
        cname / cross-subreddit cases or when force_domain is set.
        """
        from r2.lib.template_helpers import get_domain
        p = "lw/%s/%s/" % (self._id36, title_to_url(self.title))
        if c.default_sr and not sr_path:
            res = "/%s" % p
        elif sr and not c.cname:
            res = "/r/%s/%s" % (sr.name, p)
        elif sr != c.site or force_domain:
            res = "http://%s/%s" % (get_domain(cname = (c.cname and sr == c.site),
                                               subreddit = not c.cname), p)
        else:
            res = "/%s" % p
        return res

    def make_permalink_slow(self):
        # "slow" because it loads the subreddit from the db first.
        return self.make_permalink(self.subreddit_slow)

    @property
    def canonical_url(self):
        # Absolute URL on the main domain, ignoring cnames/subreddit paths.
        from r2.lib.template_helpers import get_domain
        p = "lw/%s/%s/" % (self._id36, title_to_url(self.title))
        return "http://%s/%s" % (get_domain(subreddit = False), p)
  312. @classmethod
  313. def add_props(cls, user, wrapped):
  314. from r2.lib.count import incr_counts
  315. from r2.lib.media import thumbnail_url
  316. from r2.lib.utils import timeago
  317. saved = Link._saved(user, wrapped) if user else {}
  318. hidden = Link._hidden(user, wrapped) if user else {}
  319. for item in wrapped:
  320. show_media = False
  321. if c.user.pref_compress:
  322. pass
  323. elif c.user.pref_media == 'on':
  324. show_media = True
  325. elif c.user.pref_media == 'subreddit' and item.subreddit.show_media:
  326. show_media = True
  327. elif (item.promoted
  328. and item.has_thumbnail
  329. and c.user.pref_media != 'off'):
  330. show_media = True
  331. if not show_media:
  332. item.thumbnail = ""
  333. elif item.has_thumbnail:
  334. item.thumbnail = thumbnail_url(item)
  335. else:
  336. item.thumbnail = g.default_thumb
  337. item.domain = (domain(item.url) if not item.is_self
  338. else 'self.' + item.subreddit.name)
  339. if not hasattr(item,'top_link'):
  340. item.top_link = False
  341. item.urlprefix = ''
  342. item.saved = bool(saved.get((user, item, 'save')))
  343. item.hidden = bool(hidden.get((user, item, 'hide')))
  344. # Only check "last clicked time" on demand. Otherwise it is expensive in big listings. TODO - refactor to use "_getLastClickedTime"
  345. def clicked():
  346. c = Link._clicked(user, wrapped) if user else {}
  347. return c.get((user, item, 'click'))
  348. item.clicked = clicked
  349. item.num = None
  350. item.score_fmt = Score.signed_number
  351. item.permalink = item.make_permalink(item.subreddit)
  352. if item.is_self:
  353. item.url = item.make_permalink(item.subreddit, force_domain = True)
  354. if c.user_is_admin:
  355. item.hide_score = False
  356. elif item.promoted:
  357. item.hide_score = True
  358. elif c.user == item.author:
  359. item.hide_score = False
  360. elif item._date > timeago("2 hours"):
  361. item.hide_score = True
  362. else:
  363. item.hide_score = False
  364. # Don't allow users to vote on their own posts and don't
  365. # allow users to vote on collapsed posts shown when
  366. # viewing comment permalinks.
  367. item.votable = bool(c.user != item.author and
  368. not getattr(item, 'for_comment_permalink', False))
  369. if c.user_is_loggedin and item.author._id == c.user._id:
  370. item.nofollow = False
  371. elif item.score <= 1 or item._spam or item.author._spam:
  372. item.nofollow = True
  373. else:
  374. item.nofollow = False
  375. if c.user_is_loggedin and item.subreddit.name == c.user.draft_sr_name:
  376. item.draft = True
  377. else:
  378. item.draft = False
  379. if c.user_is_loggedin:
  380. incr_counts(wrapped)
  381. @property
  382. def subreddit_slow(self):
  383. from subreddit import Subreddit
  384. """return's a link's subreddit. in most case the subreddit is already
  385. on the wrapped link (as .subreddit), and that should be used
  386. when possible. """
  387. return Subreddit._byID(self.sr_id, True, return_dict = False)
    def change_subreddit(self, new_sr_id):
        """Change the subreddit of the link and update its date"""
        if self.sr_id != new_sr_id:
            self.sr_id = new_sr_id
            self._date = datetime.now(g.tz)
            # The permalink embeds the subreddit, so the url must be rebuilt.
            self.url = self.make_permalink_slow()
            self._commit()
            # Comments must be in the same subreddit as the link that
            # the comments belong to. This is needed so that if a
            # comment is made on a draft link then when the link moves
            # to a public subreddit the comments also move and others
            # will be able to see and reply to the comment.
            for comment in Comment._query(Comment.c.link_id == self._id, data=True):
                comment.sr_id = new_sr_id
                comment._commit()
  403. def set_blessed(self, is_blessed):
  404. if self.blessed != is_blessed:
  405. self.blessed = is_blessed
  406. self._date = datetime.now(g.tz)
  407. self._commit()
    def add_tag(self, tag_name, name = 'tag'):
        """Adds a tag of the given name to the link. If the tag does not
        exist it is created.

        Returns the LinkTag relation, or None for whitespace-only names.
        """
        if self._only_whitespace.match(tag_name):
            # Don't allow an empty tag
            return
        try:
            tag = Tag._by_name(tag_name)
        except NotFound:
            tag = Tag._new(tag_name)
            tag._commit()
        # See if link already has this tag
        tags = LinkTag._fast_query(tup(self), tup(tag), name=name)
        link_tag = tags[(self, tag, name)]
        if not link_tag:
            link_tag = LinkTag(self, tag, name=name)
            link_tag._commit()
        return link_tag
    def remove_tag(self, tag_name, name='tag'):
        """Removes a tag from the link. The tag is not deleted,
        just the relationship between the link and the tag.

        Returns the deleted LinkTag relation, or False when the tag does
        not exist (None when the relation does not).
        """
        try:
            tag = Tag._by_name(tag_name)
        except NotFound:
            return False
        tags = LinkTag._fast_query(tup(self), tup(tag), name=name)
        link_tag = tags[(self, tag, name)]
        if link_tag:
            link_tag._delete()
        return link_tag
    def get_tags(self):
        """All Tag things attached to this link via live 'tag' relations."""
        q = LinkTag._query(LinkTag.c._thing1_id == self._id,
                           LinkTag.c._name == 'tag',
                           LinkTag.c._t2_deleted == False,
                           eager_load = True,
                           thing_data = not g.use_query_cache
                           )
        return [link_tag._thing2 for link_tag in q]
  446. def set_tags(self, tags):
  447. """Adds and/or removes tags to match the list given"""
  448. current_tags = set(self.tag_names())
  449. updated_tags = set(tags)
  450. removed_tags = current_tags.difference(updated_tags)
  451. new_tags = updated_tags.difference(current_tags)
  452. for tag in new_tags:
  453. self.add_tag(tag)
  454. for tag in removed_tags:
  455. self.remove_tag(tag)
  456. def tag_names(self):
  457. """Returns just the names of the tags of this article"""
  458. return [tag.name for tag in self.get_tags()]
  459. def get_sequence_names(self):
  460. """Returns the names of the sequences"""
  461. return Wiki().sequences_for_article_url(self.url).keys()
    def _next_link_for_tag(self, tag, sort):
        """Returns the adjacent link sharing `tag`, using the supplied sort.

        Runs raw SQL across the linktag relation and link thing/data tables;
        returns a Link or None.
        """
        from r2.lib.db import tdb_sql as tdb
        import sqlalchemy as sa
        # NOTE(review): despite the comment below, this is the single default
        # subreddit, not a list — confirm intent against Subreddit.default().
        # List of the subreddit ids this user has access to
        sr = Subreddit.default()
        # Get a reference to reddit_rel_linktag
        linktag_type = tdb.rel_types_id[LinkTag._type_id]
        linktag_thing_table = linktag_type.rel_table[0]
        # Get a reference to the reddit_thing_link & reddit_data_link tables
        link_type = tdb.types_id[Link._type_id]
        link_data_table = link_type.data_table[0]
        link_thing_table = link_type.thing_table
        # Subreddit subquery aliased as link_sr
        link_sr = sa.select([
            link_data_table.c.thing_id,
            sa.cast(link_data_table.c.value, sa.INT).label('sr_id')],
            link_data_table.c.key == 'sr_id').alias('link_sr')
        # Determine the date clause based on the sort order requested
        if isinstance(sort, operators.desc):
            date_clause = link_thing_table.c.date < self._date
            sort = sa.desc(link_thing_table.c.date)
        else:
            date_clause = link_thing_table.c.date > self._date
            sort = sa.asc(link_thing_table.c.date)
        # One row: the nearest non-spam, non-deleted tagged link in the
        # default subreddit on the requested side of self._date.
        query = sa.select([linktag_thing_table.c.thing1_id],
                          sa.and_(linktag_thing_table.c.thing2_id == tag._id,
                                  linktag_thing_table.c.thing1_id == link_sr.c.thing_id,
                                  linktag_thing_table.c.thing1_id == link_thing_table.c.thing_id,
                                  linktag_thing_table.c.name == 'tag',
                                  link_thing_table.c.spam == False,
                                  link_thing_table.c.deleted == False,
                                  date_clause,
                                  link_sr.c.sr_id == sr._id),
                          order_by = sort,
                          limit = 1)
        row = query.execute().fetchone()
        return Link._byID(row.thing1_id, data=True) if row else None
  500. def _link_for_query(self, query):
  501. """Returns a single Link result for the given query"""
  502. results = list(query)
  503. return results[0] if results else None
  504. # TODO: These navigation methods might be better in their own module
  505. def next_by_tag(self, tag):
  506. return self._next_link_for_tag(tag, operators.asc('_t1_date'))
  507. # TagNamesByTag.append(tag.name)
  508. # IndexesByTag.append(nextIndexByTag);
  509. # nextIndexByTag = nextIndexByTag + 1
  510. def prev_by_tag(self, tag):
  511. return self._next_link_for_tag(tag, operators.desc('_t1_date'))
  512. def next_in_sequence(self, sequence_name):
  513. sequence = Wiki().sequences_for_article_url(self.url).get(sequence_name)
  514. return sequence['next'] if sequence else None
  515. def prev_in_sequence(self, sequence_name):
  516. sequence = Wiki().sequences_for_article_url(self.url).get(sequence_name)
  517. return sequence['prev'] if sequence else None
    def _nav_query_date_clause(self, sort):
        # Date comparison matching the navigation direction implied by `sort`:
        # desc -> look earlier than self, asc -> look later.
        if isinstance(sort, operators.desc):
            date_clause = Link.c._date < self._date
        else:
            date_clause = Link.c._date > self._date
        return date_clause

    def _link_nav_query(self, clause = None, sort = None):
        # One-result query for prev/next navigation within the default
        # subreddit, skipping deleted/spam links; `clause` adds a filter.
        sr = Subreddit.default()
        q = Link._query(self._nav_query_date_clause(sort),
                        Link.c._deleted == False,
                        Link.c._spam == False,
                        Link.c.sr_id == sr._id,
                        limit = 1, sort = sort, data = True)
        if clause is not None:
            q._filter(clause)
        return q
  530. def next_by_author(self):
  531. q = self._link_nav_query(Link.c.author_id == self.author_id, operators.asc('_date'))
  532. return self._link_for_query(q)
  533. def prev_by_author(self):
  534. q = self._link_nav_query(Link.c.author_id == self.author_id, operators.desc('_date'))
  535. return self._link_for_query(q)
  536. def next_in_top(self):
  537. q = self._link_nav_query(Link.c.top_link == True, operators.asc('_date'))
  538. return self._link_for_query(q)
  539. def prev_in_top(self):
  540. q = self._link_nav_query(Link.c.top_link == True, operators.desc('_date'))
  541. return self._link_for_query(q)
  542. def next_in_promoted(self):
  543. q = self._link_nav_query(Link.c.blessed == True, operators.asc('_date'))
  544. return self._link_for_query(q)
  545. def prev_in_promoted(self):
  546. q = self._link_nav_query(Link.c.blessed == True, operators.desc('_date'))
  547. return self._link_for_query(q)
  548. def next_link(self):
  549. q = self._link_nav_query(sort = operators.asc('_date'))
  550. return self._link_for_query(q)
  551. def prev_link(self):
  552. q = self._link_nav_query(sort = operators.desc('_date'))
  553. return self._link_for_query(q)
    def _commit(self, *a, **kw):
        """Detect when we need to invalidate the sidebar recent posts.
        Whenever a post is created we need to invalidate. Also invalidate when
        various post attributes are changed (such as moving to a different
        subreddit). If the post cache is invalidated the comment one is too.
        This is primarily for when a post is banned so that its comments
        dissapear from the sidebar too.
        """
        # Evaluate before committing — Thing._commit presumably resets
        # _created/_dirties (TODO confirm against Thing).
        should_invalidate = (not self._created or
                             frozenset(('title', 'sr_id', '_deleted', '_spam')) & frozenset(self._dirties.keys()))
        Thing._commit(self, *a, **kw)
        if should_invalidate:
            g.rendercache.delete('side-posts' + '-' + c.site.name)
            g.rendercache.delete('side-comments' + '-' + c.site.name)
# Note that there are no instances of PromotedLink or LinkCompressed,
# so overriding their methods here will not change their behaviour
# (except for add_props). These classes are used to override the
# render_class on a Wrapped to change the template used for rendering
class PromotedLink(Link):
    _nodb = True

    @classmethod
    def add_props(cls, user, wrapped):
        # Base link props first, then sponsor-only "promoted by" labels.
        Link.add_props(user, wrapped)
        try:
            if c.user_is_sponsor:
                promoted_by_ids = set(x.promoted_by
                                      for x in wrapped
                                      if hasattr(x,'promoted_by'))
                promoted_by_accounts = Account._byID(promoted_by_ids,
                                                     data=True)
            else:
                promoted_by_accounts = {}
        except NotFound:
            # since this is just cosmetic, we can skip it altogether
            # if one isn't found or is broken
            promoted_by_accounts = {}
        for item in wrapped:
            # these are potentially paid for placement
            item.nofollow = True
            if item.promoted_by in promoted_by_accounts:
                item.promoted_by_name = promoted_by_accounts[item.promoted_by].name
            else:
                # keep the template from trying to read it
                item.promoted_by = None
  598. class LinkCompressed(Link):
  599. _nodb = True
  600. @classmethod
  601. def add_props(cls, user, wrapped):
  602. Link.add_props(user, wrapped)
  603. for item in wrapped:
  604. item.score_fmt = Score.points
  605. class InlineArticle(Link):
  606. """Exists to gain a different render_class in Wrapped"""
  607. _nodb = True
  608. class CommentPermalink(Link):
  609. """Exists to gain a different render_class in Wrapped"""
  610. _nodb = True
  611. class TagExists(Exception): pass
  612. class Tag(Thing):
  613. """A tag on a link/article"""
  614. @classmethod
  615. def _new(self, name, **kw):
  616. tag_name = name.lower()
  617. try:
  618. tag = Tag._by_name(tag_name)
  619. raise TagExists
  620. except NotFound:
  621. tag = Tag(name = tag_name, **kw)
  622. tag._commit()
  623. clear_memo('tag._by_name', Tag, name.lower())
  624. return tag
    @classmethod
    @memoize('tag._by_name')
    def _by_name_cache(cls, name):
        # Memoized name -> tag id lookup (case-insensitive SQL match).
        # Returns None (also cached) when no such tag exists.
        q = cls._query(lower(cls.c.name) == name.lower(), limit = 1)
        l = list(q)
        if l:
            return l[0]._id

    @classmethod
    def _by_name(cls, name):
        """Fetch a Tag by name (case-insensitive); raises NotFound."""
        #lower name here so there is only one cache
        name = name.lower()
        tag_id = cls._by_name_cache(name)
        if tag_id:
            return cls._byID(tag_id, True)
        else:
            raise NotFound, 'Tag %s' % name
  641. @property
  642. def path(self):
  643. """Returns the path to the tag listing for this tag"""
  644. quoted_tag_name = urllib.quote(self.name.encode('utf8'))
  645. if not c.default_sr:
  646. return "/r/%s/tag/%s/" % (c.site.name, quoted_tag_name)
  647. else:
  648. return "/tag/%s/" % (quoted_tag_name)
    @classmethod
    # @memoize('tag.tag_cloud_for_subreddits') enable when it is cleared at appropiate points
    def tag_cloud_for_subreddits(cls, sr_ids):
        """Weighted (tag, bucket) pairs for cloud display.

        Counts tag usage on non-spam links in the given subreddits, keeps
        tags used more than once (top 100 by count), then buckets them with
        make_cloud. Result is sorted by tag name before bucketing.
        """
        from r2.lib.db import tdb_sql as tdb
        import sqlalchemy as sa
        # reddit_rel_linktag relation table
        # NOTE: `type` shadows the builtin here.
        type = tdb.rel_types_id[LinkTag._type_id]
        linktag_thing_table = type.rel_table[0]
        # link thing/data tables
        link_type = tdb.types_id[Link._type_id]
        link_data_table = link_type.data_table[0]
        link_thing_table = link_type.thing_table
        # Subquery mapping link thing_id -> sr_id
        link_sr = sa.select([
            link_data_table.c.thing_id,
            sa.cast(link_data_table.c.value, sa.INT).label('sr_id')],
            link_data_table.c.key == 'sr_id').alias('link_sr')
        query = sa.select([linktag_thing_table.c.thing2_id,
                           sa.func.count(linktag_thing_table.c.thing1_id)],
                          sa.and_(linktag_thing_table.c.thing1_id == link_sr.c.thing_id,
                                  linktag_thing_table.c.thing1_id == link_thing_table.c.thing_id,
                                  link_thing_table.c.spam == False,
                                  link_sr.c.sr_id.in_(*sr_ids)),
                          group_by = [linktag_thing_table.c.thing2_id],
                          having = sa.func.count(linktag_thing_table.c.thing1_id) > 1,
                          order_by = sa.desc(sa.func.count(linktag_thing_table.c.thing1_id)),
                          limit = 100)
        rows = query.execute().fetchall()
        tags = []
        for result in rows:
            tag = Tag._byID(result.thing2_id, data=True)
            tags.append((tag, result.count))
        # Order by tag name
        tags.sort(key=lambda x: _force_unicode(x[0].name))
        return cls.make_cloud(10, tags)
    @classmethod
    def make_cloud(cls, steps, input):
        """Bucket (tag, count) pairs into `steps` logarithmic size bands.

        Returns a list of (tag, band) tuples with band in 1..steps; empty
        input yields []. NOTE: parameter `input` shadows the builtin.
        """
        # From: http://www.car-chase.net/2007/jan/16/log-based-tag-clouds-python/
        import math
        if len(input) <= 0:
            return []
        else:
            temp, newThresholds, results = [], [], []
            for item in input:
                temp.append(item[1])
            maxWeight = float(max(temp))
            minWeight = float(min(temp))
            newDelta = (maxWeight - minWeight)/float(steps)
            # Thresholds are logarithmic in the (offset) weight range.
            for i in range(steps + 1):
                newThresholds.append((100 * math.log((minWeight + i * newDelta) + 2), i))
            # Assign each tag the first band whose threshold covers its weight.
            for tag in input:
                fontSet = False
                for threshold in newThresholds[1:int(steps)+1]:
                    if (100 * math.log(tag[1] + 2)) <= threshold[0] and not fontSet:
                        results.append(tuple([tag[0], threshold[1]]))
                        fontSet = True
            return results
  703. class LinkTag(Relation(Link, Tag)):
  704. pass
class Comment(Thing, Printable):
    """A comment on a link/post."""
    # attributes stored as integers in the Thing data store
    _data_int_props = Thing._data_int_props + ('reported',)
    # defaults applied when an attribute is missing on an older row
    _defaults = dict(reported = 0,
                     moderator_banned = False,
                     banned_before_moderator = False,
                     is_html = False,
                     retracted = False,
                     show_response_to = False)

    def _markdown(self):
        # placeholder; body rendering is handled elsewhere
        pass

    def _delete(self):
        # Keep the parent link's comment counter in sync on delete.
        link = Link._byID(self.link_id, data = True)
        link._incr('num_comments', -1)
    @classmethod
    def _new(cls, author, link, parent, body, ip, spam = False, date = None):
        """Create and commit a comment.

        Bumps the link's comment count, sends notifications, invalidates
        caches and flags the search indexer. Returns (comment, inbox_rel).
        """
        comment = Comment(body = body,
                          link_id = link._id,
                          sr_id = link.sr_id,
                          author_id = author._id,
                          ip = ip,
                          date = date)
        comment._spam = spam
        #these props aren't relations
        if parent:
            comment.parent_id = parent._id
        comment._commit()
        link._incr('num_comments', 1)
        inbox_rel = comment._send_post_notifications(link, comment, parent)
        #clear that cache
        clear_memo('builder.link_comments2', link._id)
        # flag search indexer that something has changed
        tc.changed(comment)
        #update last modified
        set_last_modified(author, 'overview')
        set_last_modified(author, 'commented')
        set_last_modified(link, 'comments')
        #update the comment cache
        from r2.lib.comment_tree import add_comment
        add_comment(comment)
        return (comment, inbox_rel)
  745. def try_parent(self, func, default):
  746. """
  747. If this comment has a parent, return `func(parent)`; otherwise
  748. return `default`.
  749. """
  750. if getattr(self, 'parent_id', None) is not None:
  751. parent = type(self)._byID(self.parent_id)
  752. return func(parent)
  753. return default
    def _send_post_notifications(self, link, comment, parent):
        """Deliver an inbox notification for a new comment.

        Replies notify the parent comment's author; top-level comments
        notify the link author when they opted in (notify_on_comment) and
        did not write the comment themselves. Spam only notifies global
        admins. Returns the Inbox relation, or None when nothing is sent.
        """
        if parent:
            to = Account._byID(parent.author_id)
        else:
            if not link.notify_on_comment:
                return None
            elif comment.author_id != link.author_id:
                # Send notification if the comment wasn't by the link author
                to = Account._byID(link.author_id)
            else:
                return None
        # only global admins can be message spammed.
        if self._spam and to.name not in g.admins:
            return None
        return Inbox._add(to, self, 'inbox')
  769. def has_children(self):
  770. q = Comment._query(Comment.c.parent_id == self._id, limit=1)
  771. child = list(q)
  772. return len(child)>0
  773. def can_delete(self):
  774. if not self._loaded:
  775. self._load()
  776. return (c.user_is_loggedin and self.author_id == c.user._id and \
  777. self.retracted and not self.has_children())
  778. # Changes the body of this comment, parsing the new body for polls and
  779. # creating them if found, and commits.
  780. def set_body(self, body):
  781. self.has_polls = containspolls(body)
  782. self.body = parsepolls(body, self)
  783. self._commit()
  784. @property
  785. def subreddit_slow(self):
  786. from subreddit import Subreddit
  787. """return's a comments's subreddit. in most case the subreddit is already
  788. on the wrapped link (as .subreddit), and that should be used
  789. when possible. if sr_id does not exist, then use the parent link's"""
  790. self._safe_load()
  791. if hasattr(self, 'sr_id'):
  792. sr_id = self.sr_id
  793. else:
  794. l = Link._byID(self.link_id, True)
  795. sr_id = l.sr_id
  796. return Subreddit._byID(sr_id, True, return_dict = False)
  797. @property
  798. def collapse_in_link_threads(self):
  799. if c.user_is_admin:
  800. return False
  801. return self._score <= g.hide_comment_threshold
  802. @property
  803. def reply_costs_karma(self):
  804. if self._score <= g.downvoted_reply_score_threshold:
  805. return True
  806. return self.try_parent(lambda p: p.reply_costs_karma, False)
  807. def keep_item(self, wrapped):
  808. if c.user_is_admin:
  809. return True
  810. if self.collapse_in_link_threads:
  811. return False
  812. return self.try_parent(lambda p: p.keep_item(p), True)
  813. @staticmethod
  814. def cache_key(wrapped):
  815. if c.user_is_admin:
  816. return False
  817. if hasattr(wrapped, 'has_polls') and wrapped.has_polls:
  818. return False
  819. s = (str(i) for i in (c.profilepage,
  820. c.full_comment_listing,
  821. wrapped._fullname,
  822. bool(c.user_is_loggedin),
  823. c.focal_comment == wrapped._id36,
  824. request.host,
  825. c.cname,
  826. wrapped.author == c.user,
  827. wrapped.likes,
  828. wrapped.friend,
  829. wrapped.collapsed,
  830. wrapped.moderator_banned,
  831. wrapped.show_spam,
  832. wrapped.show_reports,
  833. wrapped.can_ban,
  834. wrapped.moderator_banned,
  835. wrapped.can_reply,
  836. wrapped.deleted,
  837. wrapped.is_html,
  838. wrapped.votable,
  839. wrapped.retracted,
  840. wrapped.can_be_deleted,
  841. wrapped.show_response_to))
  842. s = ''.join(s)
  843. return s
  844. def make_permalink(self, link, sr=None):
  845. return link.make_permalink(sr) + self._id36
  846. def make_anchored_permalink(self, link=None, sr=None, context=1, anchor=None):
  847. if link:
  848. permalink = UrlParser(self.make_permalink(link, sr))
  849. else:
  850. permalink = UrlParser(self.make_permalink_slow())
  851. permalink.update_query(context=context)
  852. permalink.fragment = anchor if anchor else self._id36
  853. return permalink.unparse()
  854. def make_permalink_slow(self):
  855. l = Link._byID(self.link_id, data=True)
  856. return self.make_permalink(l, l.subreddit_slow)
  857. def make_permalink_title(self, link):
  858. author = Account._byID(self.author_id, data=True).name
  859. params = {'author' : _force_unicode(author), 'title' : _force_unicode(link.title), 'site' : c.site.title}
  860. return strings.permalink_title % params
    @classmethod
    def add_props(cls, user, wrapped):
        """Annotate a batch of wrapped comments with everything the
        renderer needs: parent link/subreddit, parent-comment info,
        reply permission, votability, deleted/collapsed state, and the
        permalink."""
        #fetch parent links
        links = Link._byID(set(l.link_id for l in wrapped), True)
        #get srs for comments that don't have them (old comments)
        for cm in wrapped:
            if not hasattr(cm, 'sr_id'):
                cm.sr_id = links[cm.link_id].sr_id

        subreddits = Subreddit._byID(set(cm.sr_id for cm in wrapped),
                                     data=True, return_dict=False)
        can_reply_srs = set(s._id for s in subreddits if s.can_comment(user))

        min_score = c.user.pref_min_comment_score

        # map of comment _id -> wrapped comment, to detect parents that
        # appear on the same page
        cids = dict((w._id, w) for w in wrapped)

        for item in wrapped:
            item.link = links.get(item.link_id)
            if not hasattr(item, 'subreddit'):
                item.subreddit = item.subreddit_slow
            if hasattr(item, 'parent_id'):
                parent = Comment._byID(item.parent_id, data=True)
                parent_author = Account._byID(parent.author_id, data=True)
                item.parent_author = parent_author
                # parent on the same page: link with a bare fragment;
                # otherwise build a full anchored permalink
                if not c.full_comment_listing and cids.has_key(item.parent_id):
                    item.parent_permalink = '#' + utils.to36(item.parent_id)
                else:
                    item.parent_permalink = parent.make_anchored_permalink(item.link, item.subreddit)
            else:
                item.parent_permalink = None
                item.parent_author = None

            item.can_reply = (item.sr_id in can_reply_srs)

            # Don't allow users to vote on their own comments
            item.votable = bool(c.user != item.author and not item.retracted)

            # not deleted on profile pages,
            # deleted if spam and not author or admin
            item.deleted = (not c.profilepage and
                           (item._deleted or
                            (item._spam and
                             item.author != c.user and
                             not item.show_spam)))

            # don't collapse for admins, on profile pages, or if deleted
            item.collapsed = ((item.score < min_score) and
                             not (c.profilepage or
                                  item.deleted or
                                  c.user_is_admin))

            if not hasattr(item, 'editted'):
                item.editted = False

            #will get updated in builder
            item.num_children = 0
            item.score_fmt = Score.points
            item.permalink = item.make_permalink(item.link, item.subreddit)
            item.can_be_deleted = item.can_delete()
  911. def _commit(self, *a, **kw):
  912. """Detect when we need to invalidate the sidebar recent comments.
  913. Whenever a comment is created we need to invalidate. Also
  914. invalidate when various comment attributes are changed.
  915. """
  916. should_invalidate = (not self._created or
  917. frozenset(('body', '_deleted', '_spam')) & frozenset(self._dirties.keys()))
  918. Thing._commit(self, *a, **kw)
  919. if should_invalidate:
  920. g.rendercache.delete('side-comments' + '-' + c.site.name)
  921. class InlineComment(Comment):
  922. """Exists to gain a different render_class in Wrapped"""
  923. _nodb = True
  924. class MoreComments(object):
  925. show_spam = False
  926. show_reports = False
  927. is_special = False
  928. can_ban = False
  929. deleted = False
  930. rowstyle = 'even'
  931. reported = False
  932. collapsed = False
  933. author = None
  934. margin = 0
  935. @staticmethod
  936. def cache_key(item):
  937. return False
  938. def __init__(self, link, depth, parent=None):
  939. if parent:
  940. self.parent_id = parent._id
  941. self.parent_name = parent._fullname
  942. self.parent_permalink = parent.make_permalink(link,
  943. link.subreddit_slow)
  944. self.link_name = link._fullname
  945. self.link_id = link._id
  946. self.depth = depth
  947. self.children = []
  948. self.count = 0
  949. @property
  950. def _fullname(self):
  951. return self.children[0]._fullname if self.children else 't0_blah'
  952. @property
  953. def _id36(self):
  954. return self.children[0]._id36 if self.children else 't0_blah'
  955. class MoreRecursion(MoreComments):
  956. pass
  957. class MoreChildren(MoreComments):
  958. pass
  959. class Message(Thing, Printable):
  960. _defaults = dict(reported = 0,)
  961. _data_int_props = Thing._data_int_props + ('reported', )
  962. @classmethod
  963. def _new(cls, author, to, subject, body, ip, spam = False):
  964. m = Message(subject = subject,
  965. body = body,
  966. author_id = author._id,
  967. ip = ip)
  968. m._spam = spam
  969. m.to_id = to._id
  970. m._commit()
  971. #author = Author(author, m, 'author')
  972. #author._commit()
  973. # only global admins can be message spammed.
  974. inbox_rel = None
  975. if not m._spam or to.name in g.admins:
  976. inbox_rel = Inbox._add(to, m, 'inbox')
  977. return (m, inbox_rel)
  978. @classmethod
  979. def add_props(cls, user, wrapped):
  980. #TODO global-ish functions that shouldn't be here?
  981. #reset msgtime after this request
  982. msgtime = c.have_messages
  983. #load the "to" field if required
  984. to_ids = set(w.to_id for w in wrapped)
  985. tos = Account._byID(to_ids, True) if to_ids else {}
  986. for item in wrapped:
  987. item.to = tos[item.to_id]
  988. if msgtime and item._date >= msgtime:
  989. item.new = True
  990. else:
  991. item.new = False
  992. item.score_fmt = Score.none
  993. @staticmethod
  994. def cache_key(wrapped):
  995. #warning: inbox/sent messages
  996. #comments as messages
  997. return False
  998. def keep_item(self, wrapped):
  999. return True
  1000. class SaveHide(Relation(Account, Link)): pass
  1001. class Click(Relation(Account, Link)): pass
class Inbox(MultiRelation('inbox',
                          Relation(Account, Comment),
                          Relation(Account, Message))):
    # Multi-relation covering both comment and message deliveries to an
    # Account's inbox.

    @classmethod
    def _add(cls, to, obj, *a, **kw):
        # Create and commit the inbox relation linking recipient `to`
        # with the new comment/message `obj`, then stamp the recipient's
        # msgtime (used as the "new mail" watermark) when it isn't set.
        i = Inbox(to, obj, *a, **kw)
        i._commit()

        if not to._loaded:
            to._load()

        #if there is not msgtime, or it's false, set it
        if not hasattr(to, 'msgtime') or not to.msgtime:
            to.msgtime = obj._date
            to._commit()
        return i