
/demisauce/demisauce/model/__init__.py

https://github.com/araddon/demisauce
Python | 334 lines
import logging, re, time, json, datetime
import traceback  # used in the ModelBase error handlers below
from tornado import escape
from sqlalchemy import create_engine
from sqlalchemy import Column, MetaData, Table, types
from sqlalchemy import engine, orm
from sqlalchemy.orm import mapper, relation
from sqlalchemy.orm import scoped_session, sessionmaker
from demisauce.model import meta
from tornado.options import define, options
from demisaucepy.serializer import SerializationMixin
import redis
from pythonsolr.pysolr import Solr
from gearman import GearmanClient
from demisaucepy import cache, cache_setup

log = logging.getLogger('demisauce.model')

CACHE_DURATION = 3600
def slugify(name):
    """
    Converts a string to a slug-ified string

    >>> slugify("aaron's good&*^ 89 stuff")
    'aarons-good-89-stuff'
    """
    name = name.lower()
    name = re.sub('( {1,100})', '-', name)    # runs of spaces become a single dash
    name = re.sub('[^a-z0-9\-]', '', name)    # drop anything that is not a-z, 0-9 or dash
    #name = re.sub('(-{2,50})','-',name)
    return name
class sqlalchemydb(object):
    """Per-request holder for the SqlAlchemy engine/session plus the
    redis, cache and gearman clients."""
    def __init__(self, engine=None, session=None, metadata=None,
                 redis_host="localhost", cache=None, gearman_client=None):
        self.engine = engine
        self._session_loaded = False
        self._session = session
        self.metadata = metadata
        self._redis_host = redis_host
        self._cache = cache
        self._gearman_client = gearman_client

    def save(self, instance):
        s = self._session()
        s.add(instance)
        s.commit()

    def get_session(self):
        if not self._session_loaded:
            self._session()
            self._session_loaded = True
        return self._session
    session = property(get_session)

    def get_redis(self):
        # lazily create the redis client on first use
        if hasattr(self, "_redis"):
            return self._redis
        else:
            self._redis = redis.Redis(host=self._redis_host, pooled=True)
            return self._redis
    redis = property(get_redis)

    def get_cache(self):
        return self._cache
    cache = property(get_cache)

    @property
    def gearman_client(self):
        return self._gearman_client

    def finish(self):
        """Called to commit and clean up the db session for this request"""
        #log.debug("sqlalchemydb: Finish -> session commit ")
        #self._session.commit()
        self._session.close()
def get_database(cache=None):
    """Brokers creation of the SqlAlchemy session
    using settings info, also sets host/ports for redis etc.
    Manages the session for SA.
    """
    gearman_client = GearmanClient(options.gearman_servers)
    #creator=connect, ,
    #pool_recycle=True # performance was horrible with this
    #cache_setup.load_memcache()
    meta.engine = create_engine(options.sqlalchemy_default_url,
                                echo=options.sqlalchemy_default_echo, pool_recycle=3600)
    log.debug("Setting up db with connection = %s" % options.sqlalchemy_default_url)
    sm = orm.sessionmaker(autoflush=True, bind=meta.engine)
    meta.DBSession = orm.scoped_session(sm)
    db = sqlalchemydb(engine=meta.engine,
                      session=meta.DBSession,
                      metadata=meta.metadata,
                      redis_host=options.redis_host,
                      cache=cache,
                      gearman_client=gearman_client)
    return db
def init_model(enginelocal):
    """
    Call me before using any of the tables or classes in the model.
    """
    sm = orm.sessionmaker(autoflush=True, bind=enginelocal)
    meta.engine = enginelocal
    meta.DBSession = orm.scoped_session(sm)
def setup_site(user):
    """Does the base site setup for a new account."""
    from demisauce.lib import slugify
    # 'service' is not imported in the original module; this assumes the App
    # model lives in demisauce.model.service
    from demisauce.model import service
    app = service.App(site_id=user.site_id, owner_id=user.id)
    app.slug = slugify(user.site.name)
    app.name = user.site.name
    app.base_url = user.site.base_url
    app.save()
def create_schema():
    # 'config' (a Pylons-era global) is not defined in this module
    #metadata.bind = config['pylons.g'].sa_engine
    print 'in create schema'
    meta.metadata.create_all(config['pylons.g'].sa_engine)
def make_key(key):
    """
    Converts a string title to a url safe key that is unique per site
    """
    import urllib
    return urllib.quote_plus(key.lower().replace(' ', '_'))

def reverse_key(key):
    """
    Converts a url safe key to the item in the db
    """
    import urllib
    return urllib.quote_plus(key.replace(' ', '_'))
class ModelBase(object):
    """
    Abstract base class implementing some shortcuts
    """
    _allowed_api_keys = []
    __all_schema_keys__ = None

    def __init__(self, **kwargs):
        self._is_cache = False
        self.is_new = False
        for key in kwargs:
            #if hasattr(self,key):
            setattr(self, key, kwargs[key])
        self.on_new()

    @orm.reconstructor
    def init_on_load(self):
        self.on_new()
        self._is_cache = False
        self.is_new = False

    def get_keys(self):
        # cache the mapped column names on the class the first time
        if hasattr(self.__class__, "__all_schema_keys__"):
            if self.__class__.__all_schema_keys__ is None:
                keys = [c.name for c in self.__class__.schema.c]
                self.__class__.__all_schema_keys__ = keys
            return self.__class__.__all_schema_keys__
        return []
    def on_new(self):
        pass

    def makekey(self, key):
        """
        Converts a string title to a url safe key that is unique per site
        """
        return make_key(key)

    def after_load(self):
        pass

    def isvalid(self):
        return True

    def save_extra(self):
        # serialize extra_json to a string before it is written to the db
        if hasattr(self, 'extra_json') and isinstance(self.extra_json, (list, dict)):
            json_str = json.dumps(self.extra_json)
            self.extra_json = json_str

    def save(self):
        if self.id > 0:
            self.is_new = False
        else:
            self.is_new = True
        self.save_extra()
        meta.DBSession.add(self)
        meta.DBSession.commit()
    @classmethod
    def all(cls, site_id=0):
        """Class method to get all,
        using the native SqlAlchemy query instead of the site_id specific one"""
        if site_id == -1:
            return meta.DBSession.query(cls).all()
        else:
            return meta.DBSession.query(cls).filter_by(site_id=site_id).all()
    @classmethod
    def get(cls, site_id=0, id=0):
        """Class method to get by id,
        using the native SqlAlchemy get instead of the site_id specific one"""
        #return meta.DBSession.query(cls).get(id)
        if site_id == -1:
            return meta.DBSession.query(cls).get(id)
        else:
            qry = meta.DBSession.query(cls).filter_by(site_id=site_id, id=id)
            #log.debug('in get: %s' % str(qry))
            return qry.first()

    @classmethod
    def saget(cls, id=0):
        """Class method to get by id"""
        return meta.DBSession.query(cls).get(id)
    def delete(self):
        try:
            log.debug("in delete %s" % self.__class__)
            meta.DBSession.delete(self)
            meta.DBSession.commit()
        except:
            meta.DBSession.rollback()  #.close()
            logging.error("Error in ModelBase.delete(): %s" % traceback.print_exc())

    def delete_cache(self):
        try:
            log.debug("in delete cache %s" % self.__class__)
            jsons = cache.cache.get(self.__class__.cache_key(self.id))
            if jsons and jsons.find("{") == -1:  # if not json, must be a cache key
                cache.cache.delete(self.__class__.cache_key(jsons))
            cache.cache.delete(self.__class__.cache_key(self.id))
        except:
            logging.error("Error in ModelBase.delete_cache(): %s" % traceback.print_exc())
    def update_cache(self, jsons=None):
        'Update and save json to mc'
        if not jsons:
            jsons = self.to_json()
        ptr = self.id
        if hasattr(self, 'pointer'):
            ptr = self.pointer
        if self.id != ptr:
            # store a pointer entry so lookups by id resolve to the real key
            cache.cache.set(self.__class__.cache_key(self.id), ptr)
        log.debug("saving key=%s, val=%s" % (self.__class__.cache_key(ptr), 'jsons'))
        cache.cache.set(self.__class__.cache_key(ptr), jsons, CACHE_DURATION)
        return jsons

    def after_load(self):
        pass
    @classmethod
    def cache_key(cls, id):
        'simple cache key of DS-classname-id'
        cls_name = str(cls)
        cls_name = cls_name[cls_name.rfind('.')+1:cls_name.rfind('\'')].lower()
        return re.sub('(\s)', '', str("DS-%s-%s" % (cls_name, id)))
    @classmethod
    def get_manymc(cls, ids):
        """Multi-get from cache by ids, falling back to the db for misses."""
        cls_name = str(cls)
        cls_name = cls_name[cls_name.rfind('.')+1:cls_name.rfind('\'')].lower()
        keys = [cls.cache_key(id) for id in ids]
        vals = cache.cache.get_many(keys)
        logging.debug("getmanymc = %s, %s" % (keys, 'vals'))
        objects = []
        for id in ids:
            key = cls.cache_key(id)
            if key in vals:
                val = vals[key]
                if val and val.find("{") == -1:
                    # value is a pointer key, not json, so follow it
                    v = cache.cache.get(cls.cache_key(val))
                    if not v:
                        # pointed-to entry is gone; reload from the db by primary key
                        o = cls.saget(id)
                        if o:
                            v = o.update_cache()
                        else:
                            log.error("no item found for id = %s" % id)
                    #log.debug("id = %s, val=%s, k = %s, v=%s" % (id, val, cls.cache_key(val), v == None))
                    vals[key] = v
            else:
                o = cls.saget(id)
                if o:
                    logging.debug("Updating Cache in %s - %s" % (key, id))
                    jsons = o.update_cache()
                    vals.update({key: jsons})
                else:
                    log.error("wtf? no o? key = %s id=%s" % (key, id))
        for k, v in vals.iteritems():
            if v in (None, ''):
                log.error("missing cache entry for %s" % k)
            obj = cls().from_json(v)
            objects.append(obj)
        return objects
    @classmethod
    def get_cache(cls, id=0, reload=False):
        """Class method to get from cache by id"""
        jsons = None
        cache_key = cls.cache_key(id)
        if reload == True:
            o = cls.saget(id)
        else:
            jsons = cache.cache.get(cls.cache_key(id))
            if jsons and jsons.find("{") == -1:  # if not json, must be a cache key
                log.debug('jsons find == -1 %s' % jsons)
                try:
                    cache_key = cls.cache_key(jsons)
                    jsons = cache.cache.get(cache_key)
                except:
                    logging.error("Error in ModelBase.get_cache(): %s" % cache_key)
            if not jsons:
                o = cls.saget(id)
        if not jsons and o:
            log.debug('cache not found, updating cache %s' % (cache_key))
            jsons = o.update_cache()
        if jsons:
            o = cls()
            o.from_json(jsons)
            o._is_cache = True
        else:
            logging.error("no jsons? %s" % cache_key)
        return o
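
For reference, below is a minimal standalone sketch of the pointer-key caching scheme that update_cache() and get_cache() implement: objects are stored under "DS-classname-pointer", and when an object's id differs from its pointer the id key stores only the pointer string, which readers detect (no "{", so not json) and follow. A plain dict stands in for the memcache client, and the names "comment" and "my-slug" are hypothetical, not part of demisauce.

# Illustrative sketch only: a dict stands in for cache.cache, and the values
# below ("comment", "my-slug") are made up for the example.
import json, re

CACHE = {}

def cache_key(cls_name, id):
    # mirrors ModelBase.cache_key: "DS-<classname>-<id>" with whitespace stripped
    return re.sub(r'\s', '', "DS-%s-%s" % (cls_name.lower(), id))

def update_cache(cls_name, id, pointer, obj):
    jsons = json.dumps(obj)
    if id != pointer:
        # the id key holds only the pointer value, never json
        CACHE[cache_key(cls_name, id)] = str(pointer)
    CACHE[cache_key(cls_name, pointer)] = jsons
    return jsons

def get_cache(cls_name, id):
    val = CACHE.get(cache_key(cls_name, id))
    if val and val.find("{") == -1:   # no "{" means it is a pointer key, not json
        val = CACHE.get(cache_key(cls_name, val))
    return json.loads(val) if val else None

update_cache("comment", id=5, pointer="my-slug", obj={"id": 5, "body": "hi"})
print(get_cache("comment", 5))   # DS-comment-5 points at DS-comment-my-slug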