
/gluon/dal.py

https://github.com/clach04/web2py
Python | 10541 lines

Large files are truncated; this listing shows only the beginning of the file.

#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions
This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)
Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
...          folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
...                            groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()
Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental

For more info:
help(DAL)
help(Field)
"""
###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 2**15 # not quite but reasonable default max char length

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}

TIMINGSSIZE = 100

SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }

DEFAULT_URI = 'sqlite://dummy.db'
import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[0]

if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str
CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define',))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join
###################################################################################
# following checks allow the use of dal without web2py, as a standalone module
###################################################################################

try:
    from utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import json as simplejson
except ImportError:
    try:
        import gluon.contrib.simplejson as simplejson
    except ImportError:
        simplejson = None

try:
    import validators
    have_validators = True
except (ImportError, SyntaxError):
    have_validators = False
LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

# internal representation of tables with field
#  <table>.<field>, tables and fields may only be [a-zA-Z0-9_]

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('(?<=\:)([^:@/]+)(?=@.+)')
# the list of drivers is built on the fly
# and lists only what is available
DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key  ### for belongs on ID
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql because they haven't fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import contrib.pypyodbc as pyodbc
            except Exception as e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')
    #####
    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
        from org.sqlite import JDBC # required by java.sql; ensure we have it
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')
PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural
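# Editor's illustration (not in the original file) of the rule table above:
# rules are tried in order and the first match wins, so the irregular forms
# at the top shadow the generic catch-alls at the bottom.
#
#     >>> pluralize('child')    # irregular: 'child$' -> 'children'
#     'children'
#     >>> pluralize('box')      # '[sxz]$' appends 'es'
#     'boxes'
#     >>> pluralize('city')     # consonant + 'y' -> 'ies'
#     'cities'
#     >>> pluralize('person')   # falls through to the '$' -> 's' default
#     'persons'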
def hide_password(uri):
    return REGEX_NOPASSWD.sub('******',uri)
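# Editor's note: REGEX_NOPASSWD (defined above) matches the credential
# segment between ':' and '@' in a DAL URI, so passwords are masked before
# URIs are logged. Illustrative:
#
#     >>> hide_password('mysql://scott:tiger@localhost/test')
#     'mysql://scott:******@localhost/test'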
def OR(a,b):
    return a|b

def AND(a,b):
    return a&b

def IDENTITY(x): return x

def varquote_aux(name,quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name
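# Editor's note: varquote_aux leaves plain identifiers alone and applies the
# adapter-specific quoting template only when the name needs it. Illustrative:
#
#     >>> varquote_aux('person')              # matches REGEX_W, untouched
#     'person'
#     >>> varquote_aux('user table', '"%s"')  # space forces quoting
#     '"user table"'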
if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                    % self.name)
###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')
        # Creating the folder if it does not exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters """
        pass
    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection',None) != None:
            return
        if f is None:
            f = self.connector

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
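    # Editor's note (illustrative, not part of the original file): pooling is
    # driven entirely by the pool_size argument of DAL(). With pool_size=0
    # every reconnect() opens a fresh connection; with pool_size>0, close()
    # parks connections in ConnectionPool.POOLS[uri] and reconnect() pops one
    # back out, re-validating it with 'SELECT 1;' before reuse.
    #
    #     db = DAL('postgres://scott:tiger@localhost/test', pool_size=10)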
###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):
    native_json = False
    driver = None
    driver_name = None
    drivers = () # list of drivers from which to pick
    connection = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True

    TRUE = 'T'
    FALSE = 'F'
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float':'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(...bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
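    # Editor's note: adapters render DDL by interpolating these templates with
    # each field's attributes; backend subclasses override entries as needed.
    # Illustrative:
    #
    #     >>> BaseAdapter.types['string'] % dict(length=512)
    #     'CHAR(512)'
    #     >>> BaseAdapter.types['reference'] % dict(foreign_key='person (id)',
    #     ...                                       on_delete_action='CASCADE')
    #     'INTEGER REFERENCES person (id) ON DELETE CASCADE'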
    def id_query(self, table):
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))
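    # Editor's note: adapt() does minimal SQL string quoting by doubling any
    # embedded single quotes; smart_adapt() leaves ints and floats bare.
    # Assuming `adapter` is any BaseAdapter instance (illustrative):
    #
    #     >>> adapter.adapt("O'Hara")
    #     "'O''Hara'"
    #     >>> adapter.smart_adapt(42)
    #     '42'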
    def integrity_error(self):
        return self.driver.IntegrityError

    def operational_error(self):
        return self.driver.OperationalError

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)
    def find_driver(self,adapter_args,uri=None):
        if getattr(self,'driver',None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))
    def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={},do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def varquote(self,name):
        return name
    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                        constraint_name = constraint_name, # should be quoted
                                        foreign_key = '%s (%s)' % (rtablename,
                                                                   rfieldname),
                                        table_name = tablename,
                                        field_name = field_name,
                                        on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2 # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type]\
                     % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                    table_name = tablename,
                    field_name=', '.join(fkeys),
                    foreign_table = rtablename,
                    foreign_key = ', '.join(pkeys),
                    on_delete_action = field.ondelete)

        if hasattr(table,'_primarykey'):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if table._dbt:
            table._loggername = pjoin(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
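    # Editor's note (illustrative, assuming the default SQLite-style type
    # map above): for
    #
    #     db = DAL('sqlite:memory')
    #     db.define_table('person', Field('name', length=64, notnull=True))
    #
    # create_table() builds roughly:
    #
    #     CREATE TABLE person(
    #         id INTEGER PRIMARY KEY AUTOINCREMENT,
    #         name CHAR(64) NOT NULL
    #     );
    #
    # and, when migrations are enabled, pickles the field metadata into a
    # <uri_hash>_person.table file next to sql.log for later comparison.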
    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                             (tablename, key,
                              sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) \
                        and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" % dict(schema=schema, table=tablename, field=key,) ]
                elif not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                  and not (key in table.fields and
                           isinstance(table[key].type, SQLCustomType)) \
                  and not sql_fields[key]['type'].startswith('reference')\
                  and not sql_fields[key]['type'].startswith('double')\
                  and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow multiple alter table
                        # in one transaction so we must commit partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            pickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            db.commit()
            tfile = self.file_open(table._dbt, 'w')
            pickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)
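    # Editor's note (illustrative): adding Field('age','integer') to the
    # 'person' table above makes sql_fields gain a key that is missing from
    # the pickled sql_fields_old, so the first branch emits
    #
    #     ALTER TABLE person ADD age INTEGER;
    #
    # while a changed column definition goes through the add-__tmp/copy/drop
    # sequence because most engines cannot alter a column type in place.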
    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key
    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table
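    # Editor's note: _insert() only renders SQL; execution happens in
    # insert() below. Given the 'person' table from the module docstring,
    # an illustrative rendering is:
    #
    #     >>> db.person._insert(name='James')
    #     "INSERT INTO person(name) VALUES ('James');"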
    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if isinstance(e,self.integrity_error_class()):
                return None
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]
    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)
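    # Editor's note: BELONGS backs Field.belongs(). An empty candidate list
    # degenerates to the always-false '(1=0)' instead of the invalid 'IN ()'.
    # Illustrative renderings for the 'person' table:
    #
    #     person.id.belongs([1, 2, 3])   ->  (person.id IN (1,2,3))
    #     person.id.belongs([])          ->  (1=0)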
    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        if isinstance(second,Expression):
            field = self.expand(first)
            expr = self.expand(second,'string')
            if first.type.startswith('list:'):
                expr = 'CONCAT("|", %s, "|")' % expr
            elif not first.type in ('string', 'text', 'json'):
                raise RuntimeError("Expression Not Supported")
            return 'INSTR(%s,%s)' % (field, expr)
        else:
            if first.type in ('string', 'text', 'json'):
                key = '%'+str(second).replace('%','%%')+'%'
            elif first.type.startswith('list:'):
                key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
            else:
                raise RuntimeError("Expression Not Supported")
            op = case_sensitive and self.LIKE or self.ILIKE
            return op(first,key)
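    # Editor's note: for list: fields the stored value is a '|'-separated
    # string (see REGEX_UNPACK above), so CONTAINS wraps the needle in '|'
    # delimiters before handing it to LIKE/ILIKE. Illustrative patterns:
    #
    #     text field,  contains('foo')  ->  LIKE '%foo%'
    #     list:string, contains('foo')  ->  LIKE '%|foo|%'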
    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        if use_common_filters(second):
            second = self.common_filter(second,[first._tablename])
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))
    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return '%s.%s' % (expression.tablename, expression.name)
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                return op(first, second, **optional_args)
            elif not first is None:
                return op(first,**optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                return '(%s)' % op
            else:
                return op()
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)
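    # Editor's note: expand() is the recursive serializer that turns a Query
    # tree into SQL by dispatching back into the operator methods above.
    # Illustrative, for the 'person' table from the module docstring:
    #
    #     (person.name=='James') & person.name.startswith('J')
    #
    # expands to:
    #
    #     ((person.name = 'James') AND (person.name LIKE 'J%'))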
    def alias(self, table, alias):
        """
        Given a t

Large files are truncated; the listing ends here.