
/gluon/dal.py

https://github.com/gokceneraslan/web2py
Python | 10770 lines | 10501 code | 101 blank | 168 comment | 129 complexity | MD5: 844dc509b81445288502e2e1825430d5
Possible License(s): BSD-2-Clause, MIT, BSD-3-Clause

Large files are truncated; see the repository for the full file.

#!/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
...          folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
...                            groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:
help(DAL)
help(Field)
"""

###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']
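
# Illustrative standalone-usage sketch (an assumption, not part of the file:
# it presumes this module is importable as 'dal' outside web2py, as the
# docstring above suggests):
#
#     from dal import DAL, Field
#     db = DAL('sqlite://storage.sqlite', folder='/tmp')
#     db.define_table('thing', Field('name'))
#     db.thing.insert(name='example')
#     db.commit()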

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
TIMINGSSIZE = 100

SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }

DEFAULT_URI = 'sqlite://dummy.db'

import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define','actual_name'))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable', 'orderby_on_limitby'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join

###################################################################################
# following checks allow the use of dal without web2py, as a standalone module
###################################################################################
try:
    from utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import json as simplejson
except ImportError:
    try:
        import gluon.contrib.simplejson as simplejson
    except ImportError:
        simplejson = None

try:
    import validators
    have_validators = True
except (ImportError, SyntaxError):
    have_validators = False

LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

# internal representation of tables with field
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)') # was '(?<=[\:\/])([^:@/]+)(?=@.+)'

# list of drivers will be built on the fly
# and lists only what is available
DRIVERS = []
try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key  ### for belongs on ID
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql because they haven't fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import contrib.pypyodbc as pyodbc
            except Exception, e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')
    #####
    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
        from org.sqlite import JDBC # required by java.sql; ensure we have it
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]

def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural
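
# The rules above apply in order, first match wins. For example:
# pluralize('child') -> 'children', pluralize('leaf') -> 'leaves',
# pluralize('city') -> 'cities', pluralize('person') -> 'persons' (default rule).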

def hide_password(uri):
    if isinstance(uri,(list,tuple)):
        return [hide_password(item) for item in uri]
    return REGEX_NOPASSWD.sub('******',uri)
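
# Sketch of the effect (the exact masked span is whatever REGEX_NOPASSWD
# matches): hide_password('mysql://user:secret@localhost/test') masks the
# credential portion, yielding something like 'mysql:******@localhost/test'.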

def OR(a,b):
    return a|b

def AND(a,b):
    return a&b

def IDENTITY(x): return x

def varquote_aux(name,quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name

def quote_keyword(a,keyword='timestamp'):
    regex = re.compile('\.keyword(?=\w)')
    a = regex.sub('."%s"' % keyword,a)
    return a

if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                    % self.name)

###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to cleanly close databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder; it has to exist already """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        if (os.path.isabs(self.folder) and
            isinstance(self, UseDatabaseStoredFile) and
            self.folder.startswith(os.getcwd())):
            self.folder = os.path.relpath(self.folder, os.getcwd())

        # Creating the folder if it does not exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters """
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection', None) != None:
            return

        if f is None:
            f = self.connector

        # if not hasattr(self, "driver") or self.driver is None:
        #     LOGGER.debug("Skipping connection since there's no driver")
        #     return

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
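
# Illustrative pooling flow (a sketch against the class above; the URI and
# pool_size=5 are arbitrary):
#
#     db = DAL('sqlite://storage.sqlite', pool_size=5)
#     db._adapter.close()      # commits, then parks the connection in POOLS[uri]
#     db._adapter.reconnect()  # pops it back and revalidates with 'SELECT 1;'
#
# Connections that fail the 'SELECT 1;' probe are silently discarded and the
# loop either tries the next pooled connection or opens a fresh one.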

###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):
    native_json = False
    driver = None
    driver_name = None
    drivers = () # list of drivers from which to pick
    connection = None
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True
    dbpath = None
    folder = None

    TRUE = 'T'
    FALSE = 'F'
    T_SEP = ' '
    QUOTE_TEMPLATE = '"%s"'

    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(...bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
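
    # These templates are filled with the field's attributes, e.g.
    # types['string'] % dict(length=512) -> 'CHAR(512)', and
    # types['reference'] % dict(foreign_key='person (id)',
    #                           on_delete_action='CASCADE')
    # -> 'INTEGER REFERENCES person (id) ON DELETE CASCADE'.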

    def isOperationalError(self,exception):
        if not hasattr(self.driver, "OperationalError"):
            return None
        return isinstance(exception, self.driver.OperationalError)

    def id_query(self, table):
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))
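
    # For example: adapt("it's") -> "'it''s'" (embedded single quotes doubled),
    # while smart_adapt(7) -> '7' and smart_adapt('x') -> "'x'".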

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def find_driver(self,adapter_args,uri=None):
        self.adapter_args = adapter_args
        if getattr(self,'driver',None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))
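
    # For example, the URI 'postgres:psycopg2://user:pw@host/db' requests the
    # 'psycopg2' driver explicitly; a bare 'postgres://...' URI falls back to
    # the first name in self.drivers that was successfully imported above.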

    def log(self, message, table=None):
        """ Logs migrations

        It will not log changes if logfile is not specified. Defaults
        to sql.log
        """
        isabs = None
        logfilename = self.adapter_args.get('logfile','sql.log')
        writelog = bool(logfilename)
        if writelog:
            isabs = os.path.isabs(logfilename)

        if table and table._dbt and writelog and self.folder:
            if isabs:
                table._loggername = logfilename
            else:
                table._loggername = pjoin(self.folder, logfilename)
            logfile = self.file_open(table._loggername, 'a')
            logfile.write(message)
            self.file_close(logfile)

    def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={},do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def varquote(self,name):
        return name

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                        constraint_name = constraint_name, # should be quoted
                                        foreign_key = '%s (%s)' % (rtablename,
                                                                   rfieldname),
                                        table_name = tablename,
                                        field_name = field_name,
                                        on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2 # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                                  (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                    table_name = tablename,
                    field_name=', '.join(fkeys),
                    foreign_table = rtablename,
                    foreign_key = ', '.join(pkeys),
                    on_delete_action = field.ondelete)

        if getattr(table,'_primarykey',None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)),other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                self.log('timestamp: %s\n%s\n'
                         % (datetime.datetime.today().isoformat(),
                            query), table)
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log('faked!\n', table)
                else:
                    self.log('success!\n', table)
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, None,
                                   fake_migrate=fake_migrate)
        return query
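
    # Note: with migrate=False the method returns the generated CREATE TABLE
    # statement without executing it or writing the <uri_hash>_<table>.table
    # metadata file. Illustrative call through the public API:
    #     db.define_table('person', Field('name'), migrate=False)
    # composes the SQL but leaves the database untouched.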

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):

        # logfile is deprecated (moved to adapter.log method)
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                   sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                             (tablename, key,
                              sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" %
                              dict(schema=schema, table=tablename, field=key,) ]
                elif self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                  and not (key in table.fields and
                           isinstance(table[key].type, SQLCustomType)) \
                  and not sql_fields[key]['type'].startswith('reference')\
                  and not sql_fields[key]['type'].startswith('double')\
                  and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if self.dbengine in ('firebird',):
                    drop_expr = 'ALTER TABLE %s DROP %s;'
                else:
                    drop_expr = 'ALTER TABLE %s DROP COLUMN %s;'
                key_tmp = key + '__tmp'
                query = ['ALTER TABLE %s ADD %s %s;' % (t, key_tmp, tt),
                         'UPDATE %s SET %s=%s;' % (t, key_tmp, key),
                         drop_expr % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s;' % (t, key, key_tmp),
                         drop_expr % (t, key_tmp)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                self.log('timestamp: %s\n'
                         % datetime.datetime.today().isoformat(), table)
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    self.log(sub_query + '\n', table)
                    if fake_migrate:
                        if db._adapter.commit_on_alter_table:
                            self.save_dbt(table,sql_fields_current)
                        self.log('faked!\n', table)
                    else:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow multiple alter table
                        # in one transaction so we must commit partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            self.save_dbt(table,sql_fields_current)
                            self.log('success!\n', table)
            elif metadata_change:
                self.save_dbt(table,sql_fields_current)

        if metadata_change and not (query and db._adapter.commit_on_alter_table):
            db.commit()
            self.save_dbt(table,sql_fields_current)
            self.log('success!\n', table)

    def save_dbt(self,table, sql_fields_current):
        tfile = self.file_open(table._dbt, 'w')
        pickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def LENGTH(self, first):
        return "LENGTH(%s)" % self.expand(first)

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                self.log(query + '\n', table)
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            self.log('success!\n', table)

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table
  1075. def _insert_empty(self, table):
  1076. return 'INSERT INTO %s DEFAULT VALUES;' % table
  1077. def insert(self, table, fields):
  1078. query = self._insert(table,fields)
  1079. try:
  1080. self.execute(query)
  1081. except Exception:
  1082. e = sys.exc_info()[1]
  1083. if hasattr(table,'_on_insert_error'):
  1084. return table._on_insert_error(table,fields,e)
  1085. raise e
  1086. if hasattr(table,'_primarykey'):
  1087. return dict([(k[0].name, k[1]) for k in fields \
  1088. if k[0].name in table._primarykey])
  1089. id = self.lastrowid(table)
  1090. if not isinstance(id,int):
  1091. return id
  1092. rid = Reference(id)
  1093. (rid._table, rid._record) = (table, None)
  1094. return rid
  1095. def bulk_insert(self, table, items):
  1096. return [self.insert(table,item) for item in items]
  1097. def NOT(self, first):
  1098. return '(NOT %s)' % self.expand(first)
  1099. def AND(self, first, second):
  1100. return '(%s AND %s)' % (self.expand(first), self.expand(second))
  1101. def OR(self, first, second):
  1102. return '(%s OR %s)' % (self.expand(first), self.expand(second))
  1103. def BELONGS(self, first, second):
  1104. if isinstance(second, str):
  1105. return '(%s IN (%s))' % (self.expand(first), second[:-1])
  1106. elif not second:
  1107. return '(1=0)'
  1108. items = ','.join(self.expand(item, first.type) for item in second)
  1109. return '(%s IN (%s))' % (self.expand(first), items)
  1110. def REGEXP(self, first, second):
  1111. "regular expression operator"
  1112. raise NotImplementedError
  1113. def LIKE(self, first, second):
  1114. "case sensitive like operator"
  1115. raise NotImplementedError
  1116. def ILIKE(self, first, second):
  1117. "case in-sensitive like operator"
  1118. return '(%s LIKE %s)' % (self.expand(first),
  1119. self.expand(second, 'string'))
  1120. def STARTSWITH(self, first, second):
  1121. return '(%s LIKE %s)' % (self.expand(first),
  1122. self.expand(second+'%', 'string'))
  1123. def ENDSWITH(self, first, second):
  1124. return '(%s LIKE %s)' % (self.expand(first),
  1125. self.expand('%'+second, 'string'))
  1126. def CONTAINS(self,first,second,case_sensitive=False):
  1127. if first.type in ('string','text', 'json'):
  1128. if isinstance(second,Expression):
  1129. second = Expression(None,self.CONCAT('%',Expression(
  1130. None,self.REPLACE(second,('%','%%'))),'%'))
  1131. else:
  1132. second = '%'+str(second).replace('%','%%')+'%'
  1133. elif first.type.startswith('list:'):
  1134. if isinstance(second,Expression):
  1135. second = Expression(None,self.CONCAT(
  1136. '%|',Expression(None,self.REPLACE(
  1137. Expression(None,self.REPLACE(
  1138. second,('%','%%'))),('|','||'))),'|%'))
  1139. else:
  1140. second = '%|'+str(second).replace('%','%%')\
  1141. .replace('|','||')+'|%'
  1142. op = case_sensitive and self.LIKE or self.ILIKE
  1143. return op(first,second)
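
    # For example, searching a 'list:string' field for 'ab|c' builds the
    # LIKE pattern '%|ab||c|%': items are stored pipe-delimited, so '|' is
    # escaped as '||' and '%' as '%%' before wrapping with '%|...|%'.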

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def is_numerical_type(self, ftype):
        return ftype in ('integer','boolean','double','bigint') or \
            ftype.startswith('decimal')

    def REPLACE(self, first, (second, third)):
        return 'REPLACE(%s,%s,%s)' % (self.expand(first,'string'),
                                      self.expand(second,'string'),
                                      self.expand(third,'string'))

    def CONCAT(self, *items):
        return '(%s)' % ' || '.join(self.expand(x,'string') for x in items)

    def ADD(self, first, second):
        if self.is_numerical_type(first.type):
            return '(%s + %s)' % (self.expand(first),
                                  self.expand(second, first.type))
        else:
            return self.CONCAT(first, second)

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first,

# ... file truncated here; see the full file in the repository.