
/gluon/dal.py

https://github.com/clach04/web2py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
... folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
... groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental

For more info:
help(DAL)
help(Field)
"""
###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 2**15 # not quite but reasonable default max char length

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}

TIMINGSSIZE = 100

SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }

DEFAULT_URI = 'sqlite://dummy.db'

import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define',))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join

###################################################################################
# following checks allow the use of dal without web2py, as a standalone module
###################################################################################
try:
    from utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import json as simplejson
except ImportError:
    try:
        import gluon.contrib.simplejson as simplejson
    except ImportError:
        simplejson = None

try:
    import validators
    have_validators = True
except (ImportError, SyntaxError):
    have_validators = False

LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

# internal representation of tables with field
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('(?<=\:)([^:@/]+)(?=@.+)')
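
# Editor's sketch of two of the regexes above in action (values are
# hypothetical, doctest-style, not part of the module):
#
# >>> REGEX_TABLE_DOT_FIELD.match('person.name').groups()
# ('person', 'name')
# >>> REGEX_NOPASSWD.sub('******', 'mysql://root:secret@localhost/test')
# 'mysql://root:******@localhost/test'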
# list of drivers will be built on the fly
# and lists only what is available
DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key  ### for belongs on ID
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql because they haven't fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLdb')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import contrib.pypyodbc as pyodbc
            except Exception:
                e = sys.exc_info()[1]
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')
    #####
    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
        from org.sqlite import JDBC # required by java.sql; ensure we have it
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no CouchDB driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]

def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural

def hide_password(uri):
    return REGEX_NOPASSWD.sub('******',uri)

def OR(a,b):
    return a|b

def AND(a,b):
    return a&b

def IDENTITY(x): return x

def varquote_aux(name,quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name
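
# Editor's sketch of the helpers above (hypothetical doctest-style values):
#
# >>> pluralize('person'), pluralize('box'), pluralize('leaf'), pluralize('city')
# ('persons', 'boxes', 'leaves', 'cities')
# >>> hide_password('mysql://root:secret@localhost/test')
# 'mysql://root:******@localhost/test'
# >>> varquote_aux('order table', quotestr='"%s"')  # quoted only when needed
# '"order table"'
# >>> varquote_aux('person', quotestr='"%s"')
# 'person'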
if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                        % self.name)
###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        # Creating the folder if it does not exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters"""
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection',None) != None:
            return
        if f is None:
            f = self.connector
        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
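
# Editor's note: a minimal sketch of how pooling behaves in practice, assuming
# a local SQLite URI (names here are illustrative only, not part of the module):
#
# >>> db = DAL('sqlite://storage.sqlite', pool_size=5)
# >>> db._adapter.close()      # connection is parked in ConnectionPool.POOLS
# >>> db._adapter.reconnect()  # pops the parked connection instead of dialing
#
# reconnect() probes pooled connections with 'SELECT 1;' (when
# check_active_connection is True) and silently discards any connection
# that the server has since dropped.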
###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):
    native_json = False
    driver = None
    driver_name = None
    drivers = () # list of drivers from which to pick
    connection = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True

    TRUE = 'T'
    FALSE = 'F'
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(...bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
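
    # Editor's sketch: each entry in `types` is a %-template filled in from
    # the Field definition, e.g. (hypothetical values):
    #
    # >>> BaseAdapter.types['string'] % dict(length=128)
    # 'CHAR(128)'
    # >>> BaseAdapter.types['reference'] % dict(foreign_key='person (id)',
    # ...                                       on_delete_action='CASCADE')
    # 'INTEGER REFERENCES person (id) ON DELETE CASCADE'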
    def id_query(self, table):
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))
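
    # Editor's sketch of the quoting helpers (hypothetical values):
    #
    # >>> adapter.adapt("O'Hara")   # single quotes are doubled for SQL
    # "'O''Hara'"
    # >>> adapter.smart_adapt(7)    # numbers pass through unquoted
    # '7'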
    def integrity_error(self):
        return self.driver.IntegrityError

    def operational_error(self):
        return self.driver.OperationalError

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def find_driver(self,adapter_args,uri=None):
        if getattr(self,'driver',None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))
    def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={},do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def varquote(self,name):
        return name

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                    constraint_name = constraint_name, # should be quoted
                                    foreign_key = '%s (%s)' % (rtablename,
                                                               rfieldname),
                                    table_name = tablename,
                                    field_name = field_name,
                                    on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2 # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                                      (field_type, field_name))
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                table_name = tablename,
                field_name=', '.join(fkeys),
                foreign_table = rtablename,
                foreign_key = ', '.join(pkeys),
                on_delete_action = field.ondelete)

        if hasattr(table,'_primarykey'):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if table._dbt:
            table._loggername = pjoin(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
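
    # Editor's sketch of create_table output for the docstring's example table
    # on SQLite (assuming the default string length of 512; exact whitespace
    # may differ):
    #
    # CREATE TABLE person(
    #     id INTEGER PRIMARY KEY AUTOINCREMENT,
    #     name CHAR(512)
    # );
    #
    # The same call also writes a <uri_hash>_person.table metadata file next
    # to the database, which later migrations diff against.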
    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):

        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                                 (tablename, key,
                                  sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) \
                        and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" % dict(schema=schema, table=tablename, field=key,) ]
                elif not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not (key in table.fields and
                             isinstance(table[key].type, SQLCustomType)) \
                    and not sql_fields[key]['type'].startswith('reference')\
                    and not sql_fields[key]['type'].startswith('double')\
                    and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow multiple alter table
                        # in one transaction so we must commit partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            pickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            db.commit()
            tfile = self.file_open(table._dbt, 'w')
            pickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)
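
    # Editor's sketch: adding Field('age','integer') to the existing 'person'
    # table would make migrate_table emit roughly (illustrative only):
    #
    # ALTER TABLE person ADD age INTEGER;
    #
    # while a changed (non-reference, non-double, non-id) column type is
    # rebuilt through a <name>__tmp column: ADD the tmp column, copy the data
    # over, DROP the original, re-ADD it with the new type, copy back, and
    # DROP the tmp column.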
    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table
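
    # Editor's sketch of the generated INSERT (hypothetical values, using the
    # docstring's person table):
    #
    # >>> db.person._insert(name='James')
    # "INSERT INTO person(name) VALUES ('James');"
    #
    # With no fields at all, _insert_empty yields
    # 'INSERT INTO person DEFAULT VALUES;'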
    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if isinstance(e,self.integrity_error_class()):
                return None
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self, first, second, case_sensitive=False):
        if isinstance(second,Expression):
            field = self.expand(first)
            expr = self.expand(second,'string')
            if first.type.startswith('list:'):
                expr = 'CONCAT("|", %s, "|")' % expr
            elif not first.type in ('string', 'text', 'json'):
                raise RuntimeError("Expression Not Supported")
            return 'INSTR(%s,%s)' % (field, expr)
        else:
            if first.type in ('string', 'text', 'json'):
                key = '%'+str(second).replace('%','%%')+'%'
            elif first.type.startswith('list:'):
                key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
            else:
                raise RuntimeError("Expression Not Supported")
            op = case_sensitive and self.LIKE or self.ILIKE
            return op(first,key)
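
    # Editor's sketch of the LIKE patterns CONTAINS builds (hypothetical):
    # on a 'string'/'text'/'json' field, contains('abc') yields the key
    # '%abc%'; on a 'list:string' field it becomes '%|abc|%', so that only
    # whole items between the '|' separators can match.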
    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        if use_common_filters(second):
            second = self.common_filter(second,[first._tablename])
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return '%s.%s' % (expression.tablename, expression.name)
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                return op(first, second, **optional_args)
            elif not first is None:
                return op(first,**optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                return '(%s)' % op
            else:
                return op()
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)
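
    # Editor's sketch: expand() recursively turns a Query tree into SQL text,
    # e.g. (assuming the docstring's person table; output illustrative):
    #
    # >>> adapter.expand(db.person.name == 'James')
    # "(person.name = 'James')"
    # >>> adapter.expand((db.person.id > 3) & (db.person.name != None))
    # '((person.id > 3) AND (person.name IS NOT NULL))'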
    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode= ' '):
        # Prepare functions "write_to_logfile" and "close_logfile"
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name,
                                     self.expand(value, field.type)) \
                              for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)
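
    # Editor's sketch of the generated statements (hypothetical values):
    #
    # >>> db(db.person.id == 1)._update(name='Jim')
    # "UPDATE person SET name='Jim' WHERE (person.id = 1);"
    # >>> db(db.person.id == 1)._delete()
    # 'DELETE FROM person WHERE (person.id = 1);'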
    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        ### special code to handle CASCADE in SQLite & SpatiaLite
        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]
        ### end special code to handle CASCADE in SQLite & SpatiaLite
        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        ### special code to handle CASCADE in SQLite & SpatiaLite
        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type=='reference '+table._tablename \
                        and field.ondelete=='CASCADE':
                    db(field.belongs(deleted)).delete()
        ### end special code to handle CASCADE in SQLite & SpatiaLite
        return counter

    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames)==1:
            return tablenames[0]
        elif len(tablenames)<1:
            raise RuntimeError("No table selected")
        else:
            raise RuntimeError("Too many tables selected")

    def expand_all(self, fields, tablenames):
        db = self.db
        new_fields = []
        append = new_fields.append
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            elif isinstance(item,str):
                if REGEX_TABLE_DOT_FIELD.match(item):
                    tablename,fieldname = item.split('.')
                    append(db[tablename][fieldname])
                else:
                    append(Expression(db,lambda item=item:item))
            else:
                append(item)
        # ## if no fields specified take them all from the requested tables
        if not new_fields:
            for table in tablenames:
                for field in db[table]:
                    append(field)
        return new_fields

    def _select(self, query, fields, attributes):
        tables = self.tables
        for key in set(attributes.keys())-SELECT_ARGS:
            raise SyntaxError('invalid select attribute: %s' % key)
        args_get = attributes.get
        tablenames = tables(query)
        for field in fields:
            if isinstance(field, basestring) \
                    and REGEX_TABLE_DOT_FIELD.match(field):
                tn,fn = field.split('.')
                field = self.db[tn][fn]
            for tablename in tables(field):
                if not tablename in tablenames:
                    tablenames.append(tablename)
        if len(tablenames) < 1:
            raise SyntaxError('Set: no tables selected')
        self._colnames = map(self.expand, fields)
        def geoexpand(field):
            if isinstance(field.type,str) and field.type.startswith('geometry'):
                field = field.st_astext()
            return self.expand(field)
        sql_f = ', '.join(map(geoexpand, fields))
        sql_o = ''
        sql_s = ''
        left = args_get('left', False)
        inner_join = args_get('join', False)
        distinct = args_get('distinct', False)
        groupby = args_get('groupby', False)
        orderby = args_get('orderby', False)
        having = args_get('having', False)
        limitby = args_get('limitby', False)
        for_update = args_get('for_update', False)
        if self.can_select_for_update is False and for_update is True:
            raise SyntaxError('invalid select attribute: for_update')
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join
                      if not isinstance(t,Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            itables_to_merge={} #issue 490
            [itables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in ijoinon]
            ijoinont = [t.first._tablename for t in ijoinon]
            [itables_to_merge.pop(t) for t in ijoinont
             if t in itables_to_merge] #issue 490
            iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
            iexcluded = [t for t in tablenames
                         if not t in iimportant_tablenames]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join
                     if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]
            #patch join+left patch (solves problem with ordering in left joins)
            tables_to_merge={}
            [tables_to_merge.update(
                    dict.fromkeys(tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames
                        if not t in important_tablenames ]
        else:
            excluded = tablenames
        if use_common_filters(query):
            query = self.common_filter(query,excluded)
        sql_w = ' WHERE ' + self.expand(query) if query else ''
        def alias(t):
            return str(self.db[t])
        if inner_join and not left:
            sql_t = ', '.join([alias(t) for t in iexcluded + \
                                   itables_to_merge.keys()])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
        elif not inner_join and left:
            sql_t = ', '.join([alias(t) for t in excluded + \
                                   tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        elif inner_join and left:
            all_tables_in_query = set(important_tablenames + \
                                      iimportant_tablenames + \
                                      tablenames)
            tables_in_joinon = set(joinont + ijoinont)
            tables_not_in_joinon = \
                all_tables_in_query.difference(tables_in_joinon)
            sql_t = ','.join([alias(t) for t in tables_not_in_joinon])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(['%s.%s'%(t,x) for t in tablenames for x in (hasattr(self.db[t],'_primarykey') and self.db[t]._primarykey or [self.db[t]._id.name])])
        # oracle does not support limitby
        sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
        if for_update and self.can_select_for_update is True:
            sql = sql.rstrip(';') + ' FOR UPDATE;'
        return sql

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % \
            (sql_s, sql_f, sql_t, sql_w, sql_o)
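
    # Editor's sketch: limitby=(lmin,lmax) selects rows [lmin,lmax), so
    # limitby=(10,30) appends ' LIMIT 20 OFFSET 10' here. Adapters for
    # engines without LIMIT/OFFSET syntax (e.g. Oracle, MSSQL) override
    # this method.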
  1448. def _fetchall(self):
  1449. return self.cursor.fetchall()
  1450. def _select_aux(self,sql,fields,attributes):
  1451. args_get = attributes.get
  1452. cache = args_get('cache',None)
  1453. if not cache:
  1454. self.execute(sql)
  1455. rows = self._fetchall()
  1456. else:
  1457. (cache_model, time_expire) = cache
  1458. key = self.uri + '/' + sql + '/rows'
  1459. if len(key)>200: key = hashlib_md5(key).hexdigest()
  1460. def _select_aux2():
  1461. self.execute(sql)
  1462. return self._fetchall()
  1463. rows = cache_model(key,_select_aux2,time_expire)
  1464. if isinstance(rows,tuple):
  1465. rows = list(rows)
  1466. limitby = args_get('limitby', None) or (0,)
  1467. rows = self.rowslice(rows,limitby[0],None)
  1468. processor = args_get('processor',self.parse)
  1469. cacheable = args_get('cacheable',False)
  1470. return processor(rows,fields,self._colnames,cacheable=cacheable)
  1471. def select(self, query, fields, attributes):
  1472. """
  1473. Always returns a Rows object, possibly empty.
  1474. """
  1475. sql = self._select(query, fields, attributes)
  1476. cache = attributes.get('cache', None)
  1477. if cache and attributes.get('cacheable',False):
  1478. del attributes['cache']
  1479. (cache_model, time_expire) = cache
  1480. key = self.uri + '/' + sql
  1481. if len(key)>200: key = hashlib_md5(key).hexdigest()
  1482. args = (sql,fields,attributes)
  1483. return cache_model(
  1484. key,
  1485. lambda self=self,args=args:self._select_aux(*args),
  1486. time_expire)
  1487. else:
  1488. return self._select_aux(sql,fields,attributes)
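# Usage sketch (assumes a web2py cache object such as cache.ram):
# >>> # rows = db(db.person).select(cache=(cache.ram,3600), cacheable=True)
# With cacheable=True the parsed Rows object itself is cached under a key
# derived from uri+sql; without it _select_aux caches only the raw driver
# rows (key uri+sql+'/rows') and re-parses them on every call.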
  1489. def _count(self, query, distinct=None):
  1490. tablenames = self.tables(query)
  1491. if query:
  1492. if use_common_filters(query):
  1493. query = self.common_filter(query, tablenames)
  1494. sql_w = ' WHERE ' + self.expand(query)
  1495. else:
  1496. sql_w = ''
  1497. sql_t = ','.join(tablenames)
  1498. if distinct:
  1499. if isinstance(distinct,(list, tuple)):
  1500. distinct = xorify(distinct)
  1501. sql_d = self.expand(distinct)
  1502. return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
  1503. (sql_d, sql_t, sql_w)
  1504. return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)
  1505. def count(self, query, distinct=None):
  1506. self.execute(self._count(query, distinct))
  1507. return self.cursor.fetchone()[0]
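# Illustrative output of _count (exact spacing may vary by adapter):
# >>> # db(db.person.id>0)._count() builds roughly
# >>> # "SELECT count(*) FROM person WHERE (person.id > 0);"
# >>> # and with distinct=db.person.name:
# >>> # "SELECT count(DISTINCT person.name) FROM person WHERE ...;"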
  1508. def tables(self, *queries):
  1509. tables = set()
  1510. for query in queries:
  1511. if isinstance(query, Field):
  1512. tables.add(query.tablename)
  1513. elif isinstance(query, (Expression, Query)):
  1514. if not query.first is None:
  1515. tables = tables.union(self.tables(query.first))
  1516. if not query.second is None:
  1517. tables = tables.union(self.tables(query.second))
  1518. return list(tables)
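# Sketch: tables() walks first/second of the expression tree, e.g.
# >>> # adapter.tables((db.person.name=='James')&(db.pet.owner==db.person.id))
# returns ['person', 'pet'] (in no guaranteed order: it is built from a set).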
  1519. def commit(self):
  1520. if self.connection: return self.connection.commit()
  1521. def rollback(self):
  1522. if self.connection: return self.connection.rollback()
  1523. def close_connection(self):
  1524. if self.connection: return self.connection.close()
  1525. def distributed_transaction_begin(self, key):
  1526. return
  1527. def prepare(self, key):
  1528. if self.connection: self.connection.prepare()
  1529. def commit_prepared(self, key):
  1530. if self.connection: self.connection.commit()
  1531. def rollback_prepared(self, key):
  1532. if self.connection: self.connection.rollback()
  1533. def concat_add(self, tablename):
  1534. return ', ADD '
  1535. def constraint_name(self, table, fieldname):
  1536. return '%s_%s__constraint' % (table,fieldname)
  1537. def create_sequence_and_triggers(self, query, table, **args):
  1538. self.execute(query)
  1539. def log_execute(self, *a, **b):
  1540. if not self.connection: return None
  1541. command = a[0]
  1542. if self.db._debug:
  1543. LOGGER.debug('SQL: %s' % command)
  1544. self.db._lastsql = command
  1545. t0 = time.time()
  1546. ret = self.cursor.execute(*a, **b)
  1547. self.db._timings.append((command,time.time()-t0))
  1548. del self.db._timings[:-TIMINGSSIZE]
  1549. return ret
  1550. def execute(self, *a, **b):
  1551. return self.log_execute(*a, **b)
  1552. def represent(self, obj, fieldtype):
  1553. field_is_type = fieldtype.startswith
  1554. if isinstance(obj, CALLABLETYPES):
  1555. obj = obj()
  1556. if isinstance(fieldtype, SQLCustomType):
  1557. value = fieldtype.encoder(obj)
  1558. if fieldtype.type in ('string','text', 'json'):
  1559. return self.adapt(value)
  1560. return value
  1561. if isinstance(obj, (Expression, Field)):
  1562. return str(obj)
  1563. if field_is_type('list:'):
  1564. if not obj:
  1565. obj = []
  1566. elif not isinstance(obj, (list, tuple)):
  1567. obj = [obj]
  1568. if field_is_type('list:string'):
  1569. obj = map(str,obj)
  1570. else:
  1571. obj = map(int,obj)
  1572. # we don't want to bar_encode json objects
  1573. if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
  1574. obj = bar_encode(obj)
  1575. if obj is None:
  1576. return 'NULL'
  1577. if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
  1578. return 'NULL'
  1579. r = self.represent_exceptions(obj, fieldtype)
  1580. if not r is None:
  1581. return r
  1582. if fieldtype == 'boolean':
  1583. if obj and not str(obj)[:1].upper() in '0F':
  1584. return self.smart_adapt(self.TRUE)
  1585. else:
  1586. return self.smart_adapt(self.FALSE)
  1587. if fieldtype == 'id' or fieldtype == 'integer':
  1588. return str(int(obj))
  1589. if field_is_type('decimal'):
  1590. return str(obj)
  1591. elif field_is_type('reference'): # reference
  1592. if fieldtype.find('.')>0:
  1593. return repr(obj)
  1594. elif isinstance(obj, (Row, Reference)):
  1595. return str(obj['id'])
  1596. return str(int(obj))
  1597. elif fieldtype == 'double':
  1598. return repr(float(obj))
  1599. if isinstance(obj, unicode):
  1600. obj = obj.encode(self.db_codec)
  1601. if fieldtype == 'blob':
  1602. obj = base64.b64encode(str(obj))
  1603. elif fieldtype == 'date':
  1604. if isinstance(obj, (datetime.date, datetime.datetime)):
  1605. obj = obj.isoformat()[:10]
  1606. else:
  1607. obj = str(obj)
  1608. elif fieldtype == 'datetime':
  1609. if isinstance(obj, datetime.datetime):
  1610. obj = obj.isoformat()[:19].replace('T',' ')
  1611. elif isinstance(obj, datetime.date):
  1612. obj = obj.isoformat()[:10]+' 00:00:00'
  1613. else:
  1614. obj = str(obj)
  1615. elif fieldtype == 'time':
  1616. if isinstance(obj, datetime.time):
1617. obj = obj.isoformat()[:8] # HH:MM:SS only; [:10] would keep stray microsecond digits
  1618. else:
  1619. obj = str(obj)
  1620. elif fieldtype == 'json':
  1621. if not self.native_json:
  1622. if have_serializers:
  1623. obj = serializers.json(obj)
  1624. elif simplejson:
1625. obj = simplejson.dumps(obj)
  1626. else:
  1627. raise RuntimeError("missing simplejson")
  1628. if not isinstance(obj,bytes):
  1629. obj = bytes(obj)
  1630. try:
  1631. obj.decode(self.db_codec)
  1632. except:
  1633. obj = obj.decode('latin1').encode(self.db_codec)
  1634. return self.adapt(obj)
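# A few illustrative conversions performed by represent above (the returned
# values are SQL literals; quoting assumes the default adapt):
# >>> # represent(True, 'boolean')  -> "'T'" on most adapters
# >>> # represent(None, 'string')   -> 'NULL'
# >>> # represent([1,2,3], 'list:integer') -> quoted bar-encoding "'|1|2|3|'"
# >>> # represent(datetime.date(2012,1,31), 'date') -> "'2012-01-31'"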
  1635. def represent_exceptions(self, obj, fieldtype):
  1636. return None
  1637. def lastrowid(self, table):
  1638. return None
  1639. def integrity_error_class(self):
  1640. return type(None)
  1641. def rowslice(self, rows, minimum=0, maximum=None):
  1642. """
  1643. By default this function does nothing;
  1644. overload when db does not do slicing.
  1645. """
  1646. return rows
  1647. def parse_value(self, value, field_type, blob_decode=True):
  1648. if field_type != 'blob' and isinstance(value, str):
  1649. try:
  1650. value = value.decode(self.db._db_codec)
  1651. except Exception:
  1652. pass
  1653. if isinstance(value, unicode):
  1654. value = value.encode('utf-8')
  1655. if isinstance(field_type, SQLCustomType):
  1656. value = field_type.decoder(value)
  1657. if not isinstance(field_type, str) or value is None:
  1658. return value
  1659. elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
  1660. return value
  1661. elif field_type.startswith('geo'):
  1662. return value
  1663. elif field_type == 'blob' and not blob_decode:
  1664. return value
  1665. else:
  1666. key = REGEX_TYPE.match(field_type).group(0)
  1667. return self.parsemap[key](value,field_type)
  1668. def parse_reference(self, value, field_type):
  1669. referee = field_type[10:].strip()
  1670. if not '.' in referee:
  1671. value = Reference(value)
  1672. value._table, value._record = self.db[referee], None
  1673. return value
  1674. def parse_boolean(self, value, field_type):
  1675. return value == True or str(value)[:1].lower() == 't'
  1676. def parse_date(self, value, field_type):
  1677. if isinstance(value, datetime.datetime):
  1678. return value.date()
  1679. if not isinstance(value, (datetime.date,datetime.datetime)):
  1680. (y, m, d) = map(int, str(value)[:10].strip().split('-'))
  1681. value = datetime.date(y, m, d)
  1682. return value
  1683. def parse_time(self, value, field_type):
  1684. if not isinstance(value, datetime.time):
1685. time_items = map(int,str(value)[:8].strip().split(':')[:3])
1686. while len(time_items) < 3: # pad missing minutes/seconds with 0
1687. time_items.append(0)
1688. (h, mi, s) = time_items
  1690. value = datetime.time(h, mi, s)
  1691. return value
  1692. def parse_datetime(self, value, field_type):
  1693. if not isinstance(value, datetime.datetime):
  1694. value = str(value)
  1695. date_part,time_part,timezone = value[:10],value[11:19],value[19:]
  1696. if '+' in timezone:
  1697. ms,tz = timezone.split('+')
  1698. h,m = tz.split(':')
  1699. dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
  1700. elif '-' in timezone:
  1701. ms,tz = timezone.split('-')
  1702. h,m = tz.split(':')
  1703. dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
  1704. else:
  1705. dt = None
  1706. (y, m, d) = map(int,date_part.split('-'))
  1707. time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
  1708. while len(time_parts)<3: time_parts.append(0)
  1709. time_items = map(int,time_parts)
  1710. (h, mi, s) = time_items
  1711. value = datetime.datetime(y, m, d, h, mi, s)
  1712. if dt:
  1713. value = value + dt
  1714. return value
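# Sketch with a hypothetical value: parse_datetime folds a trailing UTC
# offset into the naive result, e.g.
# >>> # parse_datetime('2012-05-01 12:00:00+02:00', 'datetime')
# yields datetime.datetime(2012, 5, 1, 14, 0): the +02:00 offset is added
# to, not stripped from, the parsed time.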
  1715. def parse_blob(self, value, field_type):
  1716. return base64.b64decode(str(value))
  1717. def parse_decimal(self, value, field_type):
  1718. decimals = int(field_type[8:-1].split(',')[-1])
  1719. if self.dbengine in ('sqlite', 'spatialite'):
  1720. value = ('%.' + str(decimals) + 'f') % value
  1721. if not isinstance(value, decimal.Decimal):
  1722. value = decimal.Decimal(str(value))
  1723. return value
  1724. def parse_list_integers(self, value, field_type):
  1725. if not self.dbengine=='google:datastore':
  1726. value = bar_decode_integer(value)
  1727. return value
  1728. def parse_list_references(self, value, field_type):
  1729. if not self.dbengine=='google:datastore':
  1730. value = bar_decode_integer(value)
  1731. return [self.parse_reference(r, field_type[5:]) for r in value]
  1732. def parse_list_strings(self, value, field_type):
  1733. if not self.dbengine=='google:datastore':
  1734. value = bar_decode_string(value)
  1735. return value
  1736. def parse_id(self, value, field_type):
  1737. return int(value)
  1738. def parse_integer(self, value, field_type):
  1739. return int(value)
  1740. def parse_double(self, value, field_type):
  1741. return float(value)
  1742. def parse_json(self, value, field_type):
  1743. if not self.native_json:
  1744. if not isinstance(value, basestring):
  1745. raise RuntimeError('json data not a string')
  1746. if isinstance(value, unicode):
  1747. value = value.encode('utf-8')
  1748. if have_serializers:
  1749. value = serializers.loads_json(value)
  1750. elif simplejson:
  1751. value = simplejson.loads(value)
  1752. else:
  1753. raise RuntimeError("missing simplejson")
  1754. return value
  1755. def build_parsemap(self):
  1756. self.parsemap = {
  1757. 'id':self.parse_id,
  1758. 'integer':self.parse_integer,
  1759. 'bigint':self.parse_integer,
  1760. 'float':self.parse_double,
  1761. 'double':self.parse_double,
  1762. 'reference':self.parse_reference,
  1763. 'boolean':self.parse_boolean,
  1764. 'date':self.parse_date,
  1765. 'time':self.parse_time,
  1766. 'datetime':self.parse_datetime,
  1767. 'blob':self.parse_blob,
  1768. 'decimal':self.parse_decimal,
  1769. 'json':self.parse_json,
  1770. 'list:integer':self.parse_list_integers,
  1771. 'list:reference':self.parse_list_references,
  1772. 'list:string':self.parse_list_strings,
  1773. }
  1774. def parse(self, rows, fields, colnames, blob_decode=True,
  1775. cacheable = False):
  1776. self.build_parsemap()
  1777. db = self.db
  1778. virtualtables = []
  1779. new_rows = []
  1780. tmps = []
  1781. for colname in colnames:
  1782. if not REGEX_TABLE_DOT_FIELD.match(colname):
  1783. tmps.append(None)
  1784. else:
  1785. (tablename, fieldname) = colname.split('.')
  1786. table = db[tablename]
  1787. field = table[fieldname]
  1788. ft = field.type
  1789. tmps.append((tablename,fieldname,table,field,ft))
  1790. for (i,row) in enumerate(rows):
  1791. new_row = Row()
  1792. for (j,colname) in enumerate(colnames):
  1793. value = row[j]
  1794. tmp = tmps[j]
  1795. if tmp:
  1796. (tablename,fieldname,table,field,ft) = tmp
  1797. if tablename in new_row:
  1798. colset = new_row[tablename]
  1799. else:
  1800. colset = new_row[tablename] = Row()
  1801. if tablename not in virtualtables:
  1802. virtualtables.append(tablename)
  1803. value = self.parse_value(value,ft,blob_decode)
  1804. if field.filter_out:
  1805. value = field.filter_out(value)
  1806. colset[fieldname] = value
  1807. # for backward compatibility
  1808. if ft=='id' and fieldname!='id' and \
  1809. not 'id' in table.fields:
  1810. colset['id'] = value
  1811. if ft == 'id' and not cacheable:
  1812. # temporary hack to deal with
  1813. # GoogleDatastoreAdapter
  1814. # references
  1815. if isinstance(self, GoogleDatastoreAdapter):
  1816. id = value.key().id_or_name()
  1817. colset[fieldname] = id
  1818. colset.gae_item = value
  1819. else:
  1820. id = value
  1821. colset.update_record = RecordUpdater(colset,table,id)
  1822. colset.delete_record = RecordDeleter(table,id)
  1823. for rfield in table._referenced_by:
  1824. referee_link = db._referee_name and \
  1825. db._referee_name % dict(
  1826. table=rfield.tablename,field=rfield.name)
  1827. if referee_link and not referee_link in colset:
  1828. colset[referee_link] = LazySet(rfield,id)
  1829. else:
  1830. if not '_extra' in new_row:
  1831. new_row['_extra'] = Row()
  1832. new_row['_extra'][colname] = \
  1833. self.parse_value(value,
  1834. fields[j].type,blob_decode)
  1835. new_column_name = \
  1836. REGEX_SELECT_AS_PARSER.search(colname)
  1837. if not new_column_name is None:
  1838. column_name = new_column_name.groups(0)
  1839. setattr(new_row,column_name[0],value)
  1840. new_rows.append(new_row)
  1841. rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
  1842. for tablename in virtualtables:
  1843. ### new style virtual fields
  1844. table = db[tablename]
  1845. fields_virtual = [(f,v) for (f,v) in table.iteritems()
  1846. if isinstance(v,FieldVirtual)]
  1847. fields_lazy = [(f,v) for (f,v) in table.iteritems()
  1848. if isinstance(v,FieldMethod)]
  1849. if fields_virtual or fields_lazy:
  1850. for row in rowsobj.records:
  1851. box = row[tablename]
  1852. for f,v in fields_virtual:
  1853. box[f] = v.f(row)
  1854. for f,v in fields_lazy:
  1855. box[f] = (v.handler or VirtualCommand)(v.f,row)
  1856. ### old style virtual fields
  1857. for item in table.virtualfields:
  1858. try:
  1859. rowsobj = rowsobj.setvirtualfields(**{tablename:item})
  1860. except (KeyError, AttributeError):
  1861. # to avoid breaking virtualfields when partial select
  1862. pass
  1863. return rowsobj
  1864. def common_filter(self, query, tablenames):
  1865. tenant_fieldname = self.db._request_tenant
  1866. for tablename in tablenames:
  1867. table = self.db[tablename]
  1868. # deal with user provided filters
  1869. if table._common_filter != None:
  1870. query = query & table._common_filter(query)
  1871. # deal with multi_tenant filters
  1872. if tenant_fieldname in table:
  1873. default = table[tenant_fieldname].default
  1874. if not default is None:
  1875. newquery = table[tenant_fieldname] == default
  1876. if query is None:
  1877. query = newquery
  1878. else:
  1879. query = query & newquery
  1880. return query
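# Usage sketch (hypothetical model code): a per-table common filter is
# AND-ed into every query touching that table:
# >>> # db.define_table('post', Field('deleted', 'boolean'),
# >>> #     common_filter = lambda query: db.post.deleted == False)
# Multi-tenant filtering works the same way via db._request_tenant defaults.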
  1881. def CASE(self,query,t,f):
  1882. def represent(x):
  1883. types = {type(True):'boolean',type(0):'integer',type(1.0):'double'}
  1884. if x is None: return 'NULL'
  1885. elif isinstance(x,Expression): return str(x)
  1886. else: return self.represent(x,types.get(type(x),'string'))
  1887. return Expression(self.db,'CASE WHEN %s THEN %s ELSE %s END' % \
  1888. (self.expand(query),represent(t),represent(f)))
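# Illustrative expansion only: CASE(query, t, f) with t=1, f=0 renders
# roughly as "CASE WHEN (person.name = 'James') THEN 1 ELSE 0 END".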
  1889. ###################################################################################
  1890. # List of all the available adapters; they all extend BaseAdapter.
  1891. ###################################################################################
  1892. class SQLiteAdapter(BaseAdapter):
  1893. drivers = ('sqlite2','sqlite3')
1894. can_select_for_update = None # emulated in select() below via BEGIN IMMEDIATE TRANSACTION
  1895. def EXTRACT(self,field,what):
  1896. return "web2py_extract('%s',%s)" % (what, self.expand(field))
  1897. @staticmethod
  1898. def web2py_extract(lookup, s):
  1899. table = {
  1900. 'year': (0, 4),
  1901. 'month': (5, 7),
  1902. 'day': (8, 10),
  1903. 'hour': (11, 13),
  1904. 'minute': (14, 16),
  1905. 'second': (17, 19),
  1906. }
  1907. try:
  1908. if lookup != 'epoch':
  1909. (i, j) = table[lookup]
  1910. return int(s[i:j])
  1911. else:
  1912. return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
  1913. except:
  1914. return None
  1915. @staticmethod
  1916. def web2py_regexp(expression, item):
  1917. return re.compile(expression).search(item) is not None
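# Quick sanity examples for the two SQLite UDFs registered in
# after_connection below (illustrative, not executed here):
# >>> # SQLiteAdapter.web2py_extract('month', '2013-07-04 11:12:13') -> 7
# >>> # SQLiteAdapter.web2py_extract('epoch', '2013-07-04 11:12:13')
# >>> #     -> a unix timestamp (float, interpreted as local time)
# >>> # SQLiteAdapter.web2py_regexp('^J.*', 'James') -> True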
  1918. def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
  1919. credential_decoder=IDENTITY, driver_args={},
  1920. adapter_args={}, do_connect=True, after_connection=None):
  1921. self.db = db
  1922. self.dbengine = "sqlite"
  1923. self.uri = uri
  1924. if do_connect: self.find_driver(adapter_args)
  1925. self.pool_size = 0
  1926. self.folder = folder
  1927. self.db_codec = db_codec
  1928. self._after_connection = after_connection
  1929. self.find_or_make_work_folder()
  1930. path_encoding = sys.getfilesystemencoding() \
  1931. or locale.getdefaultlocale()[1] or 'utf8'
  1932. if uri.startswith('sqlite:memory'):
  1933. dbpath = ':memory:'
  1934. else:
  1935. dbpath = uri.split('://',1)[1]
  1936. if dbpath[0] != '/':
  1937. if PYTHON_VERSION == 2:
  1938. dbpath = pjoin(
  1939. self.folder.decode(path_encoding).encode('utf8'), dbpath)
  1940. else:
  1941. dbpath = pjoin(self.folder, dbpath)
  1942. if not 'check_same_thread' in driver_args:
  1943. driver_args['check_same_thread'] = False
  1944. if not 'detect_types' in driver_args and do_connect:
  1945. driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
  1946. def connector(dbpath=dbpath, driver_args=driver_args):
  1947. return self.driver.Connection(dbpath, **driver_args)
  1948. self.connector = connector
  1949. if do_connect: self.reconnect()
  1950. def after_connection(self):
  1951. self.connection.create_function('web2py_extract', 2,
  1952. SQLiteAdapter.web2py_extract)
  1953. self.connection.create_function("REGEXP", 2,
  1954. SQLiteAdapter.web2py_regexp)
  1955. def _truncate(self, table, mode=''):
  1956. tablename = table._tablename
  1957. return ['DELETE FROM %s;' % tablename,
  1958. "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
  1959. def lastrowid(self, table):
  1960. return self.cursor.lastrowid
  1961. def REGEXP(self,first,second):
  1962. return '(%s REGEXP %s)' % (self.expand(first),
  1963. self.expand(second,'string'))
  1964. def select(self, query, fields, attributes):
  1965. """
  1966. Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
  1967. Note that the entire database, rather than one record, is locked
  1968. (it will be locked eventually anyway by the following UPDATE).
  1969. """
  1970. if attributes.get('for_update', False) and not 'cache' in attributes:
  1971. self.execute('BEGIN IMMEDIATE TRANSACTION;')
  1972. return super(SQLiteAdapter, self).select(query, fields, attributes)
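# Usage sketch: on SQLite, for_update locks the whole database, not a row:
# >>> # row = db(db.person.id==1).select(for_update=True).first()
# issues BEGIN IMMEDIATE TRANSACTION before the SELECT above.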
  1973. class SpatiaLiteAdapter(SQLiteAdapter):
  1974. drivers = ('sqlite3','sqlite2')
  1975. types = copy.copy(BaseAdapter.types)
  1976. types.update(geometry='GEOMETRY')
  1977. def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
  1978. credential_decoder=IDENTITY, driver_args={},
  1979. adapter_args={}, do_connect=True, srid=4326, after_connection=None):
  1980. self.db = db
  1981. self.dbengine = "spatialite"
  1982. self.uri = uri
  1983. if do_connect: self.find_driver(adapter_args)
  1984. self.pool_size = 0
  1985. self.folder = folder
  1986. self.db_codec = db_codec
  1987. self._after_connection = after_connection
  1988. self.find_or_make_work_folder()
  1989. self.srid = srid
  1990. path_encoding = sys.getfilesystemencoding() \
  1991. or locale.getdefaultlocale()[1] or 'utf8'
  1992. if uri.startswith('spatialite:memory'):
  1993. dbpath = ':memory:'
  1994. else:
  1995. dbpath = uri.split('://',1)[1]
  1996. if dbpath[0] != '/':
  1997. dbpath = pjoin(
  1998. self.folder.decode(path_encoding).encode('utf8'), dbpath)
  1999. if not 'check_same_thread' in driver_args:
  2000. driver_args['check_same_thread'] = False
  2001. if not 'detect_types' in driver_args and do_connect:
  2002. driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
  2003. def connector(dbpath=dbpath, driver_args=driver_args):
  2004. return self.driver.Connection(dbpath, **driver_args)
  2005. self.connector = connector
  2006. if do_connect: self.reconnect()
  2007. def after_connection(self):
  2008. self.connection.enable_load_extension(True)
  2009. # for Windows, rename libspatialite-2.dll to libspatialite.dll
  2010. # Linux uses libspatialite.so
  2011. # Mac OS X uses libspatialite.dylib
  2012. libspatialite = SPATIALLIBS[platform.system()]
2013. self.execute(r'SELECT load_extension("%s");' % libspatialite)
  2014. self.connection.create_function('web2py_extract', 2,
  2015. SQLiteAdapter.web2py_extract)
  2016. self.connection.create_function("REGEXP", 2,
  2017. SQLiteAdapter.web2py_regexp)
  2018. # GIS functions
  2019. def ST_ASGEOJSON(self, first, second):
  2020. return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
  2021. second['precision'], second['options'])
  2022. def ST_ASTEXT(self, first):
  2023. return 'AsText(%s)' %(self.expand(first))
  2024. def ST_CONTAINS(self, first, second):
  2025. return 'Contains(%s,%s)' %(self.expand(first),
  2026. self.expand(second, first.type))
  2027. def ST_DISTANCE(self, first, second):
  2028. return 'Distance(%s,%s)' %(self.expand(first),
  2029. self.expand(second, first.type))
  2030. def ST_EQUALS(self, first, second):
  2031. return 'Equals(%s,%s)' %(self.expand(first),
  2032. self.expand(second, first.type))
  2033. def ST_INTERSECTS(self, first, second):
  2034. return 'Intersects(%s,%s)' %(self.expand(first),
  2035. self.expand(second, first.type))
  2036. def ST_OVERLAPS(self, first, second):
  2037. return 'Overlaps(%s,%s)' %(self.expand(first),
  2038. self.expand(second, first.type))
  2039. def ST_SIMPLIFY(self, first, second):
  2040. return 'Simplify(%s,%s)' %(self.expand(first),
  2041. self.expand(second, 'double'))
  2042. def ST_TOUCHES(self, first, second):
  2043. return 'Touches(%s,%s)' %(self.expand(first),
  2044. self.expand(second, first.type))
  2045. def ST_WITHIN(self, first, second):
  2046. return 'Within(%s,%s)' %(self.expand(first),
  2047. self.expand(second, first.type))
  2048. def represent(self, obj, fieldtype):
  2049. field_is_type = fieldtype.startswith
  2050. if field_is_type('geo'):
  2051. srid = 4326 # Spatialite default srid for geometry
  2052. geotype, parms = fieldtype[:-1].split('(')
  2053. parms = parms.split(',')
  2054. if len(parms) >= 2:
  2055. schema, srid = parms[:2]
  2056. # if field_is_type('geometry'):
  2057. value = "ST_GeomFromText('%s',%s)" %(obj, srid)
  2058. # elif field_is_type('geography'):
  2059. # value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
  2060. # else:
  2061. # raise SyntaxError, 'Invalid field type %s' %fieldtype
  2062. return value
  2063. return BaseAdapter.represent(self, obj, fieldtype)
  2064. class JDBCSQLiteAdapter(SQLiteAdapter):
  2065. drivers = ('zxJDBC_sqlite',)
  2066. def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
  2067. credential_decoder=IDENTITY, driver_args={},
  2068. adapter_args={}, do_connect=True, after_connection=None):
  2069. self.db = db
  2070. self.dbengine = "sqlite"
  2071. self.uri = uri
  2072. if do_connect: self.find_driver(adapter_args)
  2073. self.pool_size = pool_size
  2074. self.folder = folder
  2075. self.db_codec = db_codec
  2076. self._after_connection = after_connection
  2077. self.find_or_make_work_folder()
  2078. path_encoding = sys.getfilesystemencoding() \
  2079. or locale.getdefaultlocale()[1] or 'utf8'
  2080. if uri.startswith('sqlite:memory'):
  2081. dbpath = ':memory:'
  2082. else:
  2083. dbpath = uri.split('://',1)[1]
  2084. if dbpath[0] != '/':
  2085. dbpath = pjoin(
  2086. self.folder.decode(path_encoding).encode('utf8'), dbpath)
  2087. def connector(dbpath=dbpath,driver_args=driver_args):
  2088. return self.driver.connect(
  2089. self.driver.getConnection('jdbc:sqlite:'+dbpath),
  2090. **driver_args)
  2091. self.connector = connector
  2092. if do_connect: self.reconnect()
  2093. def after_connection(self):
  2094. # FIXME http://www.zentus.com/sqlitejdbc/custom_functions.html for UDFs
  2095. self.connection.create_function('web2py_extract', 2,
  2096. SQLiteAdapter.web2py_extract)
  2097. def execute(self, a):
  2098. return self.log_execute(a)
  2099. class MySQLAdapter(BaseAdapter):
  2100. drivers = ('MySQLdb','pymysql')
  2101. maxcharlength = 255
  2102. commit_on_alter_table = True
  2103. support_distributed_transaction = True
  2104. types = {
  2105. 'boolean': 'CHAR(1)',
  2106. 'string': 'VARCHAR(%(length)s)',
  2107. 'text': 'LONGTEXT',
  2108. 'json': 'LONGTEXT',
  2109. 'password': 'VARCHAR(%(length)s)',
  2110. 'blob': 'LONGBLOB',
  2111. 'upload': 'VARCHAR(%(length)s)',
  2112. 'integer': 'INT',
  2113. 'bigint': 'BIGINT',
  2114. 'float': 'FLOAT',
  2115. 'double': 'DOUBLE',
  2116. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2117. 'date': 'DATE',
  2118. 'time': 'TIME',
  2119. 'datetime': 'DATETIME',
  2120. 'id': 'INT AUTO_INCREMENT NOT NULL',
  2121. 'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2122. 'list:integer': 'LONGTEXT',
  2123. 'list:string': 'LONGTEXT',
  2124. 'list:reference': 'LONGTEXT',
  2125. 'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
  2126. 'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2127. }
  2128. def varquote(self,name):
  2129. return varquote_aux(name,'`%s`')
  2130. def RANDOM(self):
  2131. return 'RAND()'
  2132. def SUBSTRING(self,field,parameters):
  2133. return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
  2134. parameters[0], parameters[1])
  2135. def EPOCH(self, first):
  2136. return "UNIX_TIMESTAMP(%s)" % self.expand(first)
  2137. def REGEXP(self,first,second):
  2138. return '(%s REGEXP %s)' % (self.expand(first),
  2139. self.expand(second,'string'))
  2140. def _drop(self,table,mode):
2141. # breaks referential integrity, but without it mysql will not drop tables referenced by foreign keys
  2142. return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,
  2143. 'SET FOREIGN_KEY_CHECKS=1;']
  2144. def distributed_transaction_begin(self,key):
  2145. self.execute('XA START;')
  2146. def prepare(self,key):
  2147. self.execute("XA END;")
  2148. self.execute("XA PREPARE;")
2149. def commit_prepared(self,key):
  2150. self.execute("XA COMMIT;")
  2151. def rollback_prepared(self,key):
  2152. self.execute("XA ROLLBACK;")
  2153. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
  2154. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2155. credential_decoder=IDENTITY, driver_args={},
  2156. adapter_args={}, do_connect=True, after_connection=None):
  2157. self.db = db
  2158. self.dbengine = "mysql"
  2159. self.uri = uri
  2160. if do_connect: self.find_driver(adapter_args,uri)
  2161. self.pool_size = pool_size
  2162. self.folder = folder
  2163. self.db_codec = db_codec
  2164. self._after_connection = after_connection
  2165. self.find_or_make_work_folder()
  2166. ruri = uri.split('://',1)[1]
  2167. m = self.REGEX_URI.match(ruri)
  2168. if not m:
  2169. raise SyntaxError(
  2170. "Invalid URI string in DAL: %s" % self.uri)
  2171. user = credential_decoder(m.group('user'))
  2172. if not user:
  2173. raise SyntaxError('User required')
  2174. password = credential_decoder(m.group('password'))
  2175. if not password:
  2176. password = ''
  2177. host = m.group('host')
  2178. if not host:
  2179. raise SyntaxError('Host name required')
  2180. db = m.group('db')
  2181. if not db:
  2182. raise SyntaxError('Database name required')
  2183. port = int(m.group('port') or '3306')
  2184. charset = m.group('charset') or 'utf8'
  2185. driver_args.update(db=db,
  2186. user=credential_decoder(user),
  2187. passwd=credential_decoder(password),
  2188. host=host,
  2189. port=port,
  2190. charset=charset)
  2191. def connector(driver_args=driver_args):
  2192. return self.driver.connect(**driver_args)
  2193. self.connector = connector
  2194. if do_connect: self.reconnect()
  2195. def after_connection(self):
  2196. self.execute('SET FOREIGN_KEY_CHECKS=1;')
  2197. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  2198. def lastrowid(self,table):
  2199. self.execute('select last_insert_id();')
  2200. return int(self.cursor.fetchone()[0])
  2201. class PostgreSQLAdapter(BaseAdapter):
  2202. drivers = ('psycopg2','pg8000')
  2203. support_distributed_transaction = True
  2204. types = {
  2205. 'boolean': 'CHAR(1)',
  2206. 'string': 'VARCHAR(%(length)s)',
  2207. 'text': 'TEXT',
  2208. 'json': 'TEXT',
  2209. 'password': 'VARCHAR(%(length)s)',
  2210. 'blob': 'BYTEA',
  2211. 'upload': 'VARCHAR(%(length)s)',
  2212. 'integer': 'INTEGER',
  2213. 'bigint': 'BIGINT',
  2214. 'float': 'FLOAT',
  2215. 'double': 'FLOAT8',
  2216. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2217. 'date': 'DATE',
  2218. 'time': 'TIME',
  2219. 'datetime': 'TIMESTAMP',
  2220. 'id': 'SERIAL PRIMARY KEY',
  2221. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2222. 'list:integer': 'TEXT',
  2223. 'list:string': 'TEXT',
  2224. 'list:reference': 'TEXT',
  2225. 'geometry': 'GEOMETRY',
  2226. 'geography': 'GEOGRAPHY',
  2227. 'big-id': 'BIGSERIAL PRIMARY KEY',
  2228. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2229. }
  2230. def varquote(self,name):
  2231. return varquote_aux(name,'"%s"')
  2232. def adapt(self,obj):
  2233. if self.driver_name == 'psycopg2':
  2234. return psycopg2_adapt(obj).getquoted()
  2235. elif self.driver_name == 'pg8000':
  2236. return "'%s'" % str(obj).replace("%","%%").replace("'","''")
  2237. else:
  2238. return "'%s'" % str(obj).replace("'","''")
  2239. def sequence_name(self,table):
  2240. return '%s_id_Seq' % table
  2241. def RANDOM(self):
  2242. return 'RANDOM()'
  2243. def ADD(self, first, second):
  2244. t = first.type
  2245. if t in ('text','string','password', 'json', 'upload','blob'):
  2246. return '(%s || %s)' % (self.expand(first), self.expand(second, t))
  2247. else:
  2248. return '(%s + %s)' % (self.expand(first), self.expand(second, t))
  2249. def distributed_transaction_begin(self,key):
  2250. return
  2251. def prepare(self,key):
  2252. self.execute("PREPARE TRANSACTION '%s';" % key)
  2253. def commit_prepared(self,key):
  2254. self.execute("COMMIT PREPARED '%s';" % key)
  2255. def rollback_prepared(self,key):
  2256. self.execute("ROLLBACK PREPARED '%s';" % key)
  2257. def create_sequence_and_triggers(self, query, table, **args):
  2258. # following lines should only be executed if table._sequence_name does not exist
  2259. # self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
  2260. # self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
  2261. # % (table._tablename, table._fieldname, table._sequence_name))
  2262. self.execute(query)
  2263. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
  2264. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2265. credential_decoder=IDENTITY, driver_args={},
  2266. adapter_args={}, do_connect=True, srid=4326,
  2267. after_connection=None):
  2268. self.db = db
  2269. self.dbengine = "postgres"
  2270. self.uri = uri
  2271. if do_connect: self.find_driver(adapter_args,uri)
  2272. self.pool_size = pool_size
  2273. self.folder = folder
  2274. self.db_codec = db_codec
  2275. self._after_connection = after_connection
  2276. self.srid = srid
  2277. self.find_or_make_work_folder()
  2278. ruri = uri.split('://',1)[1]
  2279. m = self.REGEX_URI.match(ruri)
  2280. if not m:
  2281. raise SyntaxError("Invalid URI string in DAL")
  2282. user = credential_decoder(m.group('user'))
  2283. if not user:
  2284. raise SyntaxError('User required')
  2285. password = credential_decoder(m.group('password'))
  2286. if not password:
  2287. password = ''
  2288. host = m.group('host')
  2289. if not host:
  2290. raise SyntaxError('Host name required')
  2291. db = m.group('db')
  2292. if not db:
  2293. raise SyntaxError('Database name required')
  2294. port = m.group('port') or '5432'
  2295. sslmode = m.group('sslmode')
  2296. if sslmode:
  2297. msg = ("dbname='%s' user='%s' host='%s' "
  2298. "port=%s password='%s' sslmode='%s'") \
  2299. % (db, user, host, port, password, sslmode)
  2300. else:
  2301. msg = ("dbname='%s' user='%s' host='%s' "
  2302. "port=%s password='%s'") \
  2303. % (db, user, host, port, password)
2304. # choose the driver according to the uri
  2305. self.__version__ = "%s %s" % (self.driver.__name__, self.driver.__version__)
  2306. def connector(msg=msg,driver_args=driver_args):
  2307. return self.driver.connect(msg,**driver_args)
  2308. self.connector = connector
  2309. if do_connect: self.reconnect()
  2310. def after_connection(self):
  2311. self.connection.set_client_encoding('UTF8')
  2312. self.execute("SET standard_conforming_strings=on;")
  2313. self.try_json()
  2314. def lastrowid(self,table):
  2315. self.execute("select currval('%s')" % table._sequence_name)
  2316. return int(self.cursor.fetchone()[0])
  2317. def try_json(self):
  2318. # check JSON data type support
  2319. # (to be added to after_connection)
  2320. if self.driver_name == "pg8000":
  2321. supports_json = self.connection.server_version >= "9.2.0"
  2322. elif (self.driver_name == "psycopg2") and \
  2323. (self.driver.__version__ >= "2.0.12"):
  2324. supports_json = self.connection.server_version >= 90200
  2325. elif self.driver_name == "zxJDBC":
  2326. supports_json = self.connection.dbversion >= "9.2.0"
  2327. else: supports_json = None
  2328. if supports_json: self.types["json"] = "JSON"
  2329. else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")
  2330. def LIKE(self,first,second):
  2331. args = (self.expand(first), self.expand(second,'string'))
  2332. if not first.type in ('string', 'text', 'json'):
  2333. return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
  2334. else:
  2335. return '(%s LIKE %s)' % args
  2336. def ILIKE(self,first,second):
  2337. args = (self.expand(first), self.expand(second,'string'))
  2338. if not first.type in ('string', 'text', 'json'):
  2339. return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
  2340. else:
  2341. return '(%s ILIKE %s)' % args
  2342. def REGEXP(self,first,second):
  2343. return '(%s ~ %s)' % (self.expand(first),
  2344. self.expand(second,'string'))
  2345. def STARTSWITH(self,first,second):
  2346. return '(%s ILIKE %s)' % (self.expand(first),
  2347. self.expand(second+'%','string'))
  2348. def ENDSWITH(self,first,second):
  2349. return '(%s ILIKE %s)' % (self.expand(first),
  2350. self.expand('%'+second,'string'))
  2351. def CONTAINS(self,first,second,case_sensitive=False):
  2352. if first.type in ('string','text', 'json'):
  2353. second = '%'+str(second).replace('%','%%')+'%'
  2354. elif first.type.startswith('list:'):
  2355. second = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
  2356. op = case_sensitive and self.LIKE or self.ILIKE
  2357. return op(first,second)
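# Illustrative encodings used by CONTAINS above:
# >>> # db.person.name.contains('am') expands roughly to
# >>> # "(person.name ILIKE '%am%')"
# >>> # for a list:integer field, contains(5) matches the bar-encoded
# >>> # pattern '%|5|%', so whole items rather than substrings match.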
  2358. # GIS functions
  2359. def ST_ASGEOJSON(self, first, second):
  2360. """
  2361. http://postgis.org/docs/ST_AsGeoJSON.html
  2362. """
  2363. return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
  2364. self.expand(first), second['precision'], second['options'])
  2365. def ST_ASTEXT(self, first):
  2366. """
  2367. http://postgis.org/docs/ST_AsText.html
  2368. """
  2369. return 'ST_AsText(%s)' %(self.expand(first))
  2370. def ST_X(self, first):
  2371. """
  2372. http://postgis.org/docs/ST_X.html
  2373. """
  2374. return 'ST_X(%s)' %(self.expand(first))
  2375. def ST_Y(self, first):
  2376. """
  2377. http://postgis.org/docs/ST_Y.html
  2378. """
  2379. return 'ST_Y(%s)' %(self.expand(first))
  2380. def ST_CONTAINS(self, first, second):
  2381. """
  2382. http://postgis.org/docs/ST_Contains.html
  2383. """
  2384. return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2385. def ST_DISTANCE(self, first, second):
  2386. """
  2387. http://postgis.org/docs/ST_Distance.html
  2388. """
  2389. return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2390. def ST_EQUALS(self, first, second):
  2391. """
  2392. http://postgis.org/docs/ST_Equals.html
  2393. """
  2394. return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2395. def ST_INTERSECTS(self, first, second):
  2396. """
  2397. http://postgis.org/docs/ST_Intersects.html
  2398. """
  2399. return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2400. def ST_OVERLAPS(self, first, second):
  2401. """
  2402. http://postgis.org/docs/ST_Overlaps.html
  2403. """
  2404. return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2405. def ST_SIMPLIFY(self, first, second):
  2406. """
  2407. http://postgis.org/docs/ST_Simplify.html
  2408. """
  2409. return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))
  2410. def ST_TOUCHES(self, first, second):
  2411. """
  2412. http://postgis.org/docs/ST_Touches.html
  2413. """
  2414. return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2415. def ST_WITHIN(self, first, second):
  2416. """
  2417. http://postgis.org/docs/ST_Within.html
  2418. """
  2419. return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2420. def represent(self, obj, fieldtype):
  2421. field_is_type = fieldtype.startswith
  2422. if field_is_type('geo'):
  2423. srid = 4326 # postGIS default srid for geometry
  2424. geotype, parms = fieldtype[:-1].split('(')
  2425. parms = parms.split(',')
  2426. if len(parms) >= 2:
  2427. schema, srid = parms[:2]
  2428. if field_is_type('geometry'):
  2429. value = "ST_GeomFromText('%s',%s)" %(obj, srid)
  2430. elif field_is_type('geography'):
  2431. value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
  2432. # else:
  2433. # raise SyntaxError('Invalid field type %s' %fieldtype)
  2434. return value
  2435. return BaseAdapter.represent(self, obj, fieldtype)
  2436. class NewPostgreSQLAdapter(PostgreSQLAdapter):
  2437. drivers = ('psycopg2','pg8000')
  2438. types = {
  2439. 'boolean': 'CHAR(1)',
  2440. 'string': 'VARCHAR(%(length)s)',
  2441. 'text': 'TEXT',
  2442. 'json': 'TEXT',
  2443. 'password': 'VARCHAR(%(length)s)',
  2444. 'blob': 'BYTEA',
  2445. 'upload': 'VARCHAR(%(length)s)',
  2446. 'integer': 'INTEGER',
  2447. 'bigint': 'BIGINT',
  2448. 'float': 'FLOAT',
  2449. 'double': 'FLOAT8',
  2450. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2451. 'date': 'DATE',
  2452. 'time': 'TIME',
  2453. 'datetime': 'TIMESTAMP',
  2454. 'id': 'SERIAL PRIMARY KEY',
  2455. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2456. 'list:integer': 'BIGINT[]',
  2457. 'list:string': 'TEXT[]',
  2458. 'list:reference': 'BIGINT[]',
  2459. 'geometry': 'GEOMETRY',
  2460. 'geography': 'GEOGRAPHY',
  2461. 'big-id': 'BIGSERIAL PRIMARY KEY',
  2462. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2463. }
  2464. def parse_list_integers(self, value, field_type):
  2465. return value
  2466. def parse_list_references(self, value, field_type):
  2467. return [self.parse_reference(r, field_type[5:]) for r in value]
  2468. def parse_list_strings(self, value, field_type):
  2469. return value
  2470. def represent(self, obj, fieldtype):
  2471. field_is_type = fieldtype.startswith
  2472. if field_is_type('list:'):
  2473. if not obj:
  2474. obj = []
  2475. elif not isinstance(obj, (list, tuple)):
  2476. obj = [obj]
  2477. if field_is_type('list:string'):
  2478. obj = map(str,obj)
  2479. else:
  2480. obj = map(int,obj)
  2481. return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
  2482. return BaseAdapter.represent(self, obj, fieldtype)
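# Illustrative: with native Postgres arrays, list values become ARRAY
# literals instead of bar-encoded strings:
# >>> # represent([1,2,3], 'list:integer') -> "ARRAY[1,2,3]"
# >>> # represent(['a','b'], 'list:string') -> "ARRAY['a','b']"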
  2483. class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
  2484. drivers = ('zxJDBC',)
  2485. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
  2486. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2487. credential_decoder=IDENTITY, driver_args={},
  2488. adapter_args={}, do_connect=True, after_connection=None ):
  2489. self.db = db
  2490. self.dbengine = "postgres"
  2491. self.uri = uri
  2492. if do_connect: self.find_driver(adapter_args,uri)
  2493. self.pool_size = pool_size
  2494. self.folder = folder
  2495. self.db_codec = db_codec
  2496. self._after_connection = after_connection
  2497. self.find_or_make_work_folder()
  2498. ruri = uri.split('://',1)[1]
  2499. m = self.REGEX_URI.match(ruri)
  2500. if not m:
  2501. raise SyntaxError("Invalid URI string in DAL")
  2502. user = credential_decoder(m.group('user'))
  2503. if not user:
  2504. raise SyntaxError('User required')
  2505. password = credential_decoder(m.group('password'))
  2506. if not password:
  2507. password = ''
  2508. host = m.group('host')
  2509. if not host:
  2510. raise SyntaxError('Host name required')
  2511. db = m.group('db')
  2512. if not db:
  2513. raise SyntaxError('Database name required')
  2514. port = m.group('port') or '5432'
  2515. msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
  2516. def connector(msg=msg,driver_args=driver_args):
  2517. return self.driver.connect(*msg,**driver_args)
  2518. self.connector = connector
  2519. if do_connect: self.reconnect()
  2520. def after_connection(self):
  2521. self.connection.set_client_encoding('UTF8')
  2522. self.execute('BEGIN;')
  2523. self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
  2524. self.try_json()
  2525. class OracleAdapter(BaseAdapter):
  2526. drivers = ('cx_Oracle',)
  2527. commit_on_alter_table = False
  2528. types = {
  2529. 'boolean': 'CHAR(1)',
  2530. 'string': 'VARCHAR2(%(length)s)',
  2531. 'text': 'CLOB',
  2532. 'json': 'CLOB',
  2533. 'password': 'VARCHAR2(%(length)s)',
  2534. 'blob': 'CLOB',
  2535. 'upload': 'VARCHAR2(%(length)s)',
  2536. 'integer': 'INT',
  2537. 'bigint': 'NUMBER',
  2538. 'float': 'FLOAT',
  2539. 'double': 'BINARY_DOUBLE',
  2540. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2541. 'date': 'DATE',
  2542. 'time': 'CHAR(8)',
  2543. 'datetime': 'DATE',
  2544. 'id': 'NUMBER PRIMARY KEY',
  2545. 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2546. 'list:integer': 'CLOB',
  2547. 'list:string': 'CLOB',
  2548. 'list:reference': 'CLOB',
  2549. 'big-id': 'NUMBER PRIMARY KEY',
  2550. 'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2551. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2552. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2553. }
  2554. def sequence_name(self,tablename):
  2555. return '%s_sequence' % tablename
  2556. def trigger_name(self,tablename):
  2557. return '%s_trigger' % tablename
  2558. def LEFT_JOIN(self):
  2559. return 'LEFT OUTER JOIN'
  2560. def RANDOM(self):
  2561. return 'dbms_random.value'
  2562. def NOT_NULL(self,default,field_type):
  2563. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  2564. def _drop(self,table,mode):
  2565. sequence_name = table._sequence_name
  2566. return ['DROP TABLE %s %s;' % (table, mode), 'DROP SEQUENCE %s;' % sequence_name]
  2567. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2568. if limitby:
  2569. (lmin, lmax) = limitby
  2570. if len(sql_w) > 1:
  2571. sql_w_row = sql_w + ' AND w_row > %i' % lmin
  2572. else:
  2573. sql_w_row = 'WHERE w_row > %i' % lmin
  2574. return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
  2575. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  2576. def constraint_name(self, tablename, fieldname):
  2577. constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
  2578. if len(constraint_name)>30:
  2579. constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
  2580. return constraint_name
  2581. def represent_exceptions(self, obj, fieldtype):
  2582. if fieldtype == 'blob':
  2583. obj = base64.b64encode(str(obj))
  2584. return ":CLOB('%s')" % obj
  2585. elif fieldtype == 'date':
  2586. if isinstance(obj, (datetime.date, datetime.datetime)):
  2587. obj = obj.isoformat()[:10]
  2588. else:
  2589. obj = str(obj)
  2590. return "to_date('%s','yyyy-mm-dd')" % obj
  2591. elif fieldtype == 'datetime':
  2592. if isinstance(obj, datetime.datetime):
  2593. obj = obj.isoformat()[:19].replace('T',' ')
  2594. elif isinstance(obj, datetime.date):
  2595. obj = obj.isoformat()[:10]+' 00:00:00'
  2596. else:
  2597. obj = str(obj)
  2598. return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
  2599. return None
  2600. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2601. credential_decoder=IDENTITY, driver_args={},
  2602. adapter_args={}, do_connect=True, after_connection=None):
  2603. self.db = db
  2604. self.dbengine = "oracle"
  2605. self.uri = uri
  2606. if do_connect: self.find_driver(adapter_args,uri)
  2607. self.pool_size = pool_size
  2608. self.folder = folder
  2609. self.db_codec = db_codec
  2610. self._after_connection = after_connection
  2611. self.find_or_make_work_folder()
  2612. ruri = uri.split('://',1)[1]
  2613. if not 'threaded' in driver_args:
  2614. driver_args['threaded']=True
  2615. def connector(uri=ruri,driver_args=driver_args):
  2616. return self.driver.connect(uri,**driver_args)
  2617. self.connector = connector
  2618. if do_connect: self.reconnect()
  2619. def after_connection(self):
  2620. self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
  2621. self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
  2622. oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
  2623. def execute(self, command, args=None):
  2624. args = args or []
  2625. i = 1
  2626. while True:
  2627. m = self.oracle_fix.match(command)
  2628. if not m:
  2629. break
  2630. command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
  2631. args.append(m.group('clob')[6:-2].replace("''", "'"))
  2632. i += 1
  2633. if command[-1:]==';':
  2634. command = command[:-1]
  2635. return self.log_execute(command, args)
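# Sketch of the rewrite performed by execute() above: a statement such as
#   INSERT INTO t(b) VALUES (:CLOB('xyz'));
# becomes
#   INSERT INTO t(b) VALUES (:1)
# with 'xyz' appended to args, so large CLOB values travel as numbered
# bind variables instead of inline literals.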
  2636. def create_sequence_and_triggers(self, query, table, **args):
  2637. tablename = table._tablename
  2638. sequence_name = table._sequence_name
  2639. trigger_name = table._trigger_name
  2640. self.execute(query)
  2641. self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
  2642. self.execute("""
  2643. CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
  2644. DECLARE
  2645. curr_val NUMBER;
  2646. diff_val NUMBER;
  2647. PRAGMA autonomous_transaction;
  2648. BEGIN
  2649. IF :NEW.id IS NOT NULL THEN
  2650. EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
  2651. diff_val := :NEW.id - curr_val - 1;
  2652. IF diff_val != 0 THEN
  2653. EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
  2654. EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
  2655. EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
  2656. END IF;
  2657. END IF;
  2658. SELECT %(sequence_name)s.nextval INTO :NEW.id FROM DUAL;
  2659. END;
  2660. """ % dict(trigger_name=trigger_name, tablename=tablename, sequence_name=sequence_name))
  2661. def lastrowid(self,table):
  2662. sequence_name = table._sequence_name
  2663. self.execute('SELECT %s.currval FROM dual;' % sequence_name)
  2664. return int(self.cursor.fetchone()[0])
  2665. #def parse_value(self, value, field_type, blob_decode=True):
  2666. # if blob_decode and isinstance(value, cx_Oracle.LOB):
  2667. # try:
  2668. # value = value.read()
  2669. # except self.driver.ProgrammingError:
  2670. # # After a subsequent fetch the LOB value is not valid anymore
  2671. # pass
  2672. # return BaseAdapter.parse_value(self, value, field_type, blob_decode)
  2673. def _fetchall(self):
  2674. if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
  2675. return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
  2676. for c in r]) for r in self.cursor]
  2677. else:
  2678. return self.cursor.fetchall()
  2679. class MSSQLAdapter(BaseAdapter):
  2680. drivers = ('pyodbc',)
  2681. types = {
  2682. 'boolean': 'BIT',
  2683. 'string': 'VARCHAR(%(length)s)',
  2684. 'text': 'TEXT',
  2685. 'json': 'TEXT',
  2686. 'password': 'VARCHAR(%(length)s)',
  2687. 'blob': 'IMAGE',
  2688. 'upload': 'VARCHAR(%(length)s)',
  2689. 'integer': 'INT',
  2690. 'bigint': 'BIGINT',
  2691. 'float': 'FLOAT',
  2692. 'double': 'FLOAT',
  2693. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2694. 'date': 'DATETIME',
  2695. 'time': 'CHAR(8)',
  2696. 'datetime': 'DATETIME',
  2697. 'id': 'INT IDENTITY PRIMARY KEY',
  2698. 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2699. 'list:integer': 'TEXT',
  2700. 'list:string': 'TEXT',
  2701. 'list:reference': 'TEXT',
  2702. 'geometry': 'geometry',
  2703. 'geography': 'geography',
  2704. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  2705. 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2706. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2707. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2708. }
  2709. def concat_add(self,tablename):
  2710. return '; ALTER TABLE %s ADD ' % tablename
  2711. def varquote(self,name):
  2712. return varquote_aux(name,'[%s]')
  2713. def EXTRACT(self,field,what):
  2714. return "DATEPART(%s,%s)" % (what, self.expand(field))
  2715. def LEFT_JOIN(self):
  2716. return 'LEFT OUTER JOIN'
  2717. def RANDOM(self):
  2718. return 'NEWID()'
  2719. def ALLOW_NULL(self):
  2720. return ' NULL'
  2721. def SUBSTRING(self,field,parameters):
  2722. return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
  2723. def PRIMARY_KEY(self,key):
  2724. return 'PRIMARY KEY CLUSTERED (%s)' % key
  2725. def AGGREGATE(self, first, what):
  2726. if what == 'LENGTH':
  2727. what = 'LEN'
  2728. return "%s(%s)" % (what, self.expand(first))
  2729. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2730. if limitby:
  2731. (lmin, lmax) = limitby
  2732. sql_s += ' TOP %i' % lmax
  2733. if 'GROUP BY' in sql_o:
  2734. sql_o = sql_o[:sql_o.find('ORDER BY ')]
  2735. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
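# Note: TOP only caps the row count, so the offset half of limitby is
# applied client-side by rowslice below; e.g. limitby=(10,30) fetches
# TOP 30 rows and rowslice(rows, 10) discards the first ten.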
  2736. TRUE = 1
  2737. FALSE = 0
  2738. REGEX_DSN = re.compile('^(?P<dsn>.+)$')
  2739. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
  2740. REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
  2741. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2742. credential_decoder=IDENTITY, driver_args={},
  2743. adapter_args={}, do_connect=True, srid=4326,
  2744. after_connection=None):
  2745. self.db = db
  2746. self.dbengine = "mssql"
  2747. self.uri = uri
  2748. if do_connect: self.find_driver(adapter_args,uri)
  2749. self.pool_size = pool_size
  2750. self.folder = folder
  2751. self.db_codec = db_codec
  2752. self._after_connection = after_connection
  2753. self.srid = srid
  2754. self.find_or_make_work_folder()
  2755. # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
  2756. ruri = uri.split('://',1)[1]
  2757. if '@' not in ruri:
  2758. try:
  2759. m = self.REGEX_DSN.match(ruri)
  2760. if not m:
  2761. raise SyntaxError(
  2762. 'Parsing uri string(%s) has no result' % self.uri)
  2763. dsn = m.group('dsn')
  2764. if not dsn:
  2765. raise SyntaxError('DSN required')
  2766. except SyntaxError:
  2767. e = sys.exc_info()[1]
  2768. LOGGER.error('NdGpatch error')
  2769. raise e
  2770. # was cnxn = 'DSN=%s' % dsn
  2771. cnxn = dsn
  2772. else:
  2773. m = self.REGEX_URI.match(ruri)
  2774. if not m:
  2775. raise SyntaxError(
  2776. "Invalid URI string in DAL: %s" % self.uri)
  2777. user = credential_decoder(m.group('user'))
  2778. if not user:
  2779. raise SyntaxError('User required')
  2780. password = credential_decoder(m.group('password'))
  2781. if not password:
  2782. password = ''
  2783. host = m.group('host')
  2784. if not host:
  2785. raise SyntaxError('Host name required')
  2786. db = m.group('db')
  2787. if not db:
  2788. raise SyntaxError('Database name required')
  2789. port = m.group('port') or '1433'
  2790. # Parse the optional url name-value arg pairs after the '?'
  2791. # (in the form of arg1=value1&arg2=value2&...)
  2792. # Default values (drivers like FreeTDS insist on uppercase parameter keys)
  2793. argsdict = { 'DRIVER':'{SQL Server}' }
  2794. urlargs = m.group('urlargs') or ''
  2795. for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
  2796. argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
  2797. urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
  2798. cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
  2799. % (host, port, db, user, password, urlargs)
  2800. def connector(cnxn=cnxn,driver_args=driver_args):
  2801. return self.driver.connect(cnxn,**driver_args)
  2802. self.connector = connector
  2803. if do_connect: self.reconnect()
  2804. def lastrowid(self,table):
  2805. #self.execute('SELECT @@IDENTITY;')
  2806. self.execute('SELECT SCOPE_IDENTITY();')
  2807. return int(self.cursor.fetchone()[0])
  2808. def integrity_error_class(self):
  2809. return pyodbc.IntegrityError
  2810. def rowslice(self,rows,minimum=0,maximum=None):
  2811. if maximum is None:
  2812. return rows[minimum:]
  2813. return rows[minimum:maximum]
  2814. def EPOCH(self, first):
  2815. return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
  2816. # GIS Spatial Extensions
  2817. # No STAsGeoJSON in MSSQL
  2818. def ST_ASTEXT(self, first):
  2819. return '%s.STAsText()' %(self.expand(first))
  2820. def ST_CONTAINS(self, first, second):
  2821. return '%s.STContains(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2822. def ST_DISTANCE(self, first, second):
  2823. return '%s.STDistance(%s)' %(self.expand(first), self.expand(second, first.type))
  2824. def ST_EQUALS(self, first, second):
  2825. return '%s.STEquals(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2826. def ST_INTERSECTS(self, first, second):
  2827. return '%s.STIntersects(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2828. def ST_OVERLAPS(self, first, second):
  2829. return '%s.STOverlaps(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2830. # no STSimplify in MSSQL
  2831. def ST_TOUCHES(self, first, second):
  2832. return '%s.STTouches(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2833. def ST_WITHIN(self, first, second):
  2834. return '%s.STWithin(%s)=1' %(self.expand(first), self.expand(second, first.type))
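# Illustrative sketch (not executed): unlike the PostGIS-style function
# syntax ST_Contains(a, b), these helpers emit MSSQL's method-call syntax,
# so a containment test expands roughly to
# "place.loc.STContains(geometry::STGeomFromText('POINT(1 2)',0))=1"
# ('place.loc' is a hypothetical geometry field).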
2835. def represent(self, obj, fieldtype):
2836. field_is_type = fieldtype.startswith
2837. if field_is_type('geometry'):
2838. srid = 0 # MS SQL default srid for geometry
2839. if '(' in fieldtype:
2840. geotype, parms = fieldtype[:-1].split('(')
2841. if parms:
2842. srid = parms
2843. return "geometry::STGeomFromText('%s',%s)" %(obj, srid)
2844. elif field_is_type('geography'):
2845. srid = 4326 # MS SQL default srid for geography
2846. if '(' in fieldtype:
2847. geotype, parms = fieldtype[:-1].split('(')
2848. if parms:
2849. srid = parms
2850. return "geography::STGeomFromText('%s',%s)" %(obj, srid)
2851. return BaseAdapter.represent(self, obj, fieldtype)
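# Illustrative sketch (not executed): represent('POINT(1 2)', 'geometry(4326)')
# returns "geometry::STGeomFromText('POINT(1 2)',4326)"; when the field type
# carries no '(srid)' suffix the defaults above apply (0 for geometry,
# 4326 for geography).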
  2853. class MSSQL3Adapter(MSSQLAdapter):
  2854. """ experimental support for pagination in MSSQL"""
  2855. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2856. if limitby:
  2857. (lmin, lmax) = limitby
  2858. if lmin == 0:
  2859. sql_s += ' TOP %i' % lmax
  2860. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  2861. lmin += 1
  2862. sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
  2863. sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
  2864. sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
  2865. sql_f_inner = [f for f in sql_f.split(',')]
  2866. sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
  2867. sql_f_iproxy = ', '.join(sql_f_iproxy)
  2868. sql_f_oproxy = ', '.join(sql_f_outer)
  2869. return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s,sql_f_oproxy,sql_s,sql_f,sql_f_iproxy,sql_t,sql_w,sql_g_inner,lmin,lmax)
  2870. return 'SELECT %s %s FROM %s%s%s;' % (sql_s,sql_f,sql_t,sql_w,sql_o)
  2871. def rowslice(self,rows,minimum=0,maximum=None):
  2872. return rows
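# Illustrative sketch (not executed; names hypothetical): for
# limitby=(10, 20) the paged query renders roughly as
# 'SELECT  f_0 FROM (SELECT  ROW_NUMBER() OVER (ORDER BY t.x) AS w_row,
#  t.x AS f_0 FROM t) TMP WHERE w_row BETWEEN 11 AND 20;'
# i.e. the zero-based limitby maps onto one-based ROW_NUMBER values, and
# rowslice() returns the rows unchanged because paging happened in SQL.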
  2873. class MSSQL2Adapter(MSSQLAdapter):
  2874. drivers = ('pyodbc',)
  2875. types = {
  2876. 'boolean': 'CHAR(1)',
  2877. 'string': 'NVARCHAR(%(length)s)',
  2878. 'text': 'NTEXT',
  2879. 'json': 'NTEXT',
  2880. 'password': 'NVARCHAR(%(length)s)',
  2881. 'blob': 'IMAGE',
  2882. 'upload': 'NVARCHAR(%(length)s)',
  2883. 'integer': 'INT',
  2884. 'bigint': 'BIGINT',
  2885. 'float': 'FLOAT',
  2886. 'double': 'FLOAT',
  2887. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2888. 'date': 'DATETIME',
  2889. 'time': 'CHAR(8)',
  2890. 'datetime': 'DATETIME',
  2891. 'id': 'INT IDENTITY PRIMARY KEY',
  2892. 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2893. 'list:integer': 'NTEXT',
  2894. 'list:string': 'NTEXT',
  2895. 'list:reference': 'NTEXT',
  2896. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  2897. 'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2898. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2899. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2900. }
  2901. def represent(self, obj, fieldtype):
  2902. value = BaseAdapter.represent(self, obj, fieldtype)
  2903. if fieldtype in ('string','text', 'json') and value[:1]=="'":
  2904. value = 'N'+value
  2905. return value
  2906. def execute(self,a):
  2907. return self.log_execute(a.decode('utf8'))
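# Illustrative sketch (not executed): because this adapter maps string types
# to NVARCHAR/NTEXT, represent('abc', 'string') yields N'abc' -- the N
# prefix marks a Unicode literal for SQL Server.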
  2908. class SybaseAdapter(MSSQLAdapter):
  2909. drivers = ('Sybase',)
  2910. types = {
  2911. 'boolean': 'BIT',
  2912. 'string': 'CHAR VARYING(%(length)s)',
  2913. 'text': 'TEXT',
  2914. 'json': 'TEXT',
  2915. 'password': 'CHAR VARYING(%(length)s)',
  2916. 'blob': 'IMAGE',
  2917. 'upload': 'CHAR VARYING(%(length)s)',
  2918. 'integer': 'INT',
  2919. 'bigint': 'BIGINT',
  2920. 'float': 'FLOAT',
  2921. 'double': 'FLOAT',
  2922. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2923. 'date': 'DATETIME',
  2924. 'time': 'CHAR(8)',
  2925. 'datetime': 'DATETIME',
  2926. 'id': 'INT IDENTITY PRIMARY KEY',
  2927. 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2928. 'list:integer': 'TEXT',
  2929. 'list:string': 'TEXT',
  2930. 'list:reference': 'TEXT',
  2931. 'geometry': 'geometry',
  2932. 'geography': 'geography',
  2933. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  2934. 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2935. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2936. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2937. }
  2938. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2939. credential_decoder=IDENTITY, driver_args={},
  2940. adapter_args={}, do_connect=True, srid=4326,
  2941. after_connection=None):
  2942. self.db = db
  2943. self.dbengine = "sybase"
  2944. self.uri = uri
  2945. if do_connect: self.find_driver(adapter_args,uri)
  2946. self.pool_size = pool_size
  2947. self.folder = folder
  2948. self.db_codec = db_codec
  2949. self._after_connection = after_connection
  2950. self.srid = srid
  2951. self.find_or_make_work_folder()
  2952. # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
  2953. ruri = uri.split('://',1)[1]
  2954. if '@' not in ruri:
  2955. try:
  2956. m = self.REGEX_DSN.match(ruri)
  2957. if not m:
  2958. raise SyntaxError(
  2959. 'Parsing uri string(%s) has no result' % self.uri)
  2960. dsn = m.group('dsn')
  2961. if not dsn:
  2962. raise SyntaxError('DSN required')
  2963. except SyntaxError:
  2964. e = sys.exc_info()[1]
  2965. LOGGER.error('NdGpatch error')
  2966. raise e
  2967. else:
2968. m = self.REGEX_URI.match(ruri)
  2969. if not m:
  2970. raise SyntaxError(
  2971. "Invalid URI string in DAL: %s" % self.uri)
  2972. user = credential_decoder(m.group('user'))
  2973. if not user:
  2974. raise SyntaxError('User required')
  2975. password = credential_decoder(m.group('password'))
  2976. if not password:
  2977. password = ''
  2978. host = m.group('host')
  2979. if not host:
  2980. raise SyntaxError('Host name required')
  2981. db = m.group('db')
  2982. if not db:
  2983. raise SyntaxError('Database name required')
  2984. port = m.group('port') or '1433'
  2985. dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)
  2986. driver_args.update(user = credential_decoder(user),
  2987. password = credential_decoder(password))
  2988. def connector(dsn=dsn,driver_args=driver_args):
  2989. return self.driver.connect(dsn,**driver_args)
  2990. self.connector = connector
  2991. if do_connect: self.reconnect()
  2992. def integrity_error_class(self):
  2993. return RuntimeError # FIX THIS
  2994. class FireBirdAdapter(BaseAdapter):
  2995. drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
  2996. commit_on_alter_table = False
  2997. support_distributed_transaction = True
  2998. types = {
  2999. 'boolean': 'CHAR(1)',
  3000. 'string': 'VARCHAR(%(length)s)',
  3001. 'text': 'BLOB SUB_TYPE 1',
  3002. 'json': 'BLOB SUB_TYPE 1',
  3003. 'password': 'VARCHAR(%(length)s)',
  3004. 'blob': 'BLOB SUB_TYPE 0',
  3005. 'upload': 'VARCHAR(%(length)s)',
  3006. 'integer': 'INTEGER',
  3007. 'bigint': 'BIGINT',
  3008. 'float': 'FLOAT',
  3009. 'double': 'DOUBLE PRECISION',
  3010. 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
  3011. 'date': 'DATE',
  3012. 'time': 'TIME',
  3013. 'datetime': 'TIMESTAMP',
  3014. 'id': 'INTEGER PRIMARY KEY',
  3015. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3016. 'list:integer': 'BLOB SUB_TYPE 1',
  3017. 'list:string': 'BLOB SUB_TYPE 1',
  3018. 'list:reference': 'BLOB SUB_TYPE 1',
  3019. 'big-id': 'BIGINT PRIMARY KEY',
  3020. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3021. }
  3022. def sequence_name(self,tablename):
  3023. return 'genid_%s' % tablename
  3024. def trigger_name(self,tablename):
  3025. return 'trg_id_%s' % tablename
  3026. def RANDOM(self):
  3027. return 'RAND()'
  3028. def EPOCH(self, first):
  3029. return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
  3030. def NOT_NULL(self,default,field_type):
  3031. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  3032. def SUBSTRING(self,field,parameters):
  3033. return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
  3034. def CONTAINING(self,first,second):
  3035. "case in-sensitive like operator"
  3036. return '(%s CONTAINING %s)' % (self.expand(first),
  3037. self.expand(second, 'string'))
  3038. def CONTAINS(self, first, second, case_sensitive=False):
  3039. if first.type in ('string','text'):
  3040. second = str(second).replace('%','%%')
  3041. elif first.type.startswith('list:'):
  3042. second = '|'+str(second).replace('|','||').replace('%','%%')+'|'
  3043. return self.CONTAINING(first,second)
  3044. def _drop(self,table,mode):
  3045. sequence_name = table._sequence_name
  3046. return ['DROP TABLE %s %s;' % (table, mode), 'DROP GENERATOR %s;' % sequence_name]
  3047. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3048. if limitby:
  3049. (lmin, lmax) = limitby
  3050. sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
  3051. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3052. def _truncate(self,table,mode = ''):
  3053. return ['DELETE FROM %s;' % table._tablename,
  3054. 'SET GENERATOR %s TO 0;' % table._sequence_name]
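# Illustrative sketch (not executed): Firebird pages with FIRST/SKIP, so
# limitby=(10, 20) renders roughly as 'SELECT  FIRST 10 SKIP 10 ...;'
# (FIRST takes the page size lmax - lmin, SKIP the offset). Truncation
# also resets the table's generator so new ids restart from 1.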
  3055. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
  3056. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3057. credential_decoder=IDENTITY, driver_args={},
  3058. adapter_args={}, do_connect=True, after_connection=None):
  3059. self.db = db
  3060. self.dbengine = "firebird"
  3061. self.uri = uri
  3062. if do_connect: self.find_driver(adapter_args,uri)
  3063. self.pool_size = pool_size
  3064. self.folder = folder
  3065. self.db_codec = db_codec
  3066. self._after_connection = after_connection
  3067. self.find_or_make_work_folder()
  3068. ruri = uri.split('://',1)[1]
  3069. m = self.REGEX_URI.match(ruri)
  3070. if not m:
  3071. raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
  3072. user = credential_decoder(m.group('user'))
  3073. if not user:
  3074. raise SyntaxError('User required')
  3075. password = credential_decoder(m.group('password'))
  3076. if not password:
  3077. password = ''
  3078. host = m.group('host')
  3079. if not host:
  3080. raise SyntaxError('Host name required')
  3081. port = int(m.group('port') or 3050)
  3082. db = m.group('db')
  3083. if not db:
  3084. raise SyntaxError('Database name required')
  3085. charset = m.group('charset') or 'UTF8'
  3086. driver_args.update(dsn='%s/%s:%s' % (host,port,db),
  3087. user = credential_decoder(user),
  3088. password = credential_decoder(password),
  3089. charset = charset)
  3090. def connector(driver_args=driver_args):
  3091. return self.driver.connect(**driver_args)
  3092. self.connector = connector
  3093. if do_connect: self.reconnect()
  3094. def create_sequence_and_triggers(self, query, table, **args):
  3095. tablename = table._tablename
  3096. sequence_name = table._sequence_name
  3097. trigger_name = table._trigger_name
  3098. self.execute(query)
  3099. self.execute('create generator %s;' % sequence_name)
  3100. self.execute('set generator %s to 0;' % sequence_name)
  3101. self.execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;' % (trigger_name, tablename, sequence_name))
  3102. def lastrowid(self,table):
  3103. sequence_name = table._sequence_name
  3104. self.execute('SELECT gen_id(%s, 0) FROM rdb$database' % sequence_name)
  3105. return int(self.cursor.fetchone()[0])
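# Illustrative sketch (not executed): Firebird has no autoincrement column
# type, so each table gets a generator ('genid_<table>') plus a BEFORE
# INSERT trigger that fills new.id from gen_id(); lastrowid() then reads
# the generator's current value back with gen_id(<generator>, 0).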
  3106. class FireBirdEmbeddedAdapter(FireBirdAdapter):
  3107. drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
  3108. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
  3109. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3110. credential_decoder=IDENTITY, driver_args={},
  3111. adapter_args={}, do_connect=True, after_connection=None):
  3112. self.db = db
  3113. self.dbengine = "firebird"
  3114. self.uri = uri
  3115. if do_connect: self.find_driver(adapter_args,uri)
  3116. self.pool_size = pool_size
  3117. self.folder = folder
  3118. self.db_codec = db_codec
  3119. self._after_connection = after_connection
  3120. self.find_or_make_work_folder()
  3121. ruri = uri.split('://',1)[1]
  3122. m = self.REGEX_URI.match(ruri)
  3123. if not m:
  3124. raise SyntaxError(
  3125. "Invalid URI string in DAL: %s" % self.uri)
  3126. user = credential_decoder(m.group('user'))
  3127. if not user:
  3128. raise SyntaxError('User required')
  3129. password = credential_decoder(m.group('password'))
  3130. if not password:
  3131. password = ''
  3132. pathdb = m.group('path')
  3133. if not pathdb:
  3134. raise SyntaxError('Path required')
  3135. charset = m.group('charset')
  3136. if not charset:
  3137. charset = 'UTF8'
  3138. host = ''
  3139. driver_args.update(host=host,
  3140. database=pathdb,
  3141. user=credential_decoder(user),
  3142. password=credential_decoder(password),
  3143. charset=charset)
  3144. def connector(driver_args=driver_args):
  3145. return self.driver.connect(**driver_args)
  3146. self.connector = connector
  3147. if do_connect: self.reconnect()
  3148. class InformixAdapter(BaseAdapter):
  3149. drivers = ('informixdb',)
  3150. types = {
  3151. 'boolean': 'CHAR(1)',
  3152. 'string': 'VARCHAR(%(length)s)',
  3153. 'text': 'BLOB SUB_TYPE 1',
  3154. 'json': 'BLOB SUB_TYPE 1',
  3155. 'password': 'VARCHAR(%(length)s)',
  3156. 'blob': 'BLOB SUB_TYPE 0',
  3157. 'upload': 'VARCHAR(%(length)s)',
  3158. 'integer': 'INTEGER',
  3159. 'bigint': 'BIGINT',
  3160. 'float': 'FLOAT',
  3161. 'double': 'DOUBLE PRECISION',
  3162. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3163. 'date': 'DATE',
  3164. 'time': 'CHAR(8)',
  3165. 'datetime': 'DATETIME',
  3166. 'id': 'SERIAL',
  3167. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3168. 'list:integer': 'BLOB SUB_TYPE 1',
  3169. 'list:string': 'BLOB SUB_TYPE 1',
  3170. 'list:reference': 'BLOB SUB_TYPE 1',
  3171. 'big-id': 'BIGSERIAL',
  3172. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3173. 'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s',
  3174. 'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s',
  3175. }
  3176. def RANDOM(self):
  3177. return 'Random()'
  3178. def NOT_NULL(self,default,field_type):
  3179. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  3180. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3181. if limitby:
  3182. (lmin, lmax) = limitby
  3183. fetch_amt = lmax - lmin
  3184. dbms_version = int(self.connection.dbms_version.split('.')[0])
  3185. if lmin and (dbms_version >= 10):
  3186. # Requires Informix 10.0+
  3187. sql_s += ' SKIP %d' % (lmin, )
  3188. if fetch_amt and (dbms_version >= 9):
  3189. # Requires Informix 9.0+
  3190. sql_s += ' FIRST %d' % (fetch_amt, )
  3191. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
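# Illustrative sketch (not executed): on Informix 10+ limitby=(10, 20)
# renders roughly as 'SELECT  SKIP 10 FIRST 10 ...;'. SKIP requires
# Informix 10.0+ and FIRST requires 9.0+, so each modifier is emitted
# only when connection.dbms_version reports a new enough server.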
  3192. def represent_exceptions(self, obj, fieldtype):
  3193. if fieldtype == 'date':
  3194. if isinstance(obj, (datetime.date, datetime.datetime)):
  3195. obj = obj.isoformat()[:10]
  3196. else:
  3197. obj = str(obj)
  3198. return "to_date('%s','%%Y-%%m-%%d')" % obj
  3199. elif fieldtype == 'datetime':
  3200. if isinstance(obj, datetime.datetime):
  3201. obj = obj.isoformat()[:19].replace('T',' ')
  3202. elif isinstance(obj, datetime.date):
  3203. obj = obj.isoformat()[:10]+' 00:00:00'
  3204. else:
  3205. obj = str(obj)
  3206. return "to_date('%s','%%Y-%%m-%%d %%H:%%M:%%S')" % obj
  3207. return None
  3208. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
  3209. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3210. credential_decoder=IDENTITY, driver_args={},
  3211. adapter_args={}, do_connect=True, after_connection=None):
  3212. self.db = db
  3213. self.dbengine = "informix"
  3214. self.uri = uri
  3215. if do_connect: self.find_driver(adapter_args,uri)
  3216. self.pool_size = pool_size
  3217. self.folder = folder
  3218. self.db_codec = db_codec
  3219. self._after_connection = after_connection
  3220. self.find_or_make_work_folder()
  3221. ruri = uri.split('://',1)[1]
  3222. m = self.REGEX_URI.match(ruri)
  3223. if not m:
  3224. raise SyntaxError(
  3225. "Invalid URI string in DAL: %s" % self.uri)
  3226. user = credential_decoder(m.group('user'))
  3227. if not user:
  3228. raise SyntaxError('User required')
  3229. password = credential_decoder(m.group('password'))
  3230. if not password:
  3231. password = ''
  3232. host = m.group('host')
  3233. if not host:
  3234. raise SyntaxError('Host name required')
  3235. db = m.group('db')
  3236. if not db:
  3237. raise SyntaxError('Database name required')
  3238. user = credential_decoder(user)
  3239. password = credential_decoder(password)
  3240. dsn = '%s@%s' % (db,host)
  3241. driver_args.update(user=user,password=password,autocommit=True)
  3242. def connector(dsn=dsn,driver_args=driver_args):
  3243. return self.driver.connect(dsn,**driver_args)
  3244. self.connector = connector
  3245. if do_connect: self.reconnect()
  3246. def execute(self,command):
  3247. if command[-1:]==';':
  3248. command = command[:-1]
  3249. return self.log_execute(command)
  3250. def lastrowid(self,table):
  3251. return self.cursor.sqlerrd[1]
  3252. def integrity_error_class(self):
  3253. return informixdb.IntegrityError
  3254. class InformixSEAdapter(InformixAdapter):
  3255. """ work in progress """
  3256. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3257. return 'SELECT %s %s FROM %s%s%s;' % \
  3258. (sql_s, sql_f, sql_t, sql_w, sql_o)
  3259. def rowslice(self,rows,minimum=0,maximum=None):
  3260. if maximum is None:
  3261. return rows[minimum:]
  3262. return rows[minimum:maximum]
  3263. class DB2Adapter(BaseAdapter):
  3264. drivers = ('pyodbc',)
  3265. types = {
  3266. 'boolean': 'CHAR(1)',
  3267. 'string': 'VARCHAR(%(length)s)',
  3268. 'text': 'CLOB',
  3269. 'json': 'CLOB',
  3270. 'password': 'VARCHAR(%(length)s)',
  3271. 'blob': 'BLOB',
  3272. 'upload': 'VARCHAR(%(length)s)',
  3273. 'integer': 'INT',
  3274. 'bigint': 'BIGINT',
  3275. 'float': 'REAL',
  3276. 'double': 'DOUBLE',
  3277. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3278. 'date': 'DATE',
  3279. 'time': 'TIME',
  3280. 'datetime': 'TIMESTAMP',
  3281. 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
  3282. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3283. 'list:integer': 'CLOB',
  3284. 'list:string': 'CLOB',
  3285. 'list:reference': 'CLOB',
  3286. 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
  3287. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3288. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3289. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  3290. }
  3291. def LEFT_JOIN(self):
  3292. return 'LEFT OUTER JOIN'
  3293. def RANDOM(self):
  3294. return 'RAND()'
  3295. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3296. if limitby:
  3297. (lmin, lmax) = limitby
  3298. sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
  3299. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
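# Illustrative sketch (not executed): DB2 appends 'FETCH FIRST n ROWS ONLY'
# after the ORDER BY clause, so limitby=(10, 20) fetches 20 rows and
# rowslice() below trims the first 10 client-side.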
  3300. def represent_exceptions(self, obj, fieldtype):
  3301. if fieldtype == 'blob':
  3302. obj = base64.b64encode(str(obj))
  3303. return "BLOB('%s')" % obj
  3304. elif fieldtype == 'datetime':
  3305. if isinstance(obj, datetime.datetime):
  3306. obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
  3307. elif isinstance(obj, datetime.date):
  3308. obj = obj.isoformat()[:10]+'-00.00.00'
  3309. return "'%s'" % obj
  3310. return None
  3311. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3312. credential_decoder=IDENTITY, driver_args={},
  3313. adapter_args={}, do_connect=True, after_connection=None):
  3314. self.db = db
  3315. self.dbengine = "db2"
  3316. self.uri = uri
  3317. if do_connect: self.find_driver(adapter_args,uri)
  3318. self.pool_size = pool_size
  3319. self.folder = folder
  3320. self.db_codec = db_codec
  3321. self._after_connection = after_connection
  3322. self.find_or_make_work_folder()
  3323. ruri = uri.split('://', 1)[1]
  3324. def connector(cnxn=ruri,driver_args=driver_args):
  3325. return self.driver.connect(cnxn,**driver_args)
  3326. self.connector = connector
  3327. if do_connect: self.reconnect()
  3328. def execute(self,command):
  3329. if command[-1:]==';':
  3330. command = command[:-1]
  3331. return self.log_execute(command)
  3332. def lastrowid(self,table):
  3333. self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
  3334. return int(self.cursor.fetchone()[0])
  3335. def rowslice(self,rows,minimum=0,maximum=None):
  3336. if maximum is None:
  3337. return rows[minimum:]
  3338. return rows[minimum:maximum]
  3339. class TeradataAdapter(BaseAdapter):
  3340. drivers = ('pyodbc',)
  3341. types = {
  3342. 'boolean': 'CHAR(1)',
  3343. 'string': 'VARCHAR(%(length)s)',
  3344. 'text': 'CLOB',
  3345. 'json': 'CLOB',
  3346. 'password': 'VARCHAR(%(length)s)',
  3347. 'blob': 'BLOB',
  3348. 'upload': 'VARCHAR(%(length)s)',
  3349. 'integer': 'INT',
  3350. 'bigint': 'BIGINT',
  3351. 'float': 'REAL',
  3352. 'double': 'DOUBLE',
  3353. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3354. 'date': 'DATE',
  3355. 'time': 'TIME',
  3356. 'datetime': 'TIMESTAMP',
  3357. # Modified Constraint syntax for Teradata.
  3358. # Teradata does not support ON DELETE.
  3359. 'id': 'INT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
  3360. 'reference': 'INT',
  3361. 'list:integer': 'CLOB',
  3362. 'list:string': 'CLOB',
  3363. 'list:reference': 'CLOB',
  3364. 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
  3365. 'big-reference': 'BIGINT',
  3366. 'reference FK': ' REFERENCES %(foreign_key)s',
  3367. 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
  3368. }
  3369. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3370. credential_decoder=IDENTITY, driver_args={},
  3371. adapter_args={}, do_connect=True, after_connection=None):
  3372. self.db = db
  3373. self.dbengine = "teradata"
  3374. self.uri = uri
  3375. if do_connect: self.find_driver(adapter_args,uri)
  3376. self.pool_size = pool_size
  3377. self.folder = folder
  3378. self.db_codec = db_codec
  3379. self._after_connection = after_connection
  3380. self.find_or_make_work_folder()
  3381. ruri = uri.split('://', 1)[1]
  3382. def connector(cnxn=ruri,driver_args=driver_args):
  3383. return self.driver.connect(cnxn,**driver_args)
  3384. self.connector = connector
  3385. if do_connect: self.reconnect()
  3386. def LEFT_JOIN(self):
  3387. return 'LEFT OUTER JOIN'
  3388. # Similar to MSSQL, Teradata can't specify a range (for Pageby)
  3389. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3390. if limitby:
  3391. (lmin, lmax) = limitby
  3392. sql_s += ' TOP %i' % lmax
  3393. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3394. def _truncate(self, table, mode=''):
  3395. tablename = table._tablename
  3396. return ['DELETE FROM %s ALL;' % (tablename)]
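# Illustrative sketch (not executed): like MSSQL, Teradata's TOP cannot
# express an offset, so limitby=(10, 20) renders as 'SELECT  TOP 20 ...;'
# and leading rows are discarded client-side; 'DELETE FROM t ALL;' is the
# Teradata idiom for truncating a table.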
  3397. INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name
  3398. # (ANSI-SQL wants this form of name
  3399. # to be a delimited identifier)
  3400. class IngresAdapter(BaseAdapter):
  3401. drivers = ('pyodbc',)
  3402. types = {
  3403. 'boolean': 'CHAR(1)',
  3404. 'string': 'VARCHAR(%(length)s)',
  3405. 'text': 'CLOB',
  3406. 'json': 'CLOB',
3407. 'password': 'VARCHAR(%(length)s)', ## unclear whether this should be utf8, nvarchar, or even bytes
  3408. 'blob': 'BLOB',
  3409. 'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
  3410. 'integer': 'INTEGER4', # or int8...
  3411. 'bigint': 'BIGINT',
  3412. 'float': 'FLOAT',
  3413. 'double': 'FLOAT8',
  3414. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3415. 'date': 'ANSIDATE',
  3416. 'time': 'TIME WITHOUT TIME ZONE',
  3417. 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
  3418. 'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
  3419. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3420. 'list:integer': 'CLOB',
  3421. 'list:string': 'CLOB',
  3422. 'list:reference': 'CLOB',
  3423. 'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
  3424. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3425. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3426. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
  3427. }
  3428. def LEFT_JOIN(self):
  3429. return 'LEFT OUTER JOIN'
  3430. def RANDOM(self):
  3431. return 'RANDOM()'
  3432. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3433. if limitby:
  3434. (lmin, lmax) = limitby
  3435. fetch_amt = lmax - lmin
  3436. if fetch_amt:
  3437. sql_s += ' FIRST %d ' % (fetch_amt, )
  3438. if lmin:
  3439. # Requires Ingres 9.2+
  3440. sql_o += ' OFFSET %d' % (lmin, )
  3441. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
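# Illustrative sketch (not executed): Ingres supports both modifiers, so
# limitby=(10, 20) renders roughly as 'SELECT  FIRST 10  ... OFFSET 10;'
# -- FIRST carries the page size, OFFSET (Ingres 9.2+) the starting row.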
  3442. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3443. credential_decoder=IDENTITY, driver_args={},
  3444. adapter_args={}, do_connect=True, after_connection=None):
  3445. self.db = db
  3446. self.dbengine = "ingres"
  3447. self._driver = pyodbc
  3448. self.uri = uri
  3449. if do_connect: self.find_driver(adapter_args,uri)
  3450. self.pool_size = pool_size
  3451. self.folder = folder
  3452. self.db_codec = db_codec
  3453. self._after_connection = after_connection
  3454. self.find_or_make_work_folder()
  3455. connstr = uri.split(':', 1)[1]
  3456. # Simple URI processing
  3457. connstr = connstr.lstrip()
  3458. while connstr.startswith('/'):
  3459. connstr = connstr[1:]
  3460. if '=' in connstr:
  3461. # Assume we have a regular ODBC connection string and just use it
  3462. ruri = connstr
  3463. else:
  3464. # Assume only (local) dbname is passed in with OS auth
  3465. database_name = connstr
  3466. default_driver_name = 'Ingres'
  3467. vnode = '(local)'
  3468. servertype = 'ingres'
  3469. ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
  3470. def connector(cnxn=ruri,driver_args=driver_args):
  3471. return self.driver.connect(cnxn,**driver_args)
  3472. self.connector = connector
  3473. # TODO if version is >= 10, set types['id'] to Identity column, see http://community.actian.com/wiki/Using_Ingres_Identity_Columns
  3474. if do_connect: self.reconnect()
  3475. def create_sequence_and_triggers(self, query, table, **args):
  3476. # post create table auto inc code (if needed)
  3477. # modify table to btree for performance....
  3478. # Older Ingres releases could use rule/trigger like Oracle above.
  3479. if hasattr(table,'_primarykey'):
  3480. modify_tbl_sql = 'modify %s to btree unique on %s' % \
  3481. (table._tablename,
  3482. ', '.join(["'%s'" % x for x in table.primarykey]))
  3483. self.execute(modify_tbl_sql)
  3484. else:
  3485. tmp_seqname='%s_iisq' % table._tablename
  3486. query=query.replace(INGRES_SEQNAME, tmp_seqname)
  3487. self.execute('create sequence %s' % tmp_seqname)
  3488. self.execute(query)
  3489. self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
  3490. def lastrowid(self,table):
  3491. tmp_seqname='%s_iisq' % table
  3492. self.execute('select current value for %s' % tmp_seqname)
  3493. return int(self.cursor.fetchone()[0]) # don't really need int type cast here...
  3494. def integrity_error_class(self):
  3495. return self._driver.IntegrityError
  3496. class IngresUnicodeAdapter(IngresAdapter):
  3497. drivers = ('pyodbc',)
  3498. types = {
  3499. 'boolean': 'CHAR(1)',
  3500. 'string': 'NVARCHAR(%(length)s)',
  3501. 'text': 'NCLOB',
  3502. 'json': 'NCLOB',
3503. 'password': 'NVARCHAR(%(length)s)', ## unclear whether this should be utf8, nvarchar, or even bytes
  3504. 'blob': 'BLOB',
  3505. 'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
  3506. 'integer': 'INTEGER4', # or int8...
  3507. 'bigint': 'BIGINT',
  3508. 'float': 'FLOAT',
  3509. 'double': 'FLOAT8',
  3510. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3511. 'date': 'ANSIDATE',
  3512. 'time': 'TIME WITHOUT TIME ZONE',
  3513. 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
  3514. 'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
  3515. 'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3516. 'list:integer': 'NCLOB',
  3517. 'list:string': 'NCLOB',
  3518. 'list:reference': 'NCLOB',
  3519. 'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
  3520. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3521. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3522. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
  3523. }
  3524. class SAPDBAdapter(BaseAdapter):
  3525. drivers = ('sapdb',)
  3526. support_distributed_transaction = False
  3527. types = {
  3528. 'boolean': 'CHAR(1)',
  3529. 'string': 'VARCHAR(%(length)s)',
  3530. 'text': 'LONG',
  3531. 'json': 'LONG',
  3532. 'password': 'VARCHAR(%(length)s)',
  3533. 'blob': 'LONG',
  3534. 'upload': 'VARCHAR(%(length)s)',
  3535. 'integer': 'INT',
  3536. 'bigint': 'BIGINT',
  3537. 'float': 'FLOAT',
  3538. 'double': 'DOUBLE PRECISION',
  3539. 'decimal': 'FIXED(%(precision)s,%(scale)s)',
  3540. 'date': 'DATE',
  3541. 'time': 'TIME',
  3542. 'datetime': 'TIMESTAMP',
  3543. 'id': 'INT PRIMARY KEY',
  3544. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3545. 'list:integer': 'LONG',
  3546. 'list:string': 'LONG',
  3547. 'list:reference': 'LONG',
  3548. 'big-id': 'BIGINT PRIMARY KEY',
  3549. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3550. }
  3551. def sequence_name(self,table):
  3552. return '%s_id_Seq' % table
  3553. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3554. if limitby:
  3555. (lmin, lmax) = limitby
  3556. if len(sql_w) > 1:
  3557. sql_w_row = sql_w + ' AND w_row > %i' % lmin
  3558. else:
  3559. sql_w_row = 'WHERE w_row > %i' % lmin
3560. return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
  3561. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3562. def create_sequence_and_triggers(self, query, table, **args):
  3563. # following lines should only be executed if table._sequence_name does not exist
  3564. self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
  3565. self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
  3566. % (table._tablename, table._id.name, table._sequence_name))
  3567. self.execute(query)
  3568. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
  3569. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3570. credential_decoder=IDENTITY, driver_args={},
  3571. adapter_args={}, do_connect=True, after_connection=None):
  3572. self.db = db
  3573. self.dbengine = "sapdb"
  3574. self.uri = uri
  3575. if do_connect: self.find_driver(adapter_args,uri)
  3576. self.pool_size = pool_size
  3577. self.folder = folder
  3578. self.db_codec = db_codec
  3579. self._after_connection = after_connection
  3580. self.find_or_make_work_folder()
  3581. ruri = uri.split('://',1)[1]
  3582. m = self.REGEX_URI.match(ruri)
  3583. if not m:
  3584. raise SyntaxError("Invalid URI string in DAL")
  3585. user = credential_decoder(m.group('user'))
  3586. if not user:
  3587. raise SyntaxError('User required')
  3588. password = credential_decoder(m.group('password'))
  3589. if not password:
  3590. password = ''
  3591. host = m.group('host')
  3592. if not host:
  3593. raise SyntaxError('Host name required')
  3594. db = m.group('db')
  3595. if not db:
  3596. raise SyntaxError('Database name required')
  3597. def connector(user=user, password=password, database=db,
  3598. host=host, driver_args=driver_args):
  3599. return self.driver.Connection(user, password, database,
  3600. host, **driver_args)
  3601. self.connector = connector
  3602. if do_connect: self.reconnect()
  3603. def lastrowid(self,table):
  3604. self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
  3605. return int(self.cursor.fetchone()[0])
  3606. class CubridAdapter(MySQLAdapter):
  3607. drivers = ('cubriddb',)
  3608. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
  3609. def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
  3610. credential_decoder=IDENTITY, driver_args={},
  3611. adapter_args={}, do_connect=True, after_connection=None):
  3612. self.db = db
  3613. self.dbengine = "cubrid"
  3614. self.uri = uri
  3615. if do_connect: self.find_driver(adapter_args,uri)
  3616. self.pool_size = pool_size
  3617. self.folder = folder
  3618. self.db_codec = db_codec
  3619. self._after_connection = after_connection
  3620. self.find_or_make_work_folder()
  3621. ruri = uri.split('://',1)[1]
  3622. m = self.REGEX_URI.match(ruri)
  3623. if not m:
  3624. raise SyntaxError(
  3625. "Invalid URI string in DAL: %s" % self.uri)
  3626. user = credential_decoder(m.group('user'))
  3627. if not user:
  3628. raise SyntaxError('User required')
  3629. password = credential_decoder(m.group('password'))
  3630. if not password:
  3631. password = ''
  3632. host = m.group('host')
  3633. if not host:
  3634. raise SyntaxError('Host name required')
  3635. db = m.group('db')
  3636. if not db:
  3637. raise SyntaxError('Database name required')
  3638. port = int(m.group('port') or '30000')
  3639. charset = m.group('charset') or 'utf8'
3641. # user and password were already decoded by credential_decoder above
3642. def connector(host=host,port=port,db=db,
3643. user=user,passwd=password,driver_args=driver_args):
  3644. return self.driver.connect(host,port,db,user,passwd,**driver_args)
  3645. self.connector = connector
  3646. if do_connect: self.reconnect()
  3647. def after_connection(self):
  3648. self.execute('SET FOREIGN_KEY_CHECKS=1;')
  3649. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  3650. ######## GAE MySQL ##########
  3651. class DatabaseStoredFile:
  3652. web2py_filesystem = False
  3653. def escape(self,obj):
  3654. return self.db._adapter.escape(obj)
  3655. def __init__(self,db,filename,mode):
  3656. if not db._adapter.dbengine in ('mysql', 'postgres'):
  3657. raise RuntimeError("only MySQL/Postgres can store metadata .table files in database for now")
  3658. self.db = db
  3659. self.filename = filename
  3660. self.mode = mode
  3661. if not self.web2py_filesystem:
  3662. if db._adapter.dbengine == 'mysql':
  3663. sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
  3664. elif db._adapter.dbengine == 'postgres':
  3665. sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
  3666. self.db.executesql(sql)
  3667. DatabaseStoredFile.web2py_filesystem = True
  3668. self.p=0
  3669. self.data = ''
  3670. if mode in ('r','rw','a'):
  3671. query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
  3672. % filename
  3673. rows = self.db.executesql(query)
  3674. if rows:
  3675. self.data = rows[0][0]
  3676. elif exists(filename):
  3677. datafile = open(filename, 'r')
  3678. try:
  3679. self.data = datafile.read()
  3680. finally:
  3681. datafile.close()
  3682. elif mode in ('r','rw'):
  3683. raise RuntimeError("File %s does not exist" % filename)
  3684. def read(self, bytes):
  3685. data = self.data[self.p:self.p+bytes]
  3686. self.p += len(data)
  3687. return data
  3688. def readline(self):
  3689. i = self.data.find('\n',self.p)+1
  3690. if i>0:
  3691. data, self.p = self.data[self.p:i], i
  3692. else:
  3693. data, self.p = self.data[self.p:], len(self.data)
  3694. return data
  3695. def write(self,data):
  3696. self.data += data
  3697. def close_connection(self):
  3698. if self.db is not None:
  3699. self.db.executesql(
  3700. "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
  3701. query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
  3702. % (self.filename, self.data.replace("'","''"))
  3703. self.db.executesql(query)
  3704. self.db.commit()
  3705. self.db = None
  3706. def close(self):
  3707. self.close_connection()
  3708. @staticmethod
  3709. def exists(db, filename):
  3710. if exists(filename):
  3711. return True
  3712. query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
  3713. if db.executesql(query):
  3714. return True
  3715. return False
  3716. class UseDatabaseStoredFile:
  3717. def file_exists(self, filename):
  3718. return DatabaseStoredFile.exists(self.db,filename)
  3719. def file_open(self, filename, mode='rb', lock=True):
  3720. return DatabaseStoredFile(self.db,filename,mode)
  3721. def file_close(self, fileobj):
  3722. fileobj.close_connection()
  3723. def file_delete(self,filename):
  3724. query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
  3725. self.db.executesql(query)
  3726. self.db.commit()
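# Illustrative sketch (not executed): this mixin lets adapters running on
# read-only filesystems (e.g. GoogleSQLAdapter below) keep the '<table>.table'
# migration metadata that web2py normally writes to disk inside the
# web2py_filesystem table instead, keyed by file path.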
  3727. class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
  3728. uploads_in_blob = True
  3729. REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
  3730. def __init__(self, db, uri='google:sql://realm:domain/database',
  3731. pool_size=0, folder=None, db_codec='UTF-8',
  3732. credential_decoder=IDENTITY, driver_args={},
  3733. adapter_args={}, do_connect=True, after_connection=None):
  3734. self.db = db
  3735. self.dbengine = "mysql"
  3736. self.uri = uri
  3737. self.pool_size = pool_size
  3738. self.db_codec = db_codec
  3739. self._after_connection = after_connection
  3740. self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
  3741. os.sep+'applications'+os.sep,1)[1])
  3742. ruri = uri.split("://")[1]
  3743. m = self.REGEX_URI.match(ruri)
  3744. if not m:
  3745. raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
  3746. instance = credential_decoder(m.group('instance'))
  3747. self.dbstring = db = credential_decoder(m.group('db'))
  3748. driver_args['instance'] = instance
  3749. if not 'charset' in driver_args:
  3750. driver_args['charset'] = 'utf8'
  3751. self.createdb = createdb = adapter_args.get('createdb',True)
  3752. if not createdb:
  3753. driver_args['database'] = db
  3754. def connector(driver_args=driver_args):
  3755. return rdbms.connect(**driver_args)
  3756. self.connector = connector
  3757. if do_connect: self.reconnect()
  3758. def after_connection(self):
  3759. if self.createdb:
  3760. # self.execute('DROP DATABASE %s' % self.dbstring)
  3761. self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
  3762. self.execute('USE %s' % self.dbstring)
  3763. self.execute("SET FOREIGN_KEY_CHECKS=1;")
  3764. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  3765. def execute(self, command, *a, **b):
  3766. return self.log_execute(command.decode('utf8'), *a, **b)
  3767. class NoSQLAdapter(BaseAdapter):
  3768. can_select_for_update = False
  3769. @staticmethod
  3770. def to_unicode(obj):
  3771. if isinstance(obj, str):
  3772. return obj.decode('utf8')
  3773. elif not isinstance(obj, unicode):
  3774. return unicode(obj)
  3775. return obj
  3776. def id_query(self, table):
  3777. return table._id > 0
  3778. def represent(self, obj, fieldtype):
  3779. field_is_type = fieldtype.startswith
  3780. if isinstance(obj, CALLABLETYPES):
  3781. obj = obj()
  3782. if isinstance(fieldtype, SQLCustomType):
  3783. return fieldtype.encoder(obj)
  3784. if isinstance(obj, (Expression, Field)):
  3785. raise SyntaxError("non supported on GAE")
  3786. if self.dbengine == 'google:datastore':
  3787. if isinstance(fieldtype, gae.Property):
  3788. return obj
  3789. is_string = isinstance(fieldtype,str)
  3790. is_list = is_string and field_is_type('list:')
  3791. if is_list:
  3792. if not obj:
  3793. obj = []
  3794. if not isinstance(obj, (list, tuple)):
  3795. obj = [obj]
  3796. if obj == '' and not \
  3797. (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
  3798. return None
  3799. if not obj is None:
  3800. if isinstance(obj, list) and not is_list:
  3801. obj = [self.represent(o, fieldtype) for o in obj]
  3802. elif fieldtype in ('integer','bigint','id'):
  3803. obj = long(obj)
  3804. elif fieldtype == 'double':
  3805. obj = float(obj)
  3806. elif is_string and field_is_type('reference'):
  3807. if isinstance(obj, (Row, Reference)):
  3808. obj = obj['id']
  3809. obj = long(obj)
  3810. elif fieldtype == 'boolean':
  3811. if obj and not str(obj)[0].upper() in '0F':
  3812. obj = True
  3813. else:
  3814. obj = False
  3815. elif fieldtype == 'date':
  3816. if not isinstance(obj, datetime.date):
  3817. (y, m, d) = map(int,str(obj).strip().split('-'))
  3818. obj = datetime.date(y, m, d)
  3819. elif isinstance(obj,datetime.datetime):
  3820. (y, m, d) = (obj.year, obj.month, obj.day)
  3821. obj = datetime.date(y, m, d)
  3822. elif fieldtype == 'time':
  3823. if not isinstance(obj, datetime.time):
  3824. time_items = map(int,str(obj).strip().split(':')[:3])
  3825. if len(time_items) == 3:
  3826. (h, mi, s) = time_items
  3827. else:
  3828. (h, mi, s) = time_items + [0]
  3829. obj = datetime.time(h, mi, s)
  3830. elif fieldtype == 'datetime':
  3831. if not isinstance(obj, datetime.datetime):
  3832. (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
  3833. time_items = map(int,str(obj)[11:].strip().split(':')[:3])
  3834. while len(time_items)<3:
  3835. time_items.append(0)
  3836. (h, mi, s) = time_items
  3837. obj = datetime.datetime(y, m, d, h, mi, s)
  3838. elif fieldtype == 'blob':
  3839. pass
  3840. elif fieldtype == 'json':
  3841. obj = self.to_unicode(obj)
  3842. if have_serializers:
  3843. obj = serializers.loads_json(obj)
  3844. elif simplejson:
  3845. obj = simplejson.loads(obj)
  3846. else:
  3847. raise RuntimeError("missing simplejson")
  3848. elif is_string and field_is_type('list:string'):
  3849. return map(self.to_unicode,obj)
  3850. elif is_list:
  3851. return map(int,obj)
  3852. else:
  3853. obj = self.to_unicode(obj)
  3854. return obj
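# Illustrative sketch (not executed): unlike BaseAdapter.represent, which
# renders SQL literals, this coerces values to native Python/datastore
# types, e.g. represent('2012-01-31', 'date') -> datetime.date(2012, 1, 31)
# and represent('1', 'boolean') -> True.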
  3855. def _insert(self,table,fields):
  3856. return 'insert %s in %s' % (fields, table)
  3857. def _count(self,query,distinct=None):
  3858. return 'count %s' % repr(query)
  3859. def _select(self,query,fields,attributes):
  3860. return 'select %s where %s' % (repr(fields), repr(query))
  3861. def _delete(self,tablename, query):
  3862. return 'delete %s where %s' % (repr(tablename),repr(query))
  3863. def _update(self,tablename,query,fields):
  3864. return 'update %s (%s) where %s' % (repr(tablename),
  3865. repr(fields),repr(query))
  3866. def commit(self):
  3867. """
  3868. remember: no transactions on many NoSQL
  3869. """
  3870. pass
  3871. def rollback(self):
  3872. """
  3873. remember: no transactions on many NoSQL
  3874. """
  3875. pass
  3876. def close_connection(self):
  3877. """
  3878. remember: no transactions on many NoSQL
  3879. """
  3880. pass
  3881. # these functions should never be called!
  3882. def OR(self,first,second): raise SyntaxError("Not supported")
  3883. def AND(self,first,second): raise SyntaxError("Not supported")
  3884. def AS(self,first,second): raise SyntaxError("Not supported")
  3885. def ON(self,first,second): raise SyntaxError("Not supported")
  3886. def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
  3887. def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
  3888. def ADD(self,first,second): raise SyntaxError("Not supported")
  3889. def SUB(self,first,second): raise SyntaxError("Not supported")
  3890. def MUL(self,first,second): raise SyntaxError("Not supported")
  3891. def DIV(self,first,second): raise SyntaxError("Not supported")
  3892. def LOWER(self,first): raise SyntaxError("Not supported")
  3893. def UPPER(self,first): raise SyntaxError("Not supported")
  3894. def EXTRACT(self,first,what): raise SyntaxError("Not supported")
  3895. def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
  3896. def LEFT_JOIN(self): raise SyntaxError("Not supported")
  3897. def RANDOM(self): raise SyntaxError("Not supported")
  3898. def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
  3899. def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
  3900. def ILIKE(self,first,second): raise SyntaxError("Not supported")
  3901. def drop(self,table,mode): raise SyntaxError("Not supported")
  3902. def alias(self,table,alias): raise SyntaxError("Not supported")
  3903. def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
  3904. def distributed_transaction_begin(self,key): raise SyntaxError("Not supported")
  3905. def prepare(self,key): raise SyntaxError("Not supported")
  3906. def commit_prepared(self,key): raise SyntaxError("Not supported")
  3907. def rollback_prepared(self,key): raise SyntaxError("Not supported")
  3908. def concat_add(self,table): raise SyntaxError("Not supported")
  3909. def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
  3910. def create_sequence_and_triggers(self, query, table, **args): pass
  3911. def log_execute(self,*a,**b): raise SyntaxError("Not supported")
  3912. def execute(self,*a,**b): raise SyntaxError("Not supported")
  3913. def represent_exceptions(self, obj, fieldtype): raise SyntaxError("Not supported")
  3914. def lastrowid(self,table): raise SyntaxError("Not supported")
  3915. def integrity_error_class(self): raise SyntaxError("Not supported")
  3916. def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
  3917. class GAEF(object):
  3918. def __init__(self,name,op,value,apply):
  3919. self.name=name=='id' and '__key__' or name
  3920. self.op=op
  3921. self.value=value
  3922. self.apply=apply
  3923. def __repr__(self):
  3924. return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
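# Illustrative sketch (not executed): a condition like db.person.id > 5
# expands to [GAEF('__key__', '>', <Key>, <lambda>)]: the attribute name
# (id is rewritten to '__key__'), the datastore operator, the comparison
# value, and a Python predicate for filters evaluated in memory.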
  3925. class GoogleDatastoreAdapter(NoSQLAdapter):
  3926. uploads_in_blob = True
  3927. types = {}
  3928. def file_exists(self, filename): pass
  3929. def file_open(self, filename, mode='rb', lock=True): pass
  3930. def file_close(self, fileobj): pass
  3931. REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
  3932. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3933. credential_decoder=IDENTITY, driver_args={},
  3934. adapter_args={}, do_connect=True, after_connection=None):
  3935. self.types.update({
  3936. 'boolean': gae.BooleanProperty,
  3937. 'string': (lambda: gae.StringProperty(multiline=True)),
  3938. 'text': gae.TextProperty,
  3939. 'json': gae.TextProperty,
  3940. 'password': gae.StringProperty,
  3941. 'blob': gae.BlobProperty,
  3942. 'upload': gae.StringProperty,
  3943. 'integer': gae.IntegerProperty,
  3944. 'bigint': gae.IntegerProperty,
  3945. 'float': gae.FloatProperty,
  3946. 'double': gae.FloatProperty,
  3947. 'decimal': GAEDecimalProperty,
  3948. 'date': gae.DateProperty,
  3949. 'time': gae.TimeProperty,
  3950. 'datetime': gae.DateTimeProperty,
  3951. 'id': None,
  3952. 'reference': gae.IntegerProperty,
  3953. 'list:string': (lambda: gae.StringListProperty(default=None)),
  3954. 'list:integer': (lambda: gae.ListProperty(int,default=None)),
  3955. 'list:reference': (lambda: gae.ListProperty(int,default=None)),
  3956. })
  3957. self.db = db
  3958. self.uri = uri
  3959. self.dbengine = 'google:datastore'
  3960. self.folder = folder
  3961. db['_lastsql'] = ''
  3962. self.db_codec = 'UTF-8'
  3963. self._after_connection = after_connection
  3964. self.pool_size = 0
  3965. match = self.REGEX_NAMESPACE.match(uri)
  3966. if match:
  3967. namespace_manager.set_namespace(match.group('namespace'))
  3968. def parse_id(self, value, field_type):
  3969. return value
  3970. def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
  3971. myfields = {}
  3972. for field in table:
  3973. if isinstance(polymodel,Table) and field.name in polymodel.fields():
  3974. continue
  3975. attr = {}
  3976. if isinstance(field.custom_qualifier, dict):
3977. # these are custom properties to add to the GAE field declaration
  3978. attr = field.custom_qualifier
  3979. field_type = field.type
  3980. if isinstance(field_type, SQLCustomType):
  3981. ftype = self.types[field_type.native or field_type.type](**attr)
  3982. elif isinstance(field_type, gae.Property):
  3983. ftype = field_type
  3984. elif field_type.startswith('id'):
  3985. continue
  3986. elif field_type.startswith('decimal'):
  3987. precision, scale = field_type[7:].strip('()').split(',')
  3988. precision = int(precision)
  3989. scale = int(scale)
  3990. ftype = GAEDecimalProperty(precision, scale, **attr)
  3991. elif field_type.startswith('reference'):
  3992. if field.notnull:
  3993. attr = dict(required=True)
  3994. referenced = field_type[10:].strip()
  3995. ftype = self.types[field_type[:9]](referenced, **attr)
  3996. elif field_type.startswith('list:reference'):
  3997. if field.notnull:
  3998. attr['required'] = True
  3999. referenced = field_type[15:].strip()
  4000. ftype = self.types[field_type[:14]](**attr)
  4001. elif field_type.startswith('list:'):
  4002. ftype = self.types[field_type](**attr)
  4003. elif not field_type in self.types\
  4004. or not self.types[field_type]:
  4005. raise SyntaxError('Field: unknown field type: %s' % field_type)
  4006. else:
  4007. ftype = self.types[field_type](**attr)
  4008. myfields[field.name] = ftype
  4009. if not polymodel:
  4010. table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
  4011. elif polymodel==True:
  4012. table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
  4013. elif isinstance(polymodel,Table):
  4014. table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
  4015. else:
  4016. raise SyntaxError("polymodel must be None, True, a table or a tablename")
  4017. return None
  4018. def expand(self,expression,field_type=None):
  4019. if isinstance(expression,Field):
  4020. if expression.type in ('text', 'blob', 'json'):
  4021. raise SyntaxError('AppEngine does not index by: %s' % expression.type)
  4022. return expression.name
  4023. elif isinstance(expression, (Expression, Query)):
  4024. if not expression.second is None:
  4025. return expression.op(expression.first, expression.second)
  4026. elif not expression.first is None:
  4027. return expression.op(expression.first)
  4028. else:
  4029. return expression.op()
  4030. elif field_type:
  4031. return self.represent(expression,field_type)
  4032. elif isinstance(expression,(list,tuple)):
  4033. return ','.join([self.represent(item,field_type) for item in expression])
  4034. else:
  4035. return str(expression)
  4036. ### TODO from gql.py Expression
  4037. def AND(self,first,second):
  4038. a = self.expand(first)
  4039. b = self.expand(second)
  4040. if b[0].name=='__key__' and a[0].name!='__key__':
  4041. return b+a
  4042. return a+b
  4043. def EQ(self,first,second=None):
  4044. if isinstance(second, Key):
  4045. return [GAEF(first.name,'=',second,lambda a,b:a==b)]
  4046. return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
  4047. def NE(self,first,second=None):
  4048. if first.type != 'id':
  4049. return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
  4050. else:
  4051. if not second is None:
  4052. second = Key.from_path(first._tablename, long(second))
  4053. return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
  4054. def LT(self,first,second=None):
  4055. if first.type != 'id':
  4056. return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
  4057. else:
  4058. second = Key.from_path(first._tablename, long(second))
  4059. return [GAEF(first.name,'<',second,lambda a,b:a<b)]
  4060. def LE(self,first,second=None):
  4061. if first.type != 'id':
  4062. return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
  4063. else:
  4064. second = Key.from_path(first._tablename, long(second))
  4065. return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
  4066. def GT(self,first,second=None):
  4067. if first.type != 'id' or second==0 or second == '0':
  4068. return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
  4069. else:
  4070. second = Key.from_path(first._tablename, long(second))
  4071. return [GAEF(first.name,'>',second,lambda a,b:a>b)]
  4072. def GE(self,first,second=None):
  4073. if first.type != 'id':
  4074. return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
  4075. else:
  4076. second = Key.from_path(first._tablename, long(second))
  4077. return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
  4078. def INVERT(self,first):
  4079. return '-%s' % first.name
  4080. def COMMA(self,first,second):
  4081. return '%s, %s' % (self.expand(first),self.expand(second))
  4082. def BELONGS(self,first,second=None):
  4083. if not isinstance(second,(list, tuple)):
  4084. raise SyntaxError("Not supported")
  4085. if first.type != 'id':
  4086. return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
  4087. else:
  4088. second = [Key.from_path(first._tablename, int(i)) for i in second]
  4089. return [GAEF(first.name,'in',second,lambda a,b:a in b)]
  4090. def CONTAINS(self,first,second,case_sensitive=False):
  4091. # silently ignoring: GAE can only do case sensitive matches!
  4092. if not first.type.startswith('list:'):
  4093. raise SyntaxError("Not supported")
  4094. return [GAEF(first.name,'=',self.expand(second,first.type[5:]),lambda a,b:b in a)]
  4095. def NOT(self,first):
  4096. nops = { self.EQ: self.NE,
  4097. self.NE: self.EQ,
  4098. self.LT: self.GE,
  4099. self.GT: self.LE,
  4100. self.LE: self.GT,
  4101. self.GE: self.LT}
  4102. if not isinstance(first,Query):
4103. raise SyntaxError("Not supported")
  4104. nop = nops.get(first.op,None)
  4105. if not nop:
4106. raise SyntaxError("Not supported %s" % first.op.__name__)
  4107. first.op = nop
  4108. return self.expand(first)
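# A sketch of the operator swap above (hypothetical table 'person'):
# negating a query flips its comparison operator in place, since the
# datastore has no native NOT:
# >>> q = db.person.id > 3
# >>> db._adapter.NOT(q)   # q.op becomes LE, i.e. behaves like id <= 3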
  4109. def truncate(self,table,mode):
  4110. self.db(table._id).delete()
  4111. def select_raw(self,query,fields=None,attributes=None):
  4112. db = self.db
  4113. fields = fields or []
  4114. attributes = attributes or {}
  4115. args_get = attributes.get
  4116. new_fields = []
  4117. for item in fields:
  4118. if isinstance(item,SQLALL):
  4119. new_fields += item._table
  4120. else:
  4121. new_fields.append(item)
  4122. fields = new_fields
  4123. if query:
  4124. tablename = self.get_table(query)
  4125. elif fields:
  4126. tablename = fields[0].tablename
  4127. query = db._adapter.id_query(fields[0].table)
  4128. else:
  4129. raise SyntaxError("Unable to determine a tablename")
  4130. if query:
  4131. if use_common_filters(query):
  4132. query = self.common_filter(query,[tablename])
  4133. #tableobj is a GAE Model class (or subclass)
  4134. tableobj = db[tablename]._tableobj
  4135. filters = self.expand(query)
  4136. projection = None
  4137. if len(db[tablename].fields) == len(fields):
  4138. #getting all fields, not a projection query
  4139. projection = None
  4140. elif args_get('projection') == True:
  4141. projection = []
  4142. for f in fields:
  4143. if f.type in ['text', 'blob', 'json']:
  4144. raise SyntaxError(
  4145. "text and blob field types not allowed in projection queries")
  4146. else:
  4147. projection.append(f.name)
4148. # projections can't include 'id'.
  4149. # it will be added to the result later
  4150. query_projection = [
  4151. p for p in projection if \
  4152. p != db[tablename]._id.name] if projection \
  4153. else None
  4154. cursor = None
  4155. if isinstance(args_get('reusecursor'), str):
  4156. cursor = args_get('reusecursor')
  4157. items = gae.Query(tableobj, projection=query_projection,
  4158. cursor=cursor)
  4159. for filter in filters:
  4160. if args_get('projection') == True and \
  4161. filter.name in query_projection and \
  4162. filter.op in ['=', '<=', '>=']:
  4163. raise SyntaxError(
  4164. "projection fields cannot have equality filters")
  4165. if filter.name=='__key__' and filter.op=='>' and filter.value==0:
  4166. continue
  4167. elif filter.name=='__key__' and filter.op=='=':
  4168. if filter.value==0:
  4169. items = []
  4170. elif isinstance(filter.value, Key):
4171. # key queries return a class instance,
  4172. # can't use projection
  4173. # extra values will be ignored in post-processing later
  4174. item = tableobj.get(filter.value)
  4175. items = (item and [item]) or []
  4176. else:
4177. # key queries return a class instance,
  4178. # can't use projection
  4179. # extra values will be ignored in post-processing later
  4180. item = tableobj.get_by_id(filter.value)
  4181. items = (item and [item]) or []
  4182. elif isinstance(items,list): # i.e. there is a single record!
4183. items = [i for i in items if filter.apply(
4184. getattr(i,filter.name),filter.value)]
  4185. else:
  4186. if filter.name=='__key__' and filter.op != 'in':
  4187. items.order('__key__')
  4188. items = items.filter('%s %s' % (filter.name,filter.op),
  4189. filter.value)
  4190. if not isinstance(items,list):
  4191. if args_get('left', None):
  4192. raise SyntaxError('Set: no left join in appengine')
  4193. if args_get('groupby', None):
  4194. raise SyntaxError('Set: no groupby in appengine')
  4195. orderby = args_get('orderby', False)
  4196. if orderby:
  4197. ### THIS REALLY NEEDS IMPROVEMENT !!!
  4198. if isinstance(orderby, (list, tuple)):
  4199. orderby = xorify(orderby)
  4200. if isinstance(orderby,Expression):
  4201. orderby = self.expand(orderby)
  4202. orders = orderby.split(', ')
  4203. for order in orders:
  4204. order={'-id':'-__key__','id':'__key__'}.get(order,order)
  4205. items = items.order(order)
  4206. if args_get('limitby', None):
  4207. (lmin, lmax) = attributes['limitby']
  4208. (limit, offset) = (lmax - lmin, lmin)
  4209. rows = items.fetch(limit,offset=offset)
  4210. #cursor is only useful if there was a limit and we didn't return
  4211. # all results
  4212. if args_get('reusecursor'):
  4213. db['_lastcursor'] = items.cursor()
  4214. items = rows
  4215. return (items, tablename, projection or db[tablename].fields)
  4216. def select(self,query,fields,attributes):
  4217. """
  4218. This is the GAE version of select. some notes to consider:
4219. - db['_lastsql'] is not set because there is no SQL statement string
  4220. for a GAE query
  4221. - 'nativeRef' is a magical fieldname used for self references on GAE
  4222. - optional attribute 'projection' when set to True will trigger
4223. use of GAE projection queries. note that GAE imposes rules on
4224. what is accepted: each projected field must be indexed,
4225. projection queries cannot contain blob or text fields, and a
4226. field used in an equality filter cannot also be projected. see https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
  4227. - optional attribute 'reusecursor' allows use of cursor with queries
  4228. that have the limitby attribute. Set the attribute to True for the
  4229. first query, set it to the value of db['_lastcursor'] to continue
  4230. a previous query. The user must save the cursor value between
  4231. requests, and the filters must be identical. It is up to the user
  4232. to follow google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
  4233. """
  4234. (items, tablename, fields) = self.select_raw(query,fields,attributes)
  4235. # self.db['_lastsql'] = self._select(query,fields,attributes)
  4236. rows = [[(t==self.db[tablename]._id.name and item) or \
  4237. (t=='nativeRef' and item) or getattr(item, t) \
  4238. for t in fields] for item in items]
  4239. colnames = ['%s.%s' % (tablename, t) for t in fields]
  4240. processor = attributes.get('processor',self.parse)
  4241. return processor(rows,fields,colnames,False)
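# A usage sketch of the 'projection' and 'reusecursor' attributes
# documented above (hypothetical table 'person', GAE runtime assumed):
# >>> rows = db(db.person.name == 'James').select(
# ...     db.person.name, projection=True,
# ...     limitby=(0, 10), reusecursor=True)
# >>> cursor = db['_lastcursor']  # persist this between requests
# >>> more = db(db.person.name == 'James').select(
# ...     db.person.name, projection=True,
# ...     limitby=(0, 10), reusecursor=cursor)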
  4242. def count(self,query,distinct=None,limit=None):
  4243. if distinct:
  4244. raise RuntimeError("COUNT DISTINCT not supported")
  4245. (items, tablename, fields) = self.select_raw(query)
  4246. # self.db['_lastsql'] = self._count(query)
  4247. try:
  4248. return len(items)
  4249. except TypeError:
  4250. return items.count(limit=limit)
  4251. def delete(self,tablename, query):
  4252. """
  4253. This function was changed on 2010-05-04 because according to
  4254. http://code.google.com/p/googleappengine/issues/detail?id=3119
  4255. GAE no longer supports deleting more than 1000 records.
  4256. """
  4257. # self.db['_lastsql'] = self._delete(tablename,query)
  4258. (items, tablename, fields) = self.select_raw(query)
4259. # items is either a list of records or a gae query
  4260. if not isinstance(items,list):
4261. #use a keys_only query to ensure that this runs as datastore
4262. # small operations
  4263. leftitems = items.fetch(1000, keys_only=True)
  4264. counter = 0
  4265. while len(leftitems):
  4266. counter += len(leftitems)
  4267. gae.delete(leftitems)
  4268. leftitems = items.fetch(1000, keys_only=True)
  4269. else:
  4270. counter = len(items)
  4271. gae.delete(items)
  4272. return counter
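# Because deletion is batched in 1000-key chunks as shown above, a
# bulk delete remains a single DAL call (hypothetical table 'person'):
# >>> removed = db(db.person.name == 'James').delete()
# where 'removed' is the total count summed over all batches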
  4273. def update(self,tablename,query,update_fields):
  4274. # self.db['_lastsql'] = self._update(tablename,query,update_fields)
  4275. (items, tablename, fields) = self.select_raw(query)
  4276. counter = 0
  4277. for item in items:
  4278. for field, value in update_fields:
  4279. setattr(item, field.name, self.represent(value,field.type))
  4280. item.put()
  4281. counter += 1
  4282. LOGGER.info(str(counter))
  4283. return counter
  4284. def insert(self,table,fields):
  4285. dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
  4286. # table._db['_lastsql'] = self._insert(table,fields)
  4287. tmp = table._tableobj(**dfields)
  4288. tmp.put()
  4289. rid = Reference(tmp.key().id())
  4290. (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key())
  4291. return rid
  4292. def bulk_insert(self,table,items):
  4293. parsed_items = []
  4294. for item in items:
  4295. dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
  4296. parsed_items.append(table._tableobj(**dfields))
  4297. gae.put(parsed_items)
  4298. return True
  4299. def uuid2int(uuidv):
  4300. return uuid.UUID(uuidv).int
  4301. def int2uuid(n):
  4302. return str(uuid.UUID(int=n))
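# These two helpers round-trip a UUID string and its 128-bit integer
# form (used by CouchDBAdapter.insert below to derive document ids):
# >>> uuid2int('00000000-0000-0000-0000-000000000001')
# 1
# >>> int2uuid(1)
# '00000000-0000-0000-0000-000000000001'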
  4303. class CouchDBAdapter(NoSQLAdapter):
  4304. drivers = ('couchdb',)
  4305. uploads_in_blob = True
  4306. types = {
  4307. 'boolean': bool,
  4308. 'string': str,
  4309. 'text': str,
  4310. 'json': str,
  4311. 'password': str,
  4312. 'blob': str,
  4313. 'upload': str,
  4314. 'integer': long,
  4315. 'bigint': long,
  4316. 'float': float,
  4317. 'double': float,
  4318. 'date': datetime.date,
  4319. 'time': datetime.time,
  4320. 'datetime': datetime.datetime,
  4321. 'id': long,
  4322. 'reference': long,
  4323. 'list:string': list,
  4324. 'list:integer': list,
  4325. 'list:reference': list,
  4326. }
  4327. def file_exists(self, filename): pass
  4328. def file_open(self, filename, mode='rb', lock=True): pass
  4329. def file_close(self, fileobj): pass
  4330. def expand(self,expression,field_type=None):
  4331. if isinstance(expression,Field):
  4332. if expression.type=='id':
  4333. return "%s._id" % expression.tablename
  4334. return BaseAdapter.expand(self,expression,field_type)
  4335. def AND(self,first,second):
  4336. return '(%s && %s)' % (self.expand(first),self.expand(second))
  4337. def OR(self,first,second):
  4338. return '(%s || %s)' % (self.expand(first),self.expand(second))
  4339. def EQ(self,first,second):
  4340. if second is None:
  4341. return '(%s == null)' % self.expand(first)
  4342. return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))
  4343. def NE(self,first,second):
  4344. if second is None:
  4345. return '(%s != null)' % self.expand(first)
  4346. return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))
  4347. def COMMA(self,first,second):
  4348. return '%s + %s' % (self.expand(first),self.expand(second))
  4349. def represent(self, obj, fieldtype):
  4350. value = NoSQLAdapter.represent(self, obj, fieldtype)
  4351. if fieldtype=='id':
  4352. return repr(str(int(value)))
  4353. elif fieldtype in ('date','time','datetime','boolean'):
  4354. return serializers.json(value)
  4355. return repr(not isinstance(value,unicode) and value \
  4356. or value and value.encode('utf8'))
  4357. def __init__(self,db,uri='couchdb://127.0.0.1:5984',
  4358. pool_size=0,folder=None,db_codec ='UTF-8',
  4359. credential_decoder=IDENTITY, driver_args={},
  4360. adapter_args={}, do_connect=True, after_connection=None):
  4361. self.db = db
  4362. self.uri = uri
  4363. if do_connect: self.find_driver(adapter_args)
  4364. self.dbengine = 'couchdb'
  4365. self.folder = folder
  4366. db['_lastsql'] = ''
  4367. self.db_codec = 'UTF-8'
  4368. self._after_connection = after_connection
  4369. self.pool_size = pool_size
  4370. url='http://'+uri[10:]
  4371. def connector(url=url,driver_args=driver_args):
  4372. return self.driver.Server(url,**driver_args)
  4373. self.reconnect(connector,cursor=False)
  4374. def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
  4375. if migrate:
  4376. try:
  4377. self.connection.create(table._tablename)
4378. except:
4379. pass # the database for this table may already exist
  4380. def insert(self,table,fields):
  4381. id = uuid2int(web2py_uuid())
  4382. ctable = self.connection[table._tablename]
  4383. values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
  4384. values['_id'] = str(id)
  4385. ctable.save(values)
  4386. return id
  4387. def _select(self,query,fields,attributes):
  4388. if not isinstance(query,Query):
  4389. raise SyntaxError("Not Supported")
  4390. for key in set(attributes.keys())-SELECT_ARGS:
  4391. raise SyntaxError('invalid select attribute: %s' % key)
  4392. new_fields=[]
  4393. for item in fields:
  4394. if isinstance(item,SQLALL):
  4395. new_fields += item._table
  4396. else:
  4397. new_fields.append(item)
  4398. def uid(fd):
  4399. return fd=='id' and '_id' or fd
  4400. def get(row,fd):
  4401. return fd=='id' and int(row['_id']) or row.get(fd,None)
  4402. fields = new_fields
  4403. tablename = self.get_table(query)
  4404. fieldnames = [f.name for f in (fields or self.db[tablename])]
  4405. colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
  4406. fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
  4407. fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
  4408. dict(t=tablename,
  4409. query=self.expand(query),
  4410. order='%s._id' % tablename,
  4411. fields=fields)
  4412. return fn, colnames
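# _select compiles the DAL query into a CouchDB temporary-view map
# function; e.g. a query like db(db.person.name=='James') over a
# hypothetical 'person' table would yield roughly:
# (function(person){if((person.name == 'James'))emit(person._id,[person._id,person.name]);})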
  4413. def select(self,query,fields,attributes):
  4414. if not isinstance(query,Query):
  4415. raise SyntaxError("Not Supported")
  4416. fn, colnames = self._select(query,fields,attributes)
  4417. tablename = colnames[0].split('.')[0]
  4418. ctable = self.connection[tablename]
  4419. rows = [cols['value'] for cols in ctable.query(fn)]
  4420. processor = attributes.get('processor',self.parse)
  4421. return processor(rows,fields,colnames,False)
  4422. def delete(self,tablename,query):
  4423. if not isinstance(query,Query):
  4424. raise SyntaxError("Not Supported")
  4425. if query.first.type=='id' and query.op==self.EQ:
  4426. id = query.second
4427. # the tablename argument must match the queried table
4428. assert(tablename == query.first.tablename)
  4429. ctable = self.connection[tablename]
  4430. try:
  4431. del ctable[str(id)]
  4432. return 1
  4433. except couchdb.http.ResourceNotFound:
  4434. return 0
  4435. else:
  4436. tablename = self.get_table(query)
  4437. rows = self.select(query,[self.db[tablename]._id],{})
  4438. ctable = self.connection[tablename]
  4439. for row in rows:
  4440. del ctable[str(row.id)]
  4441. return len(rows)
  4442. def update(self,tablename,query,fields):
  4443. if not isinstance(query,Query):
  4444. raise SyntaxError("Not Supported")
  4445. if query.first.type=='id' and query.op==self.EQ:
  4446. id = query.second
  4447. tablename = query.first.tablename
  4448. ctable = self.connection[tablename]
  4449. try:
  4450. doc = ctable[str(id)]
  4451. for key,value in fields:
  4452. doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
  4453. ctable.save(doc)
  4454. return 1
  4455. except couchdb.http.ResourceNotFound:
  4456. return 0
  4457. else:
  4458. tablename = self.get_table(query)
  4459. rows = self.select(query,[self.db[tablename]._id],{})
  4460. ctable = self.connection[tablename]
  4461. table = self.db[tablename]
  4462. for row in rows:
  4463. doc = ctable[str(row.id)]
  4464. for key,value in fields:
  4465. doc[key.name] = self.represent(value,table[key.name].type)
  4466. ctable.save(doc)
  4467. return len(rows)
  4468. def count(self,query,distinct=None):
  4469. if distinct:
  4470. raise RuntimeError("COUNT DISTINCT not supported")
  4471. if not isinstance(query,Query):
  4472. raise SyntaxError("Not Supported")
  4473. tablename = self.get_table(query)
  4474. rows = self.select(query,[self.db[tablename]._id],{})
  4475. return len(rows)
  4476. def cleanup(text):
  4477. """
  4478. validates that the given text is clean: only contains [0-9a-zA-Z_]
  4479. """
  4480. if not REGEX_ALPHANUMERIC.match(text):
  4481. raise SyntaxError('invalid table or field name: %s' % text)
  4482. return text
  4483. class MongoDBAdapter(NoSQLAdapter):
  4484. native_json = True
  4485. drivers = ('pymongo',)
  4486. uploads_in_blob = True
  4487. types = {
  4488. 'boolean': bool,
  4489. 'string': str,
  4490. 'text': str,
  4491. 'json': str,
  4492. 'password': str,
  4493. 'blob': str,
  4494. 'upload': str,
  4495. 'integer': long,
  4496. 'bigint': long,
  4497. 'float': float,
  4498. 'double': float,
  4499. 'date': datetime.date,
  4500. 'time': datetime.time,
  4501. 'datetime': datetime.datetime,
  4502. 'id': long,
  4503. 'reference': long,
  4504. 'list:string': list,
  4505. 'list:integer': list,
  4506. 'list:reference': list,
  4507. }
4508. error_messages = {"javascript_needed": "This must still be replaced" +
  4509. " with javascript in order to work."}
  4510. def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
  4511. pool_size=0, folder=None, db_codec ='UTF-8',
  4512. credential_decoder=IDENTITY, driver_args={},
  4513. adapter_args={}, do_connect=True, after_connection=None):
  4514. self.db = db
  4515. self.uri = uri
  4516. if do_connect: self.find_driver(adapter_args)
  4517. import random
  4518. from bson.objectid import ObjectId
  4519. from bson.son import SON
  4520. import pymongo.uri_parser
  4521. m = pymongo.uri_parser.parse_uri(uri)
  4522. self.SON = SON
  4523. self.ObjectId = ObjectId
  4524. self.random = random
  4525. self.dbengine = 'mongodb'
  4526. self.folder = folder
  4527. db['_lastsql'] = ''
  4528. self.db_codec = 'UTF-8'
  4529. self._after_connection = after_connection
  4530. self.pool_size = pool_size
4531. #this is the minimum number of replicas an insert/update
4532. # should wait for
  4533. self.minimumreplication = adapter_args.get('minimumreplication',0)
4534. # historically all inserts and selects were performed
4535. # asynchronously, but the default is now
4536. # synchronous, except when overruled by this adapter default or a
4537. # function parameter
  4538. self.safe = adapter_args.get('safe',True)
  4539. if isinstance(m,tuple):
  4540. m = {"database" : m[1]}
  4541. if m.get('database')==None:
  4542. raise SyntaxError("Database is required!")
  4543. def connector(uri=self.uri,m=m):
  4544. try:
  4545. # Connection() is deprecated
  4546. if hasattr(self.driver, "MongoClient"):
  4547. Connection = self.driver.MongoClient
  4548. else:
  4549. Connection = self.driver.Connection
  4550. return Connection(uri)[m.get('database')]
  4551. except self.driver.errors.ConnectionFailure:
  4552. inst = sys.exc_info()[1]
  4553. raise SyntaxError("The connection to " +
  4554. uri + " could not be made")
  4555. self.reconnect(connector,cursor=False)
  4556. def object_id(self, arg=None):
  4557. """ Convert input to a valid Mongodb ObjectId instance
  4558. self.object_id("<random>") -> ObjectId (not unique) instance """
  4559. if not arg:
  4560. arg = 0
  4561. if isinstance(arg, basestring):
  4562. # we assume an integer as default input
  4563. rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
  4564. if arg.isdigit() and (not rawhex):
  4565. arg = int(arg)
  4566. elif arg == "<random>":
  4567. arg = int("0x%sL" % \
  4568. "".join([self.random.choice("0123456789abcdef") \
  4569. for x in range(24)]), 0)
  4570. elif arg.isalnum():
  4571. if not arg.startswith("0x"):
  4572. arg = "0x%s" % arg
  4573. try:
  4574. arg = int(arg, 0)
  4575. except ValueError, e:
  4576. raise ValueError(
  4577. "invalid objectid argument string: %s" % e)
  4578. else:
  4579. raise ValueError("Invalid objectid argument string. " +
  4580. "Requires an integer or base 16 value")
  4581. elif isinstance(arg, self.ObjectId):
  4582. return arg
  4583. if not isinstance(arg, (int, long)):
  4584. raise TypeError("object_id argument must be of type " +
  4585. "ObjectId or an objectid representable integer")
4586. if arg == 0:
4587. hexvalue = "".zfill(24)
4588. else:
4589. hexvalue = hex(arg)[2:].replace("L", "").zfill(24) # pad to the 24 hex digits ObjectId expects
  4590. return self.ObjectId(hexvalue)
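# doctest-style sketches of the conversions above, assuming the
# zero-padding just applied (db._adapter being a MongoDBAdapter):
# >>> db._adapter.object_id(1)
# ObjectId('000000000000000000000001')
# >>> db._adapter.object_id('0x1')
# ObjectId('000000000000000000000001')
# >>> db._adapter.object_id('<random>')  # a random, not necessarily unique, id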
  4591. def represent(self, obj, fieldtype):
  4592. value = NoSQLAdapter.represent(self, obj, fieldtype)
  4593. if fieldtype =='date':
  4594. if value == None:
  4595. return value
  4596. # this piece of data can be stripped off based on the fieldtype
  4597. t = datetime.time(0, 0, 0)
4598. # mongodb doesn't have a date type, so dates must be stored
4599. # as datetime, string or integer
  4600. return datetime.datetime.combine(value, t)
  4601. elif fieldtype == 'time':
  4602. if value == None:
  4603. return value
4604. # this piece of data can be stripped off based on the fieldtype
  4605. d = datetime.date(2000, 1, 1)
4606. # mongodb doesn't have a time type, so times must be stored
4607. # as datetime, string or integer
  4608. return datetime.datetime.combine(d, value)
  4609. elif fieldtype == 'list:string' or \
  4610. fieldtype == 'list:integer' or \
  4611. fieldtype == 'list:reference':
  4612. return value
  4613. return value
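# A sketch of the widening above: dates and times become datetimes,
# since mongodb stores neither natively:
# >>> db._adapter.represent(datetime.date(2013, 1, 15), 'date')
# datetime.datetime(2013, 1, 15, 0, 0)
# >>> db._adapter.represent(datetime.time(12, 30), 'time')
# datetime.datetime(2000, 1, 1, 12, 30)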
4614. # 'safe' determines whether the request is performed
4615. # asynchronously or synchronously;
4616. # for safety, synchronous requests are used by default
  4617. def insert(self, table, fields, safe=None):
  4618. if safe==None:
  4619. safe = self.safe
  4620. ctable = self.connection[table._tablename]
  4621. values = dict()
  4622. for k, v in fields:
  4623. if not k.name in ["id", "safe"]:
  4624. fieldname = k.name
  4625. fieldtype = table[k.name].type
  4626. if ("reference" in fieldtype) or (fieldtype=="id"):
  4627. values[fieldname] = self.object_id(v)
  4628. else:
  4629. values[fieldname] = self.represent(v, fieldtype)
  4630. ctable.insert(values, safe=safe)
  4631. return int(str(values['_id']), 16)
  4632. def create_table(self, table, migrate=True, fake_migrate=False,
  4633. polymodel=None, isCapped=False):
  4634. if isCapped:
  4635. raise RuntimeError("Not implemented")
  4636. def count(self, query, distinct=None, snapshot=True):
  4637. if distinct:
  4638. raise RuntimeError("COUNT DISTINCT not supported")
  4639. if not isinstance(query,Query):
  4640. raise SyntaxError("Not Supported")
  4641. tablename = self.get_table(query)
  4642. return int(self.select(query,[self.db[tablename]._id], {},
  4643. count=True,snapshot=snapshot)['count'])
4644. # It might be faster to call pymongo's .count() directly,
4645. # i.e. connection[table].find(query).count(),
4646. # since that would reduce the returned set
  4648. def expand(self, expression, field_type=None):
  4649. if isinstance(expression, Query):
4650. # for any query using 'id':
4651. # set name as _id (as per pymongo/mongodb primary key)
4652. # convert second arg to an objectid field
4653. # (if it's not one already)
4654. # if second arg is 0, convert it to an objectid too
  4655. if isinstance(expression.first,Field) and \
  4656. ((expression.first.type == 'id') or \
  4657. ("reference" in expression.first.type)):
  4658. if expression.first.type == 'id':
  4659. expression.first.name = '_id'
  4660. # cast to Mongo ObjectId
  4661. expression.second = self.object_id(expression.second)
  4662. result = expression.op(expression.first, expression.second)
  4663. if isinstance(expression, Field):
  4664. if expression.type=='id':
  4665. result = "_id"
  4666. else:
  4667. result = expression.name
  4668. elif isinstance(expression, (Expression, Query)):
  4669. if not expression.second is None:
  4670. result = expression.op(expression.first, expression.second)
  4671. elif not expression.first is None:
  4672. result = expression.op(expression.first)
  4673. elif not isinstance(expression.op, str):
  4674. result = expression.op()
  4675. else:
  4676. result = expression.op
  4677. elif field_type:
  4678. result = str(self.represent(expression,field_type))
  4679. elif isinstance(expression,(list,tuple)):
  4680. result = ','.join(self.represent(item,field_type) for
  4681. item in expression)
  4682. else:
  4683. result = expression
  4684. return result
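# A sketch of what expand produces (hypothetical table 'person'):
# >>> db._adapter.expand(db.person.name == 'James')
# {'name': 'James'}
# >>> db._adapter.expand(db.person.id == 1)  # 'id' becomes '_id'
# {'_id': ObjectId('000000000000000000000001')}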
  4685. def _select(self, query, fields, attributes):
  4686. if 'for_update' in attributes:
  4687. logging.warn('mongodb does not support for_update')
  4688. for key in set(attributes.keys())-set(('limitby',
  4689. 'orderby','for_update')):
  4690. if attributes[key]!=None:
  4691. logging.warn('select attribute not implemented: %s' % key)
  4692. new_fields=[]
  4693. mongosort_list = []
  4694. # try an orderby attribute
  4695. orderby = attributes.get('orderby', False)
  4696. limitby = attributes.get('limitby', False)
  4697. # distinct = attributes.get('distinct', False)
  4698. if orderby:
  4699. if isinstance(orderby, (list, tuple)):
  4700. orderby = xorify(orderby)
  4701. # !!!! need to add 'random'
  4702. for f in self.expand(orderby).split(','):
  4703. if f.startswith('-'):
  4704. mongosort_list.append((f[1:], -1))
  4705. else:
  4706. mongosort_list.append((f, 1))
  4707. if limitby:
  4708. limitby_skip, limitby_limit = limitby
  4709. else:
  4710. limitby_skip = limitby_limit = 0
  4711. mongofields_dict = self.SON()
  4712. mongoqry_dict = {}
  4713. for item in fields:
  4714. if isinstance(item, SQLALL):
  4715. new_fields += item._table
  4716. else:
  4717. new_fields.append(item)
  4718. fields = new_fields
  4719. if isinstance(query,Query):
  4720. tablename = self.get_table(query)
  4721. elif len(fields) != 0:
  4722. tablename = fields[0].tablename
  4723. else:
  4724. raise SyntaxError("The table name could not be found in " +
  4725. "the query nor from the select statement.")
  4726. mongoqry_dict = self.expand(query)
  4727. fields = fields or self.db[tablename]
  4728. for field in fields:
  4729. mongofields_dict[field.name] = 1
  4730. return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
  4731. limitby_limit, limitby_skip
  4732. def select(self, query, fields, attributes, count=False,
  4733. snapshot=False):
  4734. # TODO: support joins
  4735. tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
  4736. limitby_limit, limitby_skip = self._select(query, fields, attributes)
  4737. ctable = self.connection[tablename]
  4738. if count:
  4739. return {'count' : ctable.find(
  4740. mongoqry_dict, mongofields_dict,
  4741. skip=limitby_skip, limit=limitby_limit,
  4742. sort=mongosort_list, snapshot=snapshot).count()}
  4743. else:
  4744. # pymongo cursor object
  4745. mongo_list_dicts = ctable.find(mongoqry_dict,
  4746. mongofields_dict, skip=limitby_skip,
  4747. limit=limitby_limit, sort=mongosort_list,
  4748. snapshot=snapshot)
  4749. rows = []
  4750. # populate row in proper order
  4751. # Here we replace ._id with .id to follow the standard naming
  4752. colnames = []
  4753. newnames = []
  4754. for field in fields:
  4755. colname = str(field)
  4756. colnames.append(colname)
  4757. tablename, fieldname = colname.split(".")
  4758. if fieldname == "_id":
  4759. # Mongodb reserved uuid key
  4760. field.name = "id"
  4761. newnames.append(".".join((tablename, field.name)))
  4762. for record in mongo_list_dicts:
  4763. row=[]
  4764. for colname in colnames:
  4765. tablename, fieldname = colname.split(".")
  4766. # switch to Mongo _id uuids for retrieving
  4767. # record id's
  4768. if fieldname == "id": fieldname = "_id"
  4769. if fieldname in record:
  4770. if isinstance(record[fieldname],
  4771. self.ObjectId):
  4772. value = int(str(record[fieldname]), 16)
  4773. else:
  4774. value = record[fieldname]
  4775. else:
  4776. value = None
  4777. row.append(value)
  4778. rows.append(row)
  4779. processor = attributes.get('processor', self.parse)
  4780. result = processor(rows, fields, newnames, False)
  4781. return result
  4782. def INVERT(self, first):
  4783. #print "in invert first=%s" % first
  4784. return '-%s' % self.expand(first)
  4785. def drop(self, table, mode=''):
  4786. ctable = self.connection[table._tablename]
  4787. ctable.drop()
  4788. def truncate(self, table, mode, safe=None):
  4789. if safe == None:
  4790. safe=self.safe
  4791. ctable = self.connection[table._tablename]
  4792. ctable.remove(None, safe=True)
  4793. def oupdate(self, tablename, query, fields):
  4794. if not isinstance(query, Query):
  4795. raise SyntaxError("Not Supported")
  4796. filter = None
  4797. if query:
  4798. filter = self.expand(query)
  4799. modify = {'$set': dict((k.name, self.represent(v, k.type)) for
  4800. k, v in fields)}
  4801. return modify, filter
  4802. def update(self, tablename, query, fields, safe=None):
  4803. if safe == None:
  4804. safe = self.safe
4805. # return the number of adjusted rows, or zero when the query
4806. # matches nothing; no exceptions are raised
  4807. if not isinstance(query, Query):
  4808. raise RuntimeError("Not implemented")
  4809. amount = self.count(query, False)
  4810. modify, filter = self.oupdate(tablename, query, fields)
  4811. try:
  4812. result = self.connection[tablename].update(filter,
  4813. modify, multi=True, safe=safe)
  4814. if safe:
  4815. try:
  4816. # if result count is available fetch it
  4817. return result["n"]
  4818. except (KeyError, AttributeError, TypeError):
  4819. return amount
  4820. else:
  4821. return amount
  4822. except Exception, e:
4823. # TODO Reverse update query to verify that the query succeeded
  4824. raise RuntimeError("uncaught exception when updating rows: %s" % e)
  4825. #this function returns a dict with the where clause and update fields
  4826. def _update(self,tablename,query,fields):
  4827. return str(self.oupdate(tablename, query, fields))
  4828. def delete(self, tablename, query, safe=None):
  4829. if safe is None:
  4830. safe = self.safe
4831. if not isinstance(query, Query):
4832. raise RuntimeError("query type %s is not supported" % \
4833. type(query))
4834. # count matching rows first so the number removed can be reported
4835. amount = self.count(query, False)
  4836. filter = self.expand(query)
  4837. self._delete(tablename, filter, safe=safe)
  4838. return amount
  4839. def _delete(self, tablename, filter, safe=None):
  4840. return self.connection[tablename].remove(filter, safe=safe)
  4841. def bulk_insert(self, table, items):
  4842. return [self.insert(table,item) for item in items]
  4843. # TODO This will probably not work:(
  4844. def NOT(self, first):
  4845. result = {}
  4846. result["$not"] = self.expand(first)
  4847. return result
  4848. def AND(self,first,second):
  4849. f = self.expand(first)
  4850. s = self.expand(second)
  4851. f.update(s)
  4852. return f
  4853. def OR(self,first,second):
  4854. # pymongo expects: .find({'$or': [{'name':'1'}, {'name':'2'}]})
  4855. result = {}
  4856. f = self.expand(first)
  4857. s = self.expand(second)
  4858. result['$or'] = [f,s]
  4859. return result
  4860. def BELONGS(self, first, second):
  4861. if isinstance(second, str):
  4862. return {self.expand(first) : {"$in" : [ second[:-1]]} }
  4863. elif second==[] or second==():
4864. return {1:0} # an always-false filter that matches no documents
  4865. items = [self.expand(item, first.type) for item in second]
  4866. return {self.expand(first) : {"$in" : items} }
  4867. def EQ(self,first,second):
  4868. result = {}
  4869. result[self.expand(first)] = self.expand(second)
  4870. return result
  4871. def NE(self, first, second=None):
  4872. result = {}
  4873. result[self.expand(first)] = {'$ne': self.expand(second)}
  4874. return result
  4875. def LT(self,first,second=None):
  4876. if second is None:
  4877. raise RuntimeError("Cannot compare %s < None" % first)
  4878. result = {}
  4879. result[self.expand(first)] = {'$lt': self.expand(second)}
  4880. return result
  4881. def LE(self,first,second=None):
  4882. if second is None:
  4883. raise RuntimeError("Cannot compare %s <= None" % first)
  4884. result = {}
  4885. result[self.expand(first)] = {'$lte': self.expand(second)}
  4886. return result
  4887. def GT(self,first,second):
  4888. result = {}
  4889. result[self.expand(first)] = {'$gt': self.expand(second)}
  4890. return result
  4891. def GE(self,first,second=None):
  4892. if second is None:
  4893. raise RuntimeError("Cannot compare %s >= None" % first)
  4894. result = {}
  4895. result[self.expand(first)] = {'$gte': self.expand(second)}
  4896. return result
  4897. def ADD(self, first, second):
  4898. raise NotImplementedError(self.error_messages["javascript_needed"])
  4899. return '%s + %s' % (self.expand(first),
  4900. self.expand(second, first.type))
  4901. def SUB(self, first, second):
  4902. raise NotImplementedError(self.error_messages["javascript_needed"])
  4903. return '(%s - %s)' % (self.expand(first),
  4904. self.expand(second, first.type))
  4905. def MUL(self, first, second):
  4906. raise NotImplementedError(self.error_messages["javascript_needed"])
  4907. return '(%s * %s)' % (self.expand(first),
  4908. self.expand(second, first.type))
  4909. def DIV(self, first, second):
  4910. raise NotImplementedError(self.error_messages["javascript_needed"])
  4911. return '(%s / %s)' % (self.expand(first),
  4912. self.expand(second, first.type))
  4913. def MOD(self, first, second):
  4914. raise NotImplementedError(self.error_messages["javascript_needed"])
  4915. return '(%s %% %s)' % (self.expand(first),
  4916. self.expand(second, first.type))
  4917. def AS(self, first, second):
  4918. raise NotImplementedError(self.error_messages["javascript_needed"])
  4919. return '%s AS %s' % (self.expand(first), second)
  4920. # We could implement an option that simulates a full featured SQL
4921. # database. But I think the option should be set explicitly or
  4922. # implemented as another library.
  4923. def ON(self, first, second):
  4924. raise NotImplementedError("This is not possible in NoSQL" +
  4925. " but can be simulated with a wrapper.")
  4926. return '%s ON %s' % (self.expand(first), self.expand(second))
4927. # BELOW ARE TWO IMPLEMENTATIONS OF THE SAME FUNCTIONS
4928. # (THE LATER DEFINITIONS SHADOW THE EARLIER ONES) - WHICH ONE IS BEST?
  4929. def COMMA(self, first, second):
  4930. return '%s, %s' % (self.expand(first), self.expand(second))
  4931. def LIKE(self, first, second):
  4932. #escaping regex operators?
  4933. return {self.expand(first): ('%s' % \
  4934. self.expand(second, 'string').replace('%','/'))}
  4935. def STARTSWITH(self, first, second):
  4936. #escaping regex operators?
  4937. return {self.expand(first): ('/^%s/' % \
  4938. self.expand(second, 'string'))}
  4939. def ENDSWITH(self, first, second):
  4940. #escaping regex operators?
4941. return {self.expand(first): ('/%s$/' % \
  4942. self.expand(second, 'string'))}
  4943. def CONTAINS(self, first, second, case_sensitive=False):
4944. # case_sensitive is silently ignored; only case sensitive
4945. # matches are supported. There is a technical difference,
4946. # but mongodb doesn't support it and the result is the same
  4947. return {self.expand(first) : ('/%s/' % \
  4948. self.expand(second, 'string'))}
  4949. def LIKE(self, first, second):
  4950. import re
  4951. return {self.expand(first): {'$regex': \
  4952. re.escape(self.expand(second,
  4953. 'string')).replace('%','.*')}}
4954. #TODO verify full compatibility with official SQL Like operator
  4955. def STARTSWITH(self, first, second):
  4956. #TODO Solve almost the same problem as with endswith
  4957. import re
  4958. return {self.expand(first): {'$regex' : '^' +
  4959. re.escape(self.expand(second,
  4960. 'string'))}}
4961. #TODO verify full compatibility with official SQL Like operator
  4962. def ENDSWITH(self, first, second):
  4963. #escaping regex operators?
4964. #TODO a search for endswith('a') also returned names like
4965. # zsa_corbitt, although that name ends with a 't'
  4967. import re
  4968. return {self.expand(first): {'$regex': \
  4969. re.escape(self.expand(second, 'string')) + '$'}}
4970. #TODO verify full compatibility with official oracle contains operator
  4971. def CONTAINS(self, first, second, case_sensitive=False):
4972. # case_sensitive is silently ignored; only case sensitive
4973. # matches are supported. There is a technical difference,
4974. # but mongodb doesn't support it and the result is the same
  4975. #TODO contains operators need to be transformed to Regex
4976. return {self.expand(first) : {'$regex': \
4977. ".*" + re.escape(self.expand(second, 'string')) + ".*"}}
  4978. class IMAPAdapter(NoSQLAdapter):
  4979. drivers = ('imaplib',)
  4980. """ IMAP server adapter
  4981. This class is intended as an interface with
  4982. email IMAP servers to perform simple queries in the
  4983. web2py DAL query syntax, so email read, search and
  4984. other related IMAP mail services (as those implemented
  4985. by brands like Google(r), and Yahoo!(r)
  4986. can be managed from web2py applications.
  4987. The code uses examples by Yuji Tomita on this post:
  4988. http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
4989. and is based on the docs for Python imaplib, python email
4990. and the email IETF RFCs (i.e. RFC2060 and RFC3501)
4991. This adapter was tested with a small set of operations with Gmail(r).
4992. Requests to other services could raise command syntax and response data issues.
  4993. It creates its table and field names "statically",
  4994. meaning that the developer should leave the table and field
  4995. definitions to the DAL instance by calling the adapter's
  4996. .define_tables() method. The tables are defined with the
  4997. IMAP server mailbox list information.
  4998. .define_tables() returns a dictionary mapping dal tablenames
  4999. to the server mailbox names with the following structure:
  5000. {<tablename>: str <server mailbox name>}
  5001. Here is a list of supported fields:
  5002. Field Type Description
  5003. ################################################################
  5004. uid string
  5005. answered boolean Flag
  5006. created date
  5007. content list:string A list of text or html parts
  5008. to string
  5009. cc string
  5010. bcc string
  5011. size integer the amount of octets of the message*
  5012. deleted boolean Flag
  5013. draft boolean Flag
  5014. flagged boolean Flag
  5015. sender string
  5016. recent boolean Flag
  5017. seen boolean Flag
  5018. subject string
  5019. mime string The mime header declaration
  5020. email string The complete RFC822 message**
  5021. attachments <type list> Each non text part as dict
  5022. encoding string The main detected encoding
  5023. *At the application side it is measured as the length of the RFC822
  5024. message string
  5025. WARNING: As row id's are mapped to email sequence numbers,
  5026. make sure your imap client web2py app does not delete messages
  5027. during select or update actions, to prevent
  5028. updating or deleting different messages.
  5029. Sequence numbers change whenever the mailbox is updated.
5030. To avoid these sequence number issues, the use of uid fields
5031. in query references is recommended (although the update and delete
  5032. in separate actions rule still applies).
  5033. # This is the code recommended to start imap support
  5034. # at the app's model:
  5035. imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
  5036. imapdb.define_tables()
  5037. Here is an (incomplete) list of possible imap commands:
  5038. # Count today's unseen messages
  5039. # smaller than 6000 octets from the
  5040. # inbox mailbox
  5041. q = imapdb.INBOX.seen == False
  5042. q &= imapdb.INBOX.created == datetime.date.today()
  5043. q &= imapdb.INBOX.size < 6000
  5044. unread = imapdb(q).count()
  5045. # Fetch last query messages
  5046. rows = imapdb(q).select()
  5047. # it is also possible to filter query select results with limitby and
  5048. # sequences of mailbox fields
  5049. set.select(<fields sequence>, limitby=(<int>, <int>))
  5050. # Mark last query messages as seen
  5051. messages = [row.uid for row in rows]
  5052. seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
  5053. # Delete messages in the imap database that have mails from mr. Gumby
  5054. deleted = 0
5055. for mailbox in imapdb.tables:
  5056. deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
5057. # It is also possible to mark messages for deletion instead of erasing them
  5058. # directly with set.update(deleted=True)
5059. # This object gives access
5060. # to the adapter's auto mailbox
5061. # mapped names (which native
5062. # mailbox has which table name)
  5063. imapdb.mailboxes <dict> # tablename, server native name pairs
  5064. # To retrieve a table native mailbox name use:
  5065. imapdb.<table>.mailbox
  5066. ### New features v2.4.1:
  5067. # Declare mailboxes statically with tablename, name pairs
  5068. # This avoids the extra server names retrieval
  5069. imapdb.define_tables({"inbox": "INBOX"})
  5070. # Selects without content/attachments/email columns will only
  5071. # fetch header and flags
  5072. imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
  5073. """
  5074. types = {
  5075. 'string': str,
  5076. 'text': str,
  5077. 'date': datetime.date,
  5078. 'datetime': datetime.datetime,
  5079. 'id': long,
  5080. 'boolean': bool,
  5081. 'integer': int,
  5082. 'bigint': long,
  5083. 'blob': str,
  5084. 'list:string': str,
  5085. }
  5086. dbengine = 'imap'
  5087. REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
  5088. def __init__(self,
  5089. db,
  5090. uri,
  5091. pool_size=0,
  5092. folder=None,
  5093. db_codec ='UTF-8',
  5094. credential_decoder=IDENTITY,
  5095. driver_args={},
  5096. adapter_args={},
  5097. do_connect=True,
  5098. after_connection=None):
  5099. # db uri: user@example.com:password@imap.server.com:123
  5100. # TODO: max size adapter argument for preventing large mail transfers
  5101. self.db = db
  5102. self.uri = uri
  5103. if do_connect: self.find_driver(adapter_args)
  5104. self.pool_size=pool_size
  5105. self.folder = folder
  5106. self.db_codec = db_codec
  5107. self._after_connection = after_connection
  5108. self.credential_decoder = credential_decoder
  5109. self.driver_args = driver_args
  5110. self.adapter_args = adapter_args
  5111. self.mailbox_size = None
  5112. self.static_names = None
  5113. self.charset = sys.getfilesystemencoding()
  5114. # imap class
  5115. self.imap4 = None
  5116. uri = uri.split("://")[1]
  5117. """ MESSAGE is an identifier for sequence number"""
  5118. self.flags = ['\\Deleted', '\\Draft', '\\Flagged',
  5119. '\\Recent', '\\Seen', '\\Answered']
  5120. self.search_fields = {
  5121. 'id': 'MESSAGE', 'created': 'DATE',
  5122. 'uid': 'UID', 'sender': 'FROM',
  5123. 'to': 'TO', 'cc': 'CC',
  5124. 'bcc': 'BCC', 'content': 'TEXT',
  5125. 'size': 'SIZE', 'deleted': '\\Deleted',
  5126. 'draft': '\\Draft', 'flagged': '\\Flagged',
  5127. 'recent': '\\Recent', 'seen': '\\Seen',
  5128. 'subject': 'SUBJECT', 'answered': '\\Answered',
  5129. 'mime': None, 'email': None,
  5130. 'attachments': None
  5131. }
  5132. db['_lastsql'] = ''
  5133. m = self.REGEX_URI.match(uri)
  5134. user = m.group('user')
  5135. password = m.group('password')
  5136. host = m.group('host')
  5137. port = int(m.group('port'))
  5138. over_ssl = False
  5139. if port==993:
  5140. over_ssl = True
  5141. driver_args.update(host=host,port=port, password=password, user=user)
  5142. def connector(driver_args=driver_args):
5143. # successful authentication is always assumed here
  5144. # TODO: support direct connection and login tests
  5145. if over_ssl:
  5146. self.imap4 = self.driver.IMAP4_SSL
  5147. else:
  5148. self.imap4 = self.driver.IMAP4
  5149. connection = self.imap4(driver_args["host"], driver_args["port"])
  5150. data = connection.login(driver_args["user"], driver_args["password"])
  5151. # static mailbox list
  5152. connection.mailbox_names = None
  5153. # dummy cursor function
  5154. connection.cursor = lambda : True
  5155. return connection
  5156. self.db.define_tables = self.define_tables
  5157. self.connector = connector
  5158. if do_connect: self.reconnect()
  5159. def reconnect(self, f=None, cursor=True):
  5160. """
  5161. IMAP4 Pool connection method
5162. the imap connection lacks a cursor command.
  5163. A custom command should be provided as a replacement
  5164. for connection pooling to prevent uncaught remote session
  5165. closing
  5166. """
  5167. if getattr(self,'connection',None) != None:
  5168. return
  5169. if f is None:
  5170. f = self.connector
  5171. if not self.pool_size:
  5172. self.connection = f()
  5173. self.cursor = cursor and self.connection.cursor()
  5174. else:
  5175. POOLS = ConnectionPool.POOLS
  5176. uri = self.uri
  5177. while True:
  5178. GLOBAL_LOCKER.acquire()
  5179. if not uri in POOLS:
  5180. POOLS[uri] = []
  5181. if POOLS[uri]:
  5182. self.connection = POOLS[uri].pop()
  5183. GLOBAL_LOCKER.release()
  5184. self.cursor = cursor and self.connection.cursor()
  5185. if self.cursor and self.check_active_connection:
  5186. try:
  5187. # check if connection is alive or close it
  5188. result, data = self.connection.list()
  5189. except:
  5190. # Possible connection reset error
  5191. # TODO: read exception class
  5192. self.connection = f()
  5193. break
  5194. else:
  5195. GLOBAL_LOCKER.release()
  5196. self.connection = f()
  5197. self.cursor = cursor and self.connection.cursor()
  5198. break
  5199. self.after_connection_hook()
  5200. def get_last_message(self, tablename):
  5201. last_message = None
  5202. # request mailbox list to the server
  5203. # if needed
  5204. if not isinstance(self.connection.mailbox_names, dict):
  5205. self.get_mailboxes()
  5206. try:
  5207. result = self.connection.select(self.connection.mailbox_names[tablename])
  5208. last_message = int(result[1][0])
  5209. except (IndexError, ValueError, TypeError, KeyError):
  5210. e = sys.exc_info()[1]
  5211. LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
  5212. return last_message
  5213. def get_uid_bounds(self, tablename):
  5214. if not isinstance(self.connection.mailbox_names, dict):
  5215. self.get_mailboxes()
  5216. # fetch first and last messages
  5217. # return (first, last) messages uid's
  5218. last_message = self.get_last_message(tablename)
  5219. result, data = self.connection.uid("search", None, "(ALL)")
  5220. uid_list = data[0].strip().split()
  5221. if len(uid_list) <= 0:
  5222. return None
  5223. else:
  5224. return (uid_list[0], uid_list[-1])
  5225. def convert_date(self, date, add=None):
5226. """ Convert a date object to a string
5227. with d-Mon-Y style for IMAP, or the inverse
5228. case
5229. add <timedelta> adds to the date object
5230. """
5231. if add is None:
5232. add = datetime.timedelta()
  5233. months = [None, "Jan","Feb","Mar","Apr","May","Jun",
  5234. "Jul", "Aug","Sep","Oct","Nov","Dec"]
  5235. if isinstance(date, basestring):
  5236. # Prevent unexpected date response format
  5237. try:
  5238. dayname, datestring = date.split(",")
  5239. except (ValueError):
  5240. LOGGER.debug("Could not parse date text: %s" % date)
  5241. return None
  5242. date_list = datestring.strip().split()
  5243. year = int(date_list[2])
  5244. month = months.index(date_list[1])
  5245. day = int(date_list[0])
  5246. hms = map(int, date_list[3].split(":"))
  5247. return datetime.datetime(year, month, day,
  5248. hms[0], hms[1], hms[2]) + add
  5249. elif isinstance(date, (datetime.datetime, datetime.date)):
  5250. return (date + add).strftime("%d-%b-%Y")
  5251. else:
  5252. return None
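# doctest-style sketches of the two directions handled above
# (imapdb._adapter being an IMAPAdapter instance):
# >>> imapdb._adapter.convert_date("Tue, 15 Jan 2013 12:30:00 +0000")
# datetime.datetime(2013, 1, 15, 12, 30)
# >>> imapdb._adapter.convert_date(datetime.date(2013, 1, 15))
# '15-Jan-2013'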
  5253. @staticmethod
  5254. def header_represent(f, r):
  5255. from email.header import decode_header
  5256. text, encoding = decode_header(f)[0]
  5257. return text
  5258. def encode_text(self, text, charset, errors="replace"):
  5259. """ convert text for mail to unicode"""
  5260. if text is None:
  5261. text = ""
  5262. else:
  5263. if isinstance(text, str):
  5264. if charset is None:
  5265. text = unicode(text, "utf-8", errors)
  5266. else:
  5267. text = unicode(text, charset, errors)
  5268. else:
  5269. raise Exception("Unsupported mail text type %s" % type(text))
  5270. return text.encode("utf-8")
  5271. def get_charset(self, message):
  5272. charset = message.get_content_charset()
  5273. return charset
  5274. def get_mailboxes(self):
  5275. """ Query the mail database for mailbox names """
  5276. if self.static_names:
  5277. # statically defined mailbox names
  5278. self.connection.mailbox_names = self.static_names
  5279. return self.static_names.keys()
  5280. mailboxes_list = self.connection.list()
  5281. self.connection.mailbox_names = dict()
  5282. mailboxes = list()
  5283. x = 0
  5284. for item in mailboxes_list[1]:
  5285. x = x + 1
  5286. item = item.strip()
  5287. if not "NOSELECT" in item.upper():
  5288. sub_items = item.split("\"")
  5289. sub_items = [sub_item for sub_item in sub_items \
  5290. if len(sub_item.strip()) > 0]
  5291. # mailbox = sub_items[len(sub_items) -1]
  5292. mailbox = sub_items[-1]
  5293. # remove unwanted characters and store original names
  5294. # Don't allow leading non alphabetic characters
  5295. mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
  5296. mailboxes.append(mailbox_name)
  5297. self.connection.mailbox_names[mailbox_name] = mailbox
  5298. return mailboxes
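# A sketch of the sanitization above: a native mailbox name such as
# "[Gmail]/Sent Mail" becomes the DAL tablename 'Gmail_Sent_Mail'
# (slashes/spaces to underscores, non-word characters dropped, leading
# underscores/digits stripped), while the original name is kept in
# connection.mailbox_names for use in server commands.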
  5299. def get_query_mailbox(self, query):
  5300. nofield = True
  5301. tablename = None
  5302. attr = query
  5303. while nofield:
  5304. if hasattr(attr, "first"):
  5305. attr = attr.first
  5306. if isinstance(attr, Field):
  5307. return attr.tablename
  5308. elif isinstance(attr, Query):
  5309. pass
  5310. else:
  5311. return None
  5312. else:
  5313. return None
  5314. return tablename
  5315. def is_flag(self, flag):
5316. return self.search_fields.get(flag, None) in self.flags
  5320. def define_tables(self, mailbox_names=None):
  5321. """
5322. Auto create common IMAP fields
5323. This function creates field definitions "statically",
5324. meaning that custom fields as in other adapters are
5325. not supported and definitions are handled on a service/mode
5326. basis (local syntax for Gmail(r), Ymail(r))
  5327. Returns a dictionary with tablename, server native mailbox name
  5328. pairs.
  5329. """
  5330. if mailbox_names:
  5331. # optional statically declared mailboxes
  5332. self.static_names = mailbox_names
  5333. else:
  5334. self.static_names = None
  5335. if not isinstance(self.connection.mailbox_names, dict):
  5336. self.get_mailboxes()
  5337. names = self.connection.mailbox_names.keys()
  5338. for name in names:
  5339. self.db.define_table("%s" % name,
  5340. Field("uid", "string", writable=False),
  5341. Field("answered", "boolean"),
  5342. Field("created", "datetime", writable=False),
  5343. Field("content", "list:string", writable=False),
  5344. Field("to", "string", writable=False),
  5345. Field("cc", "string", writable=False),
  5346. Field("bcc", "string", writable=False),
  5347. Field("size", "integer", writable=False),
  5348. Field("deleted", "boolean"),
  5349. Field("draft", "boolean"),
  5350. Field("flagged", "boolean"),
  5351. Field("sender", "string", writable=False),
  5352. Field("recent", "boolean", writable=False),
  5353. Field("seen", "boolean"),
  5354. Field("subject", "string", writable=False),
  5355. Field("mime", "string", writable=False),
  5356. Field("email", "string", writable=False, readable=False),
  5357. Field("attachments", list, writable=False, readable=False),
  5358. Field("encoding")
  5359. )
  5360. # Set a special _mailbox attribute for storing
  5361. # native mailbox names
  5362. self.db[name].mailbox = \
  5363. self.connection.mailbox_names[name]
  5364. # decode quoted printable
  5365. self.db[name].to.represent = self.db[name].cc.represent = \
  5366. self.db[name].bcc.represent = self.db[name].sender.represent = \
  5367. self.db[name].subject.represent = self.header_represent
  5368. # Set the db instance mailbox collections
  5369. self.db.mailboxes = self.connection.mailbox_names
  5370. return self.db.mailboxes
  5371. def create_table(self, *args, **kwargs):
  5372. # not implemented
  5373. # but required by DAL
  5374. pass
  5375. def _select(self, query, fields, attributes):
  5376. if use_common_filters(query):
  5377. query = self.common_filter(query, [self.get_query_mailbox(query),])
  5378. return str(query)
  5379. def select(self, query, fields, attributes):
  5380. """ Search and Fetch records and return web2py rows
  5381. """
  5382. # move this statement elsewhere (upper-level)
  5383. if use_common_filters(query):
  5384. query = self.common_filter(query, [self.get_query_mailbox(query),])
  5385. import email
  5386. # get records from imap server with search + fetch
  5387. # convert results to a dictionary
  5388. tablename = None
  5389. fetch_results = list()
  5390. if isinstance(query, Query):
  5391. tablename = self.get_table(query)
  5392. mailbox = self.connection.mailbox_names.get(tablename, None)
  5393. if mailbox is None:
5394. raise ValueError("Mailbox name not found for table: %s" % tablename)
  5395. else:
  5396. # select with readonly
  5397. result, selected = self.connection.select(mailbox, True)
  5398. if result != "OK":
  5399. raise Exception("IMAP error: %s" % selected)
  5400. self.mailbox_size = int(selected[0])
  5401. search_query = "(%s)" % str(query).strip()
  5402. search_result = self.connection.uid("search", None, search_query)
  5403. # Normal IMAP response OK is assumed (change this)
  5404. if search_result[0] == "OK":
  5405. # For "light" remote server responses just get the first
  5406. # ten records (change for non-experimental implementation)
  5407. # However, light responses are not guaranteed with this
  5408. # approach, just fewer messages.
  5409. limitby = attributes.get('limitby', None)
  5410. messages_set = search_result[1][0].split()
  5411. # descending order
  5412. messages_set.reverse()
  5413. if limitby is not None:
  5414. # TODO: orderby, asc/desc, limitby from complete message set
  5415. messages_set = messages_set[int(limitby[0]):int(limitby[1])]
  5416. # keep the requests small for header/flags
  5417. if any([(field.name in ["content", "size",
  5418. "attachments", "email"]) for
  5419. field in fields]):
  5420. imap_fields = "(RFC822 FLAGS)"
  5421. else:
  5422. imap_fields = "(RFC822.HEADER FLAGS)"
  5423. if len(messages_set) > 0:
  5424. # create fetch results object list
5425. # fetch each remote message and store it in memory
  5426. # (change to multi-fetch command syntax for faster
  5427. # transactions)
  5428. for uid in messages_set:
  5429. # fetch the RFC822 message body
  5430. typ, data = self.connection.uid("fetch", uid, imap_fields)
  5431. if typ == "OK":
  5432. fr = {"message": int(data[0][0].split()[0]),
  5433. "uid": int(uid),
  5434. "email": email.message_from_string(data[0][1]),
  5435. "raw_message": data[0][1]}
  5436. fr["multipart"] = fr["email"].is_multipart()
  5437. # fetch flags for the message
  5438. fr["flags"] = self.driver.ParseFlags(data[1])
  5439. fetch_results.append(fr)
  5440. else:
  5441. # error retrieving the message body
  5442. raise Exception("IMAP error retrieving the body: %s" % data)
  5443. else:
  5444. raise Exception("IMAP search error: %s" % search_result[1])
  5445. elif isinstance(query, (Expression, basestring)):
  5446. raise NotImplementedError()
  5447. else:
  5448. raise TypeError("Unexpected query type")
  5449. imapqry_dict = {}
  5450. imapfields_dict = {}
  5451. if len(fields) == 1 and isinstance(fields[0], SQLALL):
  5452. allfields = True
  5453. elif len(fields) == 0:
  5454. allfields = True
  5455. else:
  5456. allfields = False
  5457. if allfields:
  5458. colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
  5459. else:
  5460. colnames = ["%s.%s" % (tablename, field.name) for field in fields]
  5461. for k in colnames:
  5462. imapfields_dict[k] = k
  5463. imapqry_list = list()
  5464. imapqry_array = list()
  5465. for fr in fetch_results:
  5466. attachments = []
  5467. content = []
  5468. size = 0
  5469. n = int(fr["message"])
  5470. item_dict = dict()
  5471. message = fr["email"]
  5472. uid = fr["uid"]
  5473. charset = self.get_charset(message)
  5474. flags = fr["flags"]
  5475. raw_message = fr["raw_message"]
  5476. # Return messages data mapping static fields
  5477. # and fetched results. Mapping should be made
  5478. # outside the select function (with auxiliary
  5479. # instance methods)
5480. # pending: search flag states through the email message
5481. # instances for correct output
  5482. # preserve subject encoding (ASCII/quoted printable)
  5483. if "%s.id" % tablename in colnames:
  5484. item_dict["%s.id" % tablename] = n
  5485. if "%s.created" % tablename in colnames:
  5486. item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
  5487. if "%s.uid" % tablename in colnames:
  5488. item_dict["%s.uid" % tablename] = uid
  5489. if "%s.sender" % tablename in colnames:
  5490. # If there is no encoding found in the message header
  5491. # force utf-8 replacing characters (change this to
  5492. # module's defaults). Applies to .sender, .to, .cc and .bcc fields
  5493. item_dict["%s.sender" % tablename] = message["From"]
  5494. if "%s.to" % tablename in colnames:
  5495. item_dict["%s.to" % tablename] = message["To"]
  5496. if "%s.cc" % tablename in colnames:
  5497. if "Cc" in message.keys():
  5498. item_dict["%s.cc" % tablename] = message["Cc"]
  5499. else:
  5500. item_dict["%s.cc" % tablename] = ""
  5501. if "%s.bcc" % tablename in colnames:
  5502. if "Bcc" in message.keys():
  5503. item_dict["%s.bcc" % tablename] = message["Bcc"]
  5504. else:
  5505. item_dict["%s.bcc" % tablename] = ""
  5506. if "%s.deleted" % tablename in colnames:
  5507. item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
  5508. if "%s.draft" % tablename in colnames:
  5509. item_dict["%s.draft" % tablename] = "\\Draft" in flags
  5510. if "%s.flagged" % tablename in colnames:
  5511. item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
  5512. if "%s.recent" % tablename in colnames:
  5513. item_dict["%s.recent" % tablename] = "\\Recent" in flags
  5514. if "%s.seen" % tablename in colnames:
  5515. item_dict["%s.seen" % tablename] = "\\Seen" in flags
  5516. if "%s.subject" % tablename in colnames:
  5517. item_dict["%s.subject" % tablename] = message["Subject"]
  5518. if "%s.answered" % tablename in colnames:
  5519. item_dict["%s.answered" % tablename] = "\\Answered" in flags
  5520. if "%s.mime" % tablename in colnames:
  5521. item_dict["%s.mime" % tablename] = message.get_content_type()
  5522. if "%s.encoding" % tablename in colnames:
  5523. item_dict["%s.encoding" % tablename] = charset
5524. # Here goes the whole RFC822 body as an email instance
5525. # for controller-side custom processing.
5526. # The message is stored as a raw string;
5527. # >>> email.message_from_string(raw_message)
5528. # returns a Message object for enhanced object processing
  5529. if "%s.email" % tablename in colnames:
  5530. # WARNING: no encoding performed (raw message)
  5531. item_dict["%s.email" % tablename] = raw_message
  5532. # Size measure as suggested in a Velocity Reviews post
  5533. # by Tim Williams: "how to get size of email attachment"
5534. # Note: len() and the server's RFC822.SIZE reports don't match;
5535. # retrieving the server-side size for representation would add
5536. # another fetch transaction to the process
  5537. for part in message.walk():
  5538. maintype = part.get_content_maintype()
  5539. if ("%s.attachments" % tablename in colnames) or \
  5540. ("%s.content" % tablename in colnames):
  5541. if "%s.attachments" % tablename in colnames:
  5542. if not ("text" in maintype):
  5543. payload = part.get_payload(decode=True)
  5544. if payload:
  5545. attachment = {
  5546. "payload": payload,
  5547. "filename": part.get_filename(),
  5548. "encoding": part.get_content_charset(),
  5549. "mime": part.get_content_type(),
  5550. "disposition": part["Content-Disposition"]}
  5551. attachments.append(attachment)
  5552. if "%s.content" % tablename in colnames:
  5553. payload = part.get_payload(decode=True)
  5554. part_charset = self.get_charset(part)
  5555. if "text" in maintype:
  5556. if payload:
  5557. content.append(self.encode_text(payload, part_charset))
  5558. if "%s.size" % tablename in colnames:
  5559. if part is not None:
  5560. size += len(str(part))
  5561. item_dict["%s.content" % tablename] = bar_encode(content)
  5562. item_dict["%s.attachments" % tablename] = attachments
  5563. item_dict["%s.size" % tablename] = size
  5564. imapqry_list.append(item_dict)
5565. # extra object mapping for the sake of rows object
5566. # creation (sends an array of lists)
  5567. for item_dict in imapqry_list:
  5568. imapqry_array_item = list()
  5569. for fieldname in colnames:
  5570. imapqry_array_item.append(item_dict[fieldname])
  5571. imapqry_array.append(imapqry_array_item)
  5572. # parse result and return a rows object
  5574. processor = attributes.get('processor',self.parse)
  5575. return processor(imapqry_array, fields, colnames)
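# A usage sketch, assuming the connection and mailbox tables were set
# up as in the adapter docstring (mailbox/table names such as "INBOX"
# depend on the account):
# >>> imapdb = DAL("imap://user:password@server:993") # assumed URI
# >>> imapdb.define_tables()
# >>> rows = imapdb(imapdb.INBOX.seen == False).select(
# ... imapdb.INBOX.subject, imapdb.INBOX.sender, limitby=(0, 10))
# The query is translated to an IMAP SEARCH plus per-message FETCH
# commands by the select() method above.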
  5576. def _update(self, tablename, query, fields, commit=False):
  5577. # TODO: the adapter should implement an .expand method
  5578. commands = list()
  5579. if use_common_filters(query):
  5580. query = self.common_filter(query, [tablename,])
  5581. mark = []
  5582. unmark = []
  5583. if query:
  5584. for item in fields:
  5585. field = item[0]
  5586. name = field.name
  5587. value = item[1]
  5588. if self.is_flag(name):
  5589. flag = self.search_fields[name]
  5590. if (value is not None) and (flag != "\\Recent"):
  5591. if value:
  5592. mark.append(flag)
  5593. else:
  5594. unmark.append(flag)
  5595. result, data = self.connection.select(
  5596. self.connection.mailbox_names[tablename])
  5597. string_query = "(%s)" % query
  5598. result, data = self.connection.search(None, string_query)
  5599. store_list = [item.strip() for item in data[0].split()
  5600. if item.strip().isdigit()]
  5601. # build commands for marked flags
  5602. for number in store_list:
  5603. result = None
  5604. if len(mark) > 0:
  5605. commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
  5606. if len(unmark) > 0:
  5607. commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
  5608. return commands
  5609. def update(self, tablename, query, fields):
  5610. rowcount = 0
  5611. commands = self._update(tablename, query, fields)
  5612. for command in commands:
  5613. result, data = self.connection.store(*command)
  5614. if result == "OK":
  5615. rowcount += 1
  5616. else:
  5617. raise Exception("IMAP storing error: %s" % data)
  5618. return rowcount
  5619. def _count(self, query, distinct=None):
  5620. raise NotImplementedError()
  5621. def count(self,query,distinct=None):
  5622. counter = 0
  5623. tablename = self.get_query_mailbox(query)
  5624. if query and tablename is not None:
  5625. if use_common_filters(query):
  5626. query = self.common_filter(query, [tablename,])
  5627. result, data = self.connection.select(self.connection.mailbox_names[tablename])
  5628. string_query = "(%s)" % query
  5629. result, data = self.connection.search(None, string_query)
  5630. store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
  5631. counter = len(store_list)
  5632. return counter
  5633. def delete(self, tablename, query):
  5634. counter = 0
  5635. if query:
  5636. if use_common_filters(query):
  5637. query = self.common_filter(query, [tablename,])
  5638. result, data = self.connection.select(self.connection.mailbox_names[tablename])
  5639. string_query = "(%s)" % query
  5640. result, data = self.connection.search(None, string_query)
  5641. store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
  5642. for number in store_list:
  5643. result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
  5644. if result == "OK":
  5645. counter += 1
  5646. else:
  5647. raise Exception("IMAP store error: %s" % data)
  5648. if counter > 0:
  5649. result, data = self.connection.expunge()
  5650. return counter
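# Sketch (mailbox name assumed): deleting through the DAL marks the
# matching messages with the \Deleted flag and then expunges:
# >>> imapdb(imapdb.INBOX.uid == 1234).delete() # count of flagged messages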
  5651. def BELONGS(self, first, second):
  5652. result = None
  5653. name = self.search_fields[first.name]
  5654. if name == "MESSAGE":
  5655. values = [str(val) for val in second if str(val).isdigit()]
  5656. result = "%s" % ",".join(values).strip()
  5657. elif name == "UID":
  5658. values = [str(val) for val in second if str(val).isdigit()]
  5659. result = "UID %s" % ",".join(values).strip()
  5660. else:
  5661. raise Exception("Operation not supported")
  5662. # result = "(%s %s)" % (self.expand(first), self.expand(second))
  5663. return result
  5664. def CONTAINS(self, first, second, case_sensitive=False):
5665. # the case_sensitive argument is silently ignored (IMAP SEARCH is case-insensitive)
  5666. result = None
  5667. name = self.search_fields[first.name]
  5668. if name in ("FROM", "TO", "SUBJECT", "TEXT"):
  5669. result = "%s \"%s\"" % (name, self.expand(second))
  5670. else:
  5671. if first.name in ("cc", "bcc"):
  5672. result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
  5673. elif first.name == "mime":
  5674. result = "HEADER Content-Type \"%s\"" % self.expand(second)
  5675. else:
  5676. raise Exception("Operation not supported")
  5677. return result
  5678. def GT(self, first, second):
  5679. result = None
  5680. name = self.search_fields[first.name]
  5681. if name == "MESSAGE":
  5682. last_message = self.get_last_message(first.tablename)
  5683. result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
  5684. elif name == "UID":
  5685. # GT and LT may not return
  5686. # expected sets depending on
  5687. # the uid format implemented
  5688. try:
  5689. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5690. except TypeError:
  5691. e = sys.exc_info()[1]
  5692. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5693. return ""
  5694. try:
  5695. lower_limit = int(self.expand(second)) + 1
  5696. except (ValueError, TypeError):
  5697. e = sys.exc_info()[1]
  5698. raise Exception("Operation not supported (non integer UID)")
  5699. result = "UID %s:%s" % (lower_limit, threshold)
  5700. elif name == "DATE":
  5701. result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
  5702. elif name == "SIZE":
  5703. result = "LARGER %s" % self.expand(second)
  5704. else:
  5705. raise Exception("Operation not supported")
  5706. return result
  5707. def GE(self, first, second):
  5708. result = None
  5709. name = self.search_fields[first.name]
  5710. if name == "MESSAGE":
  5711. last_message = self.get_last_message(first.tablename)
  5712. result = "%s:%s" % (self.expand(second), last_message)
  5713. elif name == "UID":
  5714. # GT and LT may not return
  5715. # expected sets depending on
  5716. # the uid format implemented
  5717. try:
  5718. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5719. except TypeError:
  5720. e = sys.exc_info()[1]
  5721. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5722. return ""
  5723. lower_limit = self.expand(second)
  5724. result = "UID %s:%s" % (lower_limit, threshold)
  5725. elif name == "DATE":
  5726. result = "SINCE %s" % self.convert_date(second)
  5727. else:
  5728. raise Exception("Operation not supported")
  5729. return result
  5730. def LT(self, first, second):
  5731. result = None
  5732. name = self.search_fields[first.name]
  5733. if name == "MESSAGE":
  5734. result = "%s:%s" % (1, int(self.expand(second)) - 1)
  5735. elif name == "UID":
  5736. try:
  5737. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5738. except TypeError:
  5739. e = sys.exc_info()[1]
  5740. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5741. return ""
  5742. try:
  5743. upper_limit = int(self.expand(second)) - 1
  5744. except (ValueError, TypeError):
  5745. e = sys.exc_info()[1]
  5746. raise Exception("Operation not supported (non integer UID)")
  5747. result = "UID %s:%s" % (pedestal, upper_limit)
  5748. elif name == "DATE":
  5749. result = "BEFORE %s" % self.convert_date(second)
  5750. elif name == "SIZE":
  5751. result = "SMALLER %s" % self.expand(second)
  5752. else:
  5753. raise Exception("Operation not supported")
  5754. return result
  5755. def LE(self, first, second):
  5756. result = None
  5757. name = self.search_fields[first.name]
  5758. if name == "MESSAGE":
  5759. result = "%s:%s" % (1, self.expand(second))
  5760. elif name == "UID":
  5761. try:
  5762. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5763. except TypeError:
  5764. e = sys.exc_info()[1]
  5765. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5766. return ""
  5767. upper_limit = int(self.expand(second))
  5768. result = "UID %s:%s" % (pedestal, upper_limit)
  5769. elif name == "DATE":
  5770. result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
  5771. else:
  5772. raise Exception("Operation not supported")
  5773. return result
  5774. def NE(self, first, second=None):
  5775. if (second is None) and isinstance(first, Field):
  5776. # All records special table query
  5777. if first.type == "id":
  5778. return self.GE(first, 1)
  5779. result = self.NOT(self.EQ(first, second))
  5780. result = result.replace("NOT NOT", "").strip()
  5781. return result
  5782. def EQ(self,first,second):
  5783. name = self.search_fields[first.name]
  5784. result = None
  5785. if name is not None:
  5786. if name == "MESSAGE":
  5787. # query by message sequence number
  5788. result = "%s" % self.expand(second)
  5789. elif name == "UID":
  5790. result = "UID %s" % self.expand(second)
  5791. elif name == "DATE":
  5792. result = "ON %s" % self.convert_date(second)
  5793. elif name in self.flags:
  5794. if second:
  5795. result = "%s" % (name.upper()[1:])
  5796. else:
  5797. result = "NOT %s" % (name.upper()[1:])
  5798. else:
  5799. raise Exception("Operation not supported")
  5800. else:
  5801. raise Exception("Operation not supported")
  5802. return result
  5803. def AND(self, first, second):
  5804. result = "%s %s" % (self.expand(first), self.expand(second))
  5805. return result
  5806. def OR(self, first, second):
  5807. result = "OR %s %s" % (self.expand(first), self.expand(second))
  5808. return "%s" % result.replace("OR OR", "OR")
  5809. def NOT(self, first):
  5810. result = "NOT %s" % self.expand(first)
  5811. return result
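# The operators above emit IMAP SEARCH criteria rather than SQL; an
# illustration (field/mailbox names assumed):
# seen == False -> "NOT SEEN"
# created > d -> "SINCE <d + 1 day>"
# q1 | q2 -> "OR <q1> <q2>"; AND is expressed by juxtaposition: "<q1> <q2>"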
  5812. ########################################################################
  5813. # end of adapters
  5814. ########################################################################
  5815. ADAPTERS = {
  5816. 'sqlite': SQLiteAdapter,
  5817. 'spatialite': SpatiaLiteAdapter,
  5818. 'sqlite:memory': SQLiteAdapter,
  5819. 'spatialite:memory': SpatiaLiteAdapter,
  5820. 'mysql': MySQLAdapter,
  5821. 'postgres': PostgreSQLAdapter,
  5822. 'postgres:psycopg2': PostgreSQLAdapter,
  5823. 'postgres:pg8000': PostgreSQLAdapter,
  5824. 'postgres2:psycopg2': NewPostgreSQLAdapter,
  5825. 'postgres2:pg8000': NewPostgreSQLAdapter,
  5826. 'oracle': OracleAdapter,
  5827. 'mssql': MSSQLAdapter,
  5828. 'mssql2': MSSQL2Adapter,
  5829. 'mssql3': MSSQL3Adapter,
  5830. 'sybase': SybaseAdapter,
  5831. 'db2': DB2Adapter,
  5832. 'teradata': TeradataAdapter,
  5833. 'informix': InformixAdapter,
  5834. 'informix-se': InformixSEAdapter,
  5835. 'firebird': FireBirdAdapter,
  5836. 'firebird_embedded': FireBirdAdapter,
  5837. 'ingres': IngresAdapter,
  5838. 'ingresu': IngresUnicodeAdapter,
  5839. 'sapdb': SAPDBAdapter,
  5840. 'cubrid': CubridAdapter,
  5841. 'jdbc:sqlite': JDBCSQLiteAdapter,
  5842. 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
  5843. 'jdbc:postgres': JDBCPostgreSQLAdapter,
  5844. 'gae': GoogleDatastoreAdapter, # discouraged, for backward compatibility
  5845. 'google:datastore': GoogleDatastoreAdapter,
  5846. 'google:sql': GoogleSQLAdapter,
  5847. 'couchdb': CouchDBAdapter,
  5848. 'mongodb': MongoDBAdapter,
  5849. 'imap': IMAPAdapter
  5850. }
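# The URI scheme (the part matched by REGEX_DBNAME) selects the adapter
# class from this dictionary; a sketch of the dispatch performed in
# DAL.__init__ below:
# >>> uri = 'postgres:psycopg2://user:pass@localhost/test'
# >>> ADAPTERS[REGEX_DBNAME.match(uri).group()]
# <class '...PostgreSQLAdapter'>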
  5851. def sqlhtml_validators(field):
  5852. """
  5853. Field type validation, using web2py's validators mechanism.
5854. Makes sure the content of a field is in line with the declared
5855. fieldtype.
  5856. """
  5857. db = field.db
  5858. if not have_validators:
  5859. return []
  5860. field_type, field_length = field.type, field.length
  5861. if isinstance(field_type, SQLCustomType):
  5862. if hasattr(field_type, 'validator'):
  5863. return field_type.validator
  5864. else:
  5865. field_type = field_type.type
  5866. elif not isinstance(field_type,str):
  5867. return []
  5868. requires=[]
  5869. def ff(r,id):
  5870. row=r(id)
  5871. if not row:
  5872. return id
  5873. elif hasattr(r, '_format') and isinstance(r._format,str):
  5874. return r._format % row
  5875. elif hasattr(r, '_format') and callable(r._format):
  5876. return r._format(row)
  5877. else:
  5878. return id
5879. if field_type in ('string', 'text', 'password'):
  5880. requires.append(validators.IS_LENGTH(field_length))
  5881. elif field_type == 'json':
  5882. requires.append(validators.IS_EMPTY_OR(validators.IS_JSON()))
  5883. elif field_type == 'double' or field_type == 'float':
  5884. requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
  5885. elif field_type in ('integer','bigint'):
  5886. requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
  5887. elif field_type.startswith('decimal'):
  5888. requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
  5889. elif field_type == 'date':
  5890. requires.append(validators.IS_DATE())
  5891. elif field_type == 'time':
  5892. requires.append(validators.IS_TIME())
  5893. elif field_type == 'datetime':
  5894. requires.append(validators.IS_DATETIME())
  5895. elif db and field_type.startswith('reference') and \
  5896. field_type.find('.') < 0 and \
  5897. field_type[10:] in db.tables:
  5898. referenced = db[field_type[10:]]
  5899. def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
  5900. field.represent = field.represent or repr_ref
  5901. if hasattr(referenced, '_format') and referenced._format:
  5902. requires = validators.IS_IN_DB(db,referenced._id,
  5903. referenced._format)
  5904. if field.unique:
  5905. requires._and = validators.IS_NOT_IN_DB(db,field)
  5906. if field.tablename == field_type[10:]:
  5907. return validators.IS_EMPTY_OR(requires)
  5908. return requires
  5909. elif db and field_type.startswith('list:reference') and \
  5910. field_type.find('.') < 0 and \
  5911. field_type[15:] in db.tables:
  5912. referenced = db[field_type[15:]]
  5913. def list_ref_repr(ids, row=None, r=referenced, f=ff):
  5914. if not ids:
  5915. return None
  5916. refs = None
  5917. db, id = r._db, r._id
  5918. if isinstance(db._adapter, GoogleDatastoreAdapter):
  5919. def count(values): return db(id.belongs(values)).select(id)
  5920. rx = range(0, len(ids), 30)
  5921. refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
  5922. else:
  5923. refs = db(id.belongs(ids)).select(id)
  5924. return (refs and ', '.join(str(f(r,x.id)) for x in refs) or '')
  5925. field.represent = field.represent or list_ref_repr
  5926. if hasattr(referenced, '_format') and referenced._format:
  5927. requires = validators.IS_IN_DB(db,referenced._id,
  5928. referenced._format,multiple=True)
  5929. else:
  5930. requires = validators.IS_IN_DB(db,referenced._id,
  5931. multiple=True)
  5932. if field.unique:
  5933. requires._and = validators.IS_NOT_IN_DB(db,field)
  5934. return requires
  5935. elif field_type.startswith('list:'):
5936. def repr_list(values,row=None): return ', '.join(str(v) for v in (values or []))
  5937. field.represent = field.represent or repr_list
  5938. if field.unique:
  5939. requires.insert(0,validators.IS_NOT_IN_DB(db,field))
5940. sff = ['in', 'do', 'da', 'ti', 'de', 'bo'] # 2-letter prefixes: integer, double, date(time), time, decimal, boolean
  5941. if field.notnull and not field_type[:2] in sff:
  5942. requires.insert(0, validators.IS_NOT_EMPTY())
  5943. elif not field.notnull and field_type[:2] in sff and requires:
  5944. requires[-1] = validators.IS_EMPTY_OR(requires[-1])
  5945. return requires
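# A sketch of the defaults assigned above (512 is the default string
# length; branch details as per sqlhtml_validators):
# Field('age', 'integer', notnull=True) -> [IS_INT_IN_RANGE(-1e100, 1e100)]
# (no IS_NOT_EMPTY is prepended: 'in' is in the sff prefix list)
# Field('name', 'string', notnull=True) -> [IS_NOT_EMPTY(), IS_LENGTH(512)]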
  5946. def bar_escape(item):
  5947. return str(item).replace('|', '||')
  5948. def bar_encode(items):
  5949. return '|%s|' % '|'.join(bar_escape(item) for item in items if str(item).strip())
  5950. def bar_decode_integer(value):
  5951. if not hasattr(value,'split') and hasattr(value,'read'):
  5952. value = value.read()
  5953. return [int(x) for x in value.split('|') if x.strip()]
  5954. def bar_decode_string(value):
  5955. return [x.replace('||', '|') for x in
  5956. REGEX_UNPACK.split(value[1:-1]) if x.strip()]
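# Round-trip sketch for the bar-encoding helpers above ('|' separates
# items; a literal '|' inside an item is escaped by doubling it; the
# decode step assumes REGEX_UNPACK splits on single, unescaped bars):
# >>> bar_encode(['ab', 'c|d'])
# '|ab|c||d|'
# >>> bar_decode_string('|ab|c||d|')
# ['ab', 'c|d']
# >>> bar_decode_integer('|1|2|3|')
# [1, 2, 3]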
  5957. class Row(object):
  5958. """
5959. a dictionary that lets you do d['a'] as well as d.a,
5960. used to store a single record returned by a select
  5961. """
  5962. def __init__(self,*args,**kwargs):
  5963. self.__dict__.update(*args,**kwargs)
  5964. def __getitem__(self, key):
  5965. key=str(key)
  5966. m = REGEX_TABLE_DOT_FIELD.match(key)
  5967. if key in self.get('_extra',{}):
  5968. return self._extra[key]
  5969. elif m:
  5970. try:
  5971. return ogetattr(self, m.group(1))[m.group(2)]
  5972. except (KeyError,AttributeError,TypeError):
  5973. key = m.group(2)
  5974. return ogetattr(self, key)
  5975. def __setitem__(self, key, value):
  5976. setattr(self, str(key), value)
  5977. __delitem__ = delattr
  5978. __copy__ = lambda self: Row(self)
  5979. __call__ = __getitem__
  5980. def get(self,key,default=None):
  5981. return self.__dict__.get(key,default)
  5982. def __contains__(self,key):
  5983. return key in self.__dict__
  5984. has_key = __contains__
  5985. def __nonzero__(self):
  5986. return len(self.__dict__)>0
  5987. def update(self, *args, **kwargs):
  5988. self.__dict__.update(*args, **kwargs)
  5989. def keys(self):
  5990. return self.__dict__.keys()
  5991. def items(self):
  5992. return self.__dict__.items()
  5993. def values(self):
  5994. return self.__dict__.values()
  5995. def __iter__(self):
  5996. return self.__dict__.__iter__()
  5997. def iteritems(self):
  5998. return self.__dict__.iteritems()
  5999. def __str__(self):
  6000. ### this could be made smarter
  6001. return '<Row %s>' % self.as_dict()
  6002. def __repr__(self):
  6003. return '<Row %s>' % self.as_dict()
  6004. def __int__(self):
  6005. return object.__getattribute__(self,'id')
  6006. def __eq__(self,other):
  6007. try:
  6008. return self.as_dict() == other.as_dict()
  6009. except AttributeError:
  6010. return False
  6011. def __ne__(self,other):
  6012. return not (self == other)
  6013. def __copy__(self):
  6014. return Row(dict(self))
  6015. def as_dict(self, datetime_to_str=False, custom_types=None):
  6016. SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
  6017. if isinstance(custom_types,(list,tuple,set)):
  6018. SERIALIZABLE_TYPES += custom_types
  6019. elif custom_types:
  6020. SERIALIZABLE_TYPES.append(custom_types)
  6021. d = dict(self)
  6022. for k in copy.copy(d.keys()):
  6023. v=d[k]
  6024. if d[k] is None:
  6025. continue
  6026. elif isinstance(v,Row):
  6027. d[k]=v.as_dict()
  6028. elif isinstance(v,Reference):
  6029. d[k]=int(v)
  6030. elif isinstance(v,decimal.Decimal):
  6031. d[k]=float(v)
  6032. elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
  6033. if datetime_to_str:
  6034. d[k] = v.isoformat().replace('T',' ')[:19]
  6035. elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
  6036. del d[k]
  6037. return d
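# Usage sketch for Row (key order in the dict may vary):
# >>> r = Row(id=1, name='Jim')
# >>> r.name == r['name'] == r('name')
# True
# >>> r.as_dict()
# {'id': 1, 'name': 'Jim'}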
  6038. def as_xml(self, row_name="row", colnames=None, indent=' '):
  6039. def f(row,field,indent=' '):
  6040. if isinstance(row,Row):
  6041. spc = indent+' \n'
  6042. items = [f(row[x],x,indent+' ') for x in row]
  6043. return '%s<%s>\n%s\n%s</%s>' % (
  6044. indent,
  6045. field,
  6046. spc.join(item for item in items if item),
  6047. indent,
  6048. field)
  6049. elif not callable(row):
  6050. if REGEX_ALPHANUMERIC.match(field):
  6051. return '%s<%s>%s</%s>' % (indent,field,row,field)
  6052. else:
  6053. return '%s<extra name="%s">%s</extra>' % \
  6054. (indent,field,row)
  6055. else:
  6056. return None
  6057. return f(self, row_name, indent=indent)
  6058. def as_json(self, mode="object", default=None, colnames=None,
  6059. serialize=True, **kwargs):
  6060. """
6061. serializes the row to a JSON object
6062. kwargs are passed to the .as_dict method;
6063. only "object" mode is supported for a single row
6064. serialize = False is used by Rows.as_json
  6065. TODO: return array mode with query column order
  6066. """
  6067. def inner_loop(record, col):
  6068. (t, f) = col.split('.')
  6069. res = None
  6070. if not REGEX_TABLE_DOT_FIELD.match(col):
  6071. key = col
  6072. res = record._extra[col]
  6073. else:
  6074. key = f
  6075. if isinstance(record.get(t, None), Row):
  6076. res = record[t][f]
  6077. else:
  6078. res = record[f]
  6079. if mode == 'object':
  6080. return (key, res)
  6081. else:
  6082. return res
  6083. multi = any([isinstance(v, self.__class__) for v in self.values()])
  6084. mode = mode.lower()
  6085. if not mode in ['object', 'array']:
  6086. raise SyntaxError('Invalid JSON serialization mode: %s' % mode)
  6087. if mode=='object' and colnames:
  6088. item = dict([inner_loop(self, col) for col in colnames])
  6089. elif colnames:
  6090. item = [inner_loop(self, col) for col in colnames]
  6091. else:
  6092. if not mode == 'object':
  6093. raise SyntaxError('Invalid JSON serialization mode: %s' % mode)
  6094. if multi:
  6095. item = dict()
  6096. [item.update(**v.as_dict(**kwargs)) for v in self.values()]
  6097. else:
  6098. item = self.as_dict(**kwargs)
  6099. if serialize:
  6100. if have_serializers:
  6101. return serializers.json(item,
  6102. default=default or
  6103. serializers.custom_json)
  6104. elif simplejson:
  6105. return simplejson.dumps(item)
  6106. else:
  6107. raise RuntimeError("missing simplejson")
  6108. else:
  6109. return item
  6110. ################################################################################
  6111. # Everything below should be independent of the specifics of the database
  6112. # and should work for RDBMs and some NoSQL databases
  6113. ################################################################################
  6114. class SQLCallableList(list):
  6115. def __call__(self):
  6116. return copy.copy(self)
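# SQLCallableList lets db.tables be used both as a plain list and as a
# callable returning a shallow copy, preserving the older API:
# >>> db.tables # the list itself
# >>> db.tables() # a copy of it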
  6117. def smart_query(fields,text):
  6118. if not isinstance(fields,(list,tuple)):
  6119. fields = [fields]
  6120. new_fields = []
  6121. for field in fields:
  6122. if isinstance(field,Field):
  6123. new_fields.append(field)
  6124. elif isinstance(field,Table):
  6125. for ofield in field:
  6126. new_fields.append(ofield)
  6127. else:
  6128. raise RuntimeError("fields must be a list of fields")
  6129. fields = new_fields
  6130. field_map = {}
  6131. for field in fields:
  6132. n = field.name.lower()
  6133. if not n in field_map:
  6134. field_map[n] = field
  6135. n = str(field).lower()
  6136. if not n in field_map:
  6137. field_map[n] = field
  6138. constants = {}
  6139. i = 0
  6140. while True:
  6141. m = REGEX_CONST_STRING.search(text)
  6142. if not m: break
  6143. text = text[:m.start()]+('#%i' % i)+text[m.end():]
  6144. constants[str(i)] = m.group()[1:-1]
  6145. i+=1
  6146. text = re.sub('\s+',' ',text).lower()
  6147. for a,b in [('&','and'),
  6148. ('|','or'),
  6149. ('~','not'),
  6150. ('==','='),
  6151. ('<','<'),
  6152. ('>','>'),
  6153. ('<=','<='),
  6154. ('>=','>='),
  6155. ('<>','!='),
  6156. ('=<','<='),
  6157. ('=>','>='),
  6158. ('=','='),
  6159. (' less or equal than ','<='),
  6160. (' greater or equal than ','>='),
  6161. (' equal or less than ','<='),
  6162. (' equal or greater than ','>='),
  6163. (' less or equal ','<='),
  6164. (' greater or equal ','>='),
  6165. (' equal or less ','<='),
  6166. (' equal or greater ','>='),
  6167. (' not equal to ','!='),
  6168. (' not equal ','!='),
  6169. (' equal to ','='),
  6170. (' equal ','='),
  6171. (' equals ','='),
  6172. (' less than ','<'),
  6173. (' greater than ','>'),
  6174. (' starts with ','startswith'),
  6175. (' ends with ','endswith'),
  6176. (' not in ' , 'notbelongs'),
  6177. (' in ' , 'belongs'),
  6178. (' is ','=')]:
  6179. if a[0]==' ':
  6180. text = text.replace(' is'+a,' %s ' % b)
  6181. text = text.replace(a,' %s ' % b)
  6182. text = re.sub('\s+',' ',text).lower()
  6183. text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
  6184. query = field = neg = op = logic = None
  6185. for item in text.split():
  6186. if field is None:
  6187. if item == 'not':
  6188. neg = True
  6189. elif not neg and not logic and item in ('and','or'):
  6190. logic = item
  6191. elif item in field_map:
  6192. field = field_map[item]
  6193. else:
  6194. raise RuntimeError("Invalid syntax")
  6195. elif not field is None and op is None:
  6196. op = item
  6197. elif not op is None:
  6198. if item.startswith('#'):
  6199. if not item[1:] in constants:
  6200. raise RuntimeError("Invalid syntax")
  6201. value = constants[item[1:]]
  6202. else:
  6203. value = item
  6204. if field.type in ('text', 'string', 'json'):
  6205. if op == '=': op = 'like'
  6206. if op == '=': new_query = field==value
  6207. elif op == '<': new_query = field<value
  6208. elif op == '>': new_query = field>value
  6209. elif op == '<=': new_query = field<=value
  6210. elif op == '>=': new_query = field>=value
  6211. elif op == '!=': new_query = field!=value
  6212. elif op == 'belongs': new_query = field.belongs(value.split(','))
  6213. elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
  6214. elif field.type in ('text', 'string', 'json'):
  6215. if op == 'contains': new_query = field.contains(value)
  6216. elif op == 'like': new_query = field.like(value)
  6217. elif op == 'startswith': new_query = field.startswith(value)
  6218. elif op == 'endswith': new_query = field.endswith(value)
  6219. else: raise RuntimeError("Invalid operation")
  6220. elif field._db._adapter.dbengine=='google:datastore' and \
  6221. field.type in ('list:integer', 'list:string', 'list:reference'):
  6222. if op == 'contains': new_query = field.contains(value)
  6223. else: raise RuntimeError("Invalid operation")
  6224. else: raise RuntimeError("Invalid operation")
  6225. if neg: new_query = ~new_query
  6226. if query is None:
  6227. query = new_query
  6228. elif logic == 'and':
  6229. query &= new_query
  6230. elif logic == 'or':
  6231. query |= new_query
  6232. field = op = neg = logic = None
  6233. return query
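# Sketch of smart_query's text parsing (table/field names assumed):
# >>> q = smart_query([db.person], 'person.name starts with "J" and not person.id > 5')
# builds the equivalent of:
# >>> (db.person.name.startswith('J')) & ~(db.person.id > 5)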
  6234. class DAL(object):
  6235. """
  6236. an instance of this class represents a database connection
  6237. Example::
  6238. db = DAL('sqlite://test.db')
  6239. db.define_table('tablename', Field('fieldname1'),
  6240. Field('fieldname2'))
  6241. (experimental)
  6242. you can pass a dict object as uri with the uri string
  6243. and table/field definitions. For an example of valid data check
  6244. the output of:
  6245. >>> db.as_dict(flat=True, sanitize=False)
  6246. """
  6247. def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
  6248. if not hasattr(THREAD_LOCAL,'db_instances'):
  6249. THREAD_LOCAL.db_instances = {}
  6250. if not hasattr(THREAD_LOCAL,'db_instances_zombie'):
  6251. THREAD_LOCAL.db_instances_zombie = {}
  6252. if uri == '<zombie>':
  6253. db_uid = kwargs['db_uid'] # a zombie must have a db_uid!
  6254. if db_uid in THREAD_LOCAL.db_instances:
  6255. db_group = THREAD_LOCAL.db_instances[db_uid]
  6256. db = db_group[-1]
  6257. elif db_uid in THREAD_LOCAL.db_instances_zombie:
  6258. db = THREAD_LOCAL.db_instances_zombie[db_uid]
  6259. else:
  6260. db = super(DAL, cls).__new__(cls)
  6261. THREAD_LOCAL.db_instances_zombie[db_uid] = db
  6262. else:
  6263. db_uid = kwargs.get('db_uid',hashlib_md5(repr(uri)).hexdigest())
  6264. if db_uid in THREAD_LOCAL.db_instances_zombie:
  6265. db = THREAD_LOCAL.db_instances_zombie[db_uid]
  6266. del THREAD_LOCAL.db_instances_zombie[db_uid]
  6267. else:
  6268. db = super(DAL, cls).__new__(cls)
  6269. db_group = THREAD_LOCAL.db_instances.get(db_uid,[])
  6270. db_group.append(db)
  6271. THREAD_LOCAL.db_instances[db_uid] = db_group
  6272. db._db_uid = db_uid
  6273. return db
  6274. @staticmethod
  6275. def set_folder(folder):
  6276. """
  6277. # ## this allows gluon to set a folder for this thread
  6278. # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
  6279. """
  6280. BaseAdapter.set_folder(folder)
  6281. @staticmethod
  6282. def get_instances():
  6283. """
  6284. Returns a dictionary with uri as key with timings and defined tables
  6285. {'sqlite://storage.sqlite': {
  6286. 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
  6287. 'dbtables': {
  6288. 'defined': ['auth_cas', 'auth_event', 'auth_group',
  6289. 'auth_membership', 'auth_permission', 'auth_user'],
  6290. 'lazy': '[]'
  6291. }
  6292. }
  6293. }
  6294. """
  6295. dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
  6296. infos = {}
  6297. for db_uid, db_group in dbs:
  6298. for db in db_group:
  6299. if not db._uri:
  6300. continue
  6301. k = hide_password(db._uri)
  6302. infos[k] = dict(dbstats = [(row[0], row[1]) for row in db._timings],
  6303. dbtables = {'defined':
  6304. sorted(list(set(db.tables) -
  6305. set(db._LAZY_TABLES.keys()))),
  6306. 'lazy': sorted(db._LAZY_TABLES.keys())}
  6307. )
  6308. return infos
  6309. @staticmethod
  6310. def distributed_transaction_begin(*instances):
  6311. if not instances:
  6312. return
  6313. thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
6314. instances = list(enumerate(instances))
6315. keys = ['%s.%i' % (thread_key, i) for (i,db) in instances]
  6316. for (i, db) in instances:
  6317. if not db._adapter.support_distributed_transaction():
  6318. raise SyntaxError(
6319. 'distributed transaction not supported by %s' % db._dbname)
  6320. for (i, db) in instances:
  6321. db._adapter.distributed_transaction_begin(keys[i])
  6322. @staticmethod
  6323. def distributed_transaction_commit(*instances):
  6324. if not instances:
  6325. return
6326. instances = list(enumerate(instances))
  6327. thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
  6328. keys = ['%s.%i' % (thread_key, i) for (i,db) in instances]
  6329. for (i, db) in instances:
  6330. if not db._adapter.support_distributed_transaction():
  6331. raise SyntaxError(
6332. 'distributed transaction not supported by %s' % db._dbname)
  6333. try:
  6334. for (i, db) in instances:
  6335. db._adapter.prepare(keys[i])
  6336. except:
  6337. for (i, db) in instances:
  6338. db._adapter.rollback_prepared(keys[i])
  6339. raise RuntimeError('failure to commit distributed transaction')
  6340. else:
  6341. for (i, db) in instances:
  6342. db._adapter.commit_prepared(keys[i])
  6343. return
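# Two-phase commit sketch across connections that support it (e.g. two
# PostgreSQL databases; URIs assumed):
# >>> db1 = DAL('postgres://u:p@host/db_a')
# >>> db2 = DAL('postgres://u:p@host/db_b')
# >>> DAL.distributed_transaction_commit(db1, db2)
# This prepares both transactions, then commits both, rolling back
# each prepared transaction if any prepare step fails.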
  6344. def __init__(self, uri=DEFAULT_URI,
  6345. pool_size=0, folder=None,
  6346. db_codec='UTF-8', check_reserved=None,
  6347. migrate=True, fake_migrate=False,
  6348. migrate_enabled=True, fake_migrate_all=False,
  6349. decode_credentials=False, driver_args=None,
  6350. adapter_args=None, attempts=5, auto_import=False,
  6351. bigint_id=False,debug=False,lazy_tables=False,
  6352. db_uid=None, do_connect=True, after_connection=None):
  6353. """
  6354. Creates a new Database Abstraction Layer instance.
  6355. Keyword arguments:
  6356. :uri: string that contains information for connecting to a database.
  6357. (default: 'sqlite://dummy.db')
  6358. :pool_size: How many open connections to make to the database object.
  6359. :folder: where .table files will be created.
  6360. automatically set within web2py
  6361. use an explicit path when using DAL outside web2py
  6362. :db_codec: string encoding of the database (default: 'UTF-8')
  6363. :check_reserved: list of adapters to check tablenames and column names
  6364. against sql/nosql reserved keywords. (Default None)
  6365. * 'common' List of sql keywords that are common to all database types
  6366. such as "SELECT, INSERT". (recommended)
  6367. * 'all' Checks against all known SQL keywords. (not recommended)
  6368. <adaptername> Checks against the specific adapters list of keywords
  6369. (recommended)
  6370. * '<adaptername>_nonreserved' Checks against the specific adapters
  6371. list of nonreserved keywords. (if available)
  6372. :migrate (defaults to True) sets default migrate behavior for all tables
  6373. :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
  6374. :migrate_enabled (defaults to True). If set to False disables ALL migrations
6375. :fake_migrate_all (defaults to False). If set to True, fake migrates ALL tables
  6376. :attempts (defaults to 5). Number of times to attempt connecting
  6377. :auto_import (defaults to False). If set, import automatically table definitions from the
  6378. databases folder
  6379. :bigint_id (defaults to False): If set, turn on bigint instead of int for id fields
  6380. :lazy_tables (defaults to False): delay table definition until table access
6381. :after_connection (defaults to None): a callable that will be executed after the connection
  6382. """
  6383. items = None
  6384. if isinstance(uri, dict):
  6385. if "items" in uri:
  6386. items = uri.pop("items")
  6387. try:
  6388. newuri = uri.pop("uri")
  6389. except KeyError:
  6390. newuri = DEFAULT_URI
6391. locals().update(uri) # caveat: updating locals() is not guaranteed to rebind function locals in CPython
  6392. uri = newuri
  6393. if uri == '<zombie>' and db_uid is not None: return
  6394. if not decode_credentials:
  6395. credential_decoder = lambda cred: cred
  6396. else:
  6397. credential_decoder = lambda cred: urllib.unquote(cred)
  6398. self._folder = folder
  6399. if folder:
  6400. self.set_folder(folder)
  6401. self._uri = uri
  6402. self._pool_size = pool_size
  6403. self._db_codec = db_codec
  6404. self._lastsql = ''
  6405. self._timings = []
  6406. self._pending_references = {}
  6407. self._request_tenant = 'request_tenant'
  6408. self._common_fields = []
  6409. self._referee_name = '%(table)s'
  6410. self._bigint_id = bigint_id
  6411. self._debug = debug
  6412. self._migrated = []
  6413. self._LAZY_TABLES = {}
  6414. self._lazy_tables = lazy_tables
  6415. self._tables = SQLCallableList()
  6416. self._driver_args = driver_args
  6417. self._adapter_args = adapter_args
  6418. self._check_reserved = check_reserved
  6419. self._decode_credentials = decode_credentials
  6420. self._attempts = attempts
  6421. self._do_connect = do_connect
  6422. if not str(attempts).isdigit() or attempts < 0:
  6423. attempts = 5
  6424. if uri:
  6425. uris = isinstance(uri,(list,tuple)) and uri or [uri]
6426. tb = error = ''
  6427. connected = False
  6428. for k in range(attempts):
  6429. for uri in uris:
  6430. try:
  6431. if is_jdbc and not uri.startswith('jdbc:'):
  6432. uri = 'jdbc:'+uri
  6433. self._dbname = REGEX_DBNAME.match(uri).group()
  6434. if not self._dbname in ADAPTERS:
  6435. raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
6436. # note: driver_args or {} is passed per call; a module-level
6437. # {} default would be shared between instances, which is not correct
  6438. kwargs = dict(db=self,uri=uri,
  6439. pool_size=pool_size,
  6440. folder=folder,
  6441. db_codec=db_codec,
  6442. credential_decoder=credential_decoder,
  6443. driver_args=driver_args or {},
  6444. adapter_args=adapter_args or {},
  6445. do_connect=do_connect,
  6446. after_connection=after_connection)
  6447. self._adapter = ADAPTERS[self._dbname](**kwargs)
  6448. types = ADAPTERS[self._dbname].types
  6449. # copy so multiple DAL() possible
  6450. self._adapter.types = copy.copy(types)
  6451. if bigint_id:
  6452. if 'big-id' in types and 'reference' in types:
  6453. self._adapter.types['id'] = types['big-id']
  6454. self._adapter.types['reference'] = types['big-reference']
  6455. connected = True
  6456. break
  6457. except SyntaxError:
  6458. raise
  6459. except Exception:
  6460. tb = traceback.format_exc()
  6461. sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
  6462. if connected:
  6463. break
  6464. else:
  6465. time.sleep(1)
  6466. if not connected:
  6467. raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
  6468. else:
  6469. self._adapter = BaseAdapter(db=self,pool_size=0,
  6470. uri='None',folder=folder,
  6471. db_codec=db_codec, after_connection=after_connection)
  6472. migrate = fake_migrate = False
  6473. adapter = self._adapter
  6474. self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
  6475. self.check_reserved = check_reserved
  6476. if self.check_reserved:
  6477. from reserved_sql_keywords import ADAPTERS as RSK
  6478. self.RSK = RSK
  6479. self._migrate = migrate
  6480. self._fake_migrate = fake_migrate
  6481. self._migrate_enabled = migrate_enabled
  6482. self._fake_migrate_all = fake_migrate_all
  6483. if auto_import or items:
  6484. self.import_table_definitions(adapter.folder,
  6485. items=items)
  6486. @property
  6487. def tables(self):
  6488. return self._tables
  6489. def import_table_definitions(self, path, migrate=False,
  6490. fake_migrate=False, items=None):
  6491. pattern = pjoin(path,self._uri_hash+'_*.table')
  6492. if items:
  6493. for tablename, table in items.iteritems():
  6494. # TODO: read all field/table options
  6495. fields = []
  6496. # remove unsupported/illegal Table arguments
  6497. [table.pop(name) for name in ("name", "fields") if
  6498. name in table]
  6499. if "items" in table:
  6500. for fieldname, field in table.pop("items").iteritems():
  6501. # remove unsupported/illegal Field arguments
  6502. [field.pop(key) for key in ("requires", "name",
  6503. "compute", "colname") if key in field]
  6504. fields.append(Field(str(fieldname), **field))
  6505. self.define_table(str(tablename), *fields, **table)
  6506. else:
  6507. for filename in glob.glob(pattern):
  6508. tfile = self._adapter.file_open(filename, 'r')
  6509. try:
  6510. sql_fields = pickle.load(tfile)
  6511. name = filename[len(pattern)-7:-6]
  6512. mf = [(value['sortable'],
  6513. Field(key,
  6514. type=value['type'],
  6515. length=value.get('length',None),
  6516. notnull=value.get('notnull',False),
  6517. unique=value.get('unique',False))) \
  6518. for key, value in sql_fields.iteritems()]
  6519. mf.sort(lambda a,b: cmp(a[0],b[0]))
  6520. self.define_table(name,*[item[1] for item in mf],
  6521. **dict(migrate=migrate,
  6522. fake_migrate=fake_migrate))
  6523. finally:
  6524. self._adapter.file_close(tfile)
  6525. def check_reserved_keyword(self, name):
  6526. """
6527. Validates ``name`` against SQL keywords.
6528. Uses self.check_reserved, which is a list of
6529. backends to check against, e.g.:
6530. self.check_reserved = ['common', 'postgres', 'mysql']
6531. self.check_reserved = ['all']
  6534. """
  6535. for backend in self.check_reserved:
  6536. if name.upper() in self.RSK[backend]:
  6537. raise SyntaxError(
  6538. 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
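# Sketch: with check_reserved enabled, defining a table or field whose
# name is a reserved keyword raises SyntaxError:
# >>> db = DAL('sqlite://x.db', check_reserved=['common'])
# >>> db.define_table('select', Field('from')) # raises SyntaxError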
  6539. def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
  6540. """
  6541. EXAMPLE:
  6542. db.define_table('person',Field('name'),Field('info'))
  6543. db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))
  6544. @request.restful()
  6545. def index():
  6546. def GET(*args,**vars):
  6547. patterns = [
  6548. "/friends[person]",
  6549. "/{person.name}/:field",
  6550. "/{person.name}/pets[pet.ownedby]",
  6551. "/{person.name}/pets[pet.ownedby]/{pet.name}",
  6552. "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
  6553. ("/dogs[pet]", db.pet.info=='dog'),
  6554. ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
  6555. ]
  6556. parser = db.parse_as_rest(patterns,args,vars)
  6557. if parser.status == 200:
  6558. return dict(content=parser.response)
  6559. else:
  6560. raise HTTP(parser.status,parser.error)
  6561. def POST(table_name,**vars):
  6562. if table_name == 'person':
  6563. return db.person.validate_and_insert(**vars)
  6564. elif table_name == 'pet':
  6565. return db.pet.validate_and_insert(**vars)
  6566. else:
  6567. raise HTTP(400)
  6568. return locals()
  6569. """
  6570. db = self
  6571. re1 = REGEX_SEARCH_PATTERN
  6572. re2 = REGEX_SQUARE_BRACKETS
  6573. def auto_table(table,base='',depth=0):
  6574. patterns = []
  6575. for field in db[table].fields:
  6576. if base:
  6577. tag = '%s/%s' % (base,field.replace('_','-'))
  6578. else:
  6579. tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
  6580. f = db[table][field]
  6581. if not f.readable: continue
  6582. if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
  6583. tag += '/{%s.%s}' % (table,field)
  6584. patterns.append(tag)
  6585. patterns.append(tag+'/:field')
  6586. elif f.type.startswith('boolean'):
  6587. tag += '/{%s.%s}' % (table,field)
  6588. patterns.append(tag)
  6589. patterns.append(tag+'/:field')
  6590. elif f.type in ('float','double','integer','bigint'):
  6591. tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
  6592. patterns.append(tag)
  6593. patterns.append(tag+'/:field')
  6594. elif f.type.startswith('list:'):
  6595. tag += '/{%s.%s.contains}' % (table,field)
  6596. patterns.append(tag)
  6597. patterns.append(tag+'/:field')
  6598. elif f.type in ('date','datetime'):
  6599. tag+= '/{%s.%s.year}' % (table,field)
  6600. patterns.append(tag)
  6601. patterns.append(tag+'/:field')
  6602. tag+='/{%s.%s.month}' % (table,field)
  6603. patterns.append(tag)
  6604. patterns.append(tag+'/:field')
  6605. tag+='/{%s.%s.day}' % (table,field)
  6606. patterns.append(tag)
  6607. patterns.append(tag+'/:field')
  6608. if f.type in ('datetime','time'):
  6609. tag+= '/{%s.%s.hour}' % (table,field)
  6610. patterns.append(tag)
  6611. patterns.append(tag+'/:field')
  6612. tag+='/{%s.%s.minute}' % (table,field)
  6613. patterns.append(tag)
  6614. patterns.append(tag+'/:field')
  6615. tag+='/{%s.%s.second}' % (table,field)
  6616. patterns.append(tag)
  6617. patterns.append(tag+'/:field')
  6618. if depth>0:
  6619. for f in db[table]._referenced_by:
  6620. tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
  6621. patterns.append(tag)
  6622. patterns += auto_table(table,base=tag,depth=depth-1)
  6623. return patterns
  6624. if patterns == 'auto':
  6625. patterns=[]
  6626. for table in db.tables:
  6627. if not table.startswith('auth_'):
  6628. patterns.append('/%s[%s]' % (table,table))
  6629. patterns += auto_table(table,base='',depth=1)
  6630. else:
  6631. i = 0
  6632. while i<len(patterns):
  6633. pattern = patterns[i]
  6634. if not isinstance(pattern,str):
  6635. pattern = pattern[0]
  6636. tokens = pattern.split('/')
  6637. if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
  6638. new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
  6639. '/'.join(tokens[:-1]))
  6640. patterns = patterns[:i]+new_patterns+patterns[i+1:]
  6641. i += len(new_patterns)
  6642. else:
  6643. i += 1
  6644. if '/'.join(args) == 'patterns':
  6645. return Row({'status':200,'pattern':'list',
  6646. 'error':None,'response':patterns})
  6647. for pattern in patterns:
  6648. basequery, exposedfields = None, []
  6649. if isinstance(pattern,tuple):
  6650. if len(pattern)==2:
  6651. pattern, basequery = pattern
  6652. elif len(pattern)>2:
  6653. pattern, basequery, exposedfields = pattern[0:3]
  6654. otable=table=None
  6655. if not isinstance(queries,dict):
  6656. dbset=db(queries)
  6657. if basequery is not None:
  6658. dbset = dbset(basequery)
  6659. i=0
  6660. tags = pattern[1:].split('/')
  6661. if len(tags)!=len(args):
  6662. continue
  6663. for tag in tags:
  6664. if re1.match(tag):
  6665. # print 're1:'+tag
  6666. tokens = tag[1:-1].split('.')
  6667. table, field = tokens[0], tokens[1]
  6668. if not otable or table == otable:
  6669. if len(tokens)==2 or tokens[2]=='eq':
  6670. query = db[table][field]==args[i]
  6671. elif tokens[2]=='ne':
  6672. query = db[table][field]!=args[i]
  6673. elif tokens[2]=='lt':
  6674. query = db[table][field]<args[i]
  6675. elif tokens[2]=='gt':
  6676. query = db[table][field]>args[i]
  6677. elif tokens[2]=='ge':
  6678. query = db[table][field]>=args[i]
  6679. elif tokens[2]=='le':
  6680. query = db[table][field]<=args[i]
  6681. elif tokens[2]=='year':
  6682. query = db[table][field].year()==args[i]
  6683. elif tokens[2]=='month':
  6684. query = db[table][field].month()==args[i]
  6685. elif tokens[2]=='day':
  6686. query = db[table][field].day()==args[i]
  6687. elif tokens[2]=='hour':
  6688. query = db[table][field].hour()==args[i]
  6689. elif tokens[2]=='minute':
  6690. query = db[table][field].minutes()==args[i]
  6691. elif tokens[2]=='second':
  6692. query = db[table][field].seconds()==args[i]
  6693. elif tokens[2]=='startswith':
  6694. query = db[table][field].startswith(args[i])
  6695. elif tokens[2]=='contains':
  6696. query = db[table][field].contains(args[i])
  6697. else:
  6698. raise RuntimeError("invalid pattern: %s" % pattern)
  6699. if len(tokens)==4 and tokens[3]=='not':
  6700. query = ~query
  6701. elif len(tokens)>=4:
  6702. raise RuntimeError("invalid pattern: %s" % pattern)
  6703. if not otable and isinstance(queries,dict):
  6704. dbset = db(queries[table])
  6705. if basequery is not None:
  6706. dbset = dbset(basequery)
  6707. dbset=dbset(query)
  6708. else:
  6709. raise RuntimeError("missing relation in pattern: %s" % pattern)
  6710. elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
  6711. ref = tag[tag.find('[')+1:-1]
  6712. if '.' in ref and otable:
  6713. table,field = ref.split('.')
  6714. selfld = '_id'
  6715. if db[table][field].type.startswith('reference '):
  6716. refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
  6717. else:
  6718. refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
  6719. if refs:
  6720. selfld = refs[0]
  6721. if nested_select:
  6722. try:
  6723. dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
  6724. except ValueError:
  6725. return Row({'status':400,'pattern':pattern,
  6726. 'error':'invalid path','response':None})
  6727. else:
  6728. items = [item.id for item in dbset.select(db[otable][selfld])]
  6729. dbset=db(db[table][field].belongs(items))
  6730. else:
  6731. table = ref
  6732. if not otable and isinstance(queries,dict):
  6733. dbset = db(queries[table])
  6734. dbset=dbset(db[table])
  6735. elif tag==':field' and table:
  6736. # print 're3:'+tag
  6737. field = args[i]
  6738. if not field in db[table]: break
  6739. # hand-built patterns should respect .readable=False as well
  6740. if not db[table][field].readable:
  6741. return Row({'status':418,'pattern':pattern,
  6742. 'error':'I\'m a teapot','response':None})
  6743. try:
  6744. distinct = vars.get('distinct', False) == 'True'
  6745. offset = int(vars.get('offset',None) or 0)
  6746. limits = (offset,int(vars.get('limit',None) or 1000)+offset)
  6747. except ValueError:
  6748. return Row({'status':400,'error':'invalid limits','response':None})
  6749. items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
  6750. if items:
  6751. return Row({'status':200,'response':items,
  6752. 'pattern':pattern})
  6753. else:
  6754. return Row({'status':404,'pattern':pattern,
  6755. 'error':'no record found','response':None})
  6756. elif tag != args[i]:
  6757. break
  6758. otable = table
  6759. i += 1
  6760. if i==len(tags) and table:
  6761. ofields = vars.get('order',db[table]._id.name).split('|')
  6762. try:
  6763. orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
  6764. except (KeyError, AttributeError):
  6765. return Row({'status':400,'error':'invalid orderby','response':None})
  6766. if exposedfields:
  6767. fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
  6768. else:
  6769. fields = [field for field in db[table] if field.readable]
  6770. count = dbset.count()
  6771. try:
  6772. offset = int(vars.get('offset',None) or 0)
  6773. limits = (offset,int(vars.get('limit',None) or 1000)+offset)
  6774. except ValueError:
  6775. return Row({'status':400,'error':'invalid limits','response':None})
  6776. if count > limits[1]-limits[0]:
  6777. return Row({'status':400,'error':'too many records','response':None})
  6778. try:
  6779. response = dbset.select(limitby=limits,orderby=orderby,*fields)
  6780. except ValueError:
  6781. return Row({'status':400,'pattern':pattern,
  6782. 'error':'invalid path','response':None})
  6783. return Row({'status':200,'response':response,
  6784. 'pattern':pattern,'count':count})
  6785. return Row({'status':400,'error':'no matching pattern','response':None})
  6786. def define_table(
  6787. self,
  6788. tablename,
  6789. *fields,
  6790. **args
  6791. ):
  6792. if not isinstance(tablename,str):
  6793. raise SyntaxError("missing table name")
  6794. elif hasattr(self,tablename) or tablename in self.tables:
  6795. if not args.get('redefine',False):
  6796. raise SyntaxError('table already defined: %s' % tablename)
  6797. elif tablename.startswith('_') or hasattr(self,tablename) or \
  6798. REGEX_PYTHON_KEYWORDS.match(tablename):
  6799. raise SyntaxError('invalid table name: %s' % tablename)
  6800. elif self.check_reserved:
  6801. self.check_reserved_keyword(tablename)
  6802. else:
  6803. invalid_args = set(args)-TABLE_ARGS
  6804. if invalid_args:
  6805. raise SyntaxError('invalid table "%s" attributes: %s' \
  6806. % (tablename,invalid_args))
  6807. if self._lazy_tables and not tablename in self._LAZY_TABLES:
  6808. self._LAZY_TABLES[tablename] = (tablename,fields,args)
  6809. table = None
  6810. else:
  6811. table = self.lazy_define_table(tablename,*fields,**args)
  6812. if not tablename in self.tables:
  6813. self.tables.append(tablename)
  6814. return table
  6815. def lazy_define_table(
  6816. self,
  6817. tablename,
  6818. *fields,
  6819. **args
  6820. ):
  6821. args_get = args.get
  6822. common_fields = self._common_fields
  6823. if common_fields:
  6824. fields = list(fields) + list(common_fields)
  6825. table_class = args_get('table_class',Table)
  6826. table = table_class(self, tablename, *fields, **args)
  6827. table._actual = True
  6828. self[tablename] = table
  6829. # must follow above line to handle self references
  6830. table._create_references()
  6831. for field in table:
  6832. if field.requires == DEFAULT:
  6833. field.requires = sqlhtml_validators(field)
  6834. migrate = self._migrate_enabled and args_get('migrate',self._migrate)
  6835. if migrate and not self._uri in (None,'None') \
  6836. or self._adapter.dbengine=='google:datastore':
  6837. fake_migrate = self._fake_migrate_all or \
  6838. args_get('fake_migrate',self._fake_migrate)
  6839. polymodel = args_get('polymodel',None)
  6840. try:
  6841. GLOBAL_LOCKER.acquire()
  6842. self._lastsql = self._adapter.create_table(
  6843. table,migrate=migrate,
  6844. fake_migrate=fake_migrate,
  6845. polymodel=polymodel)
  6846. finally:
  6847. GLOBAL_LOCKER.release()
  6848. else:
  6849. table._dbt = None
  6850. on_define = args_get('on_define',None)
  6851. if on_define: on_define(table)
  6852. return table
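# Lazy-table sketch: with lazy_tables=True, define_table above only
# records the definition and returns None; the Table is actually built
# by lazy_define_table on first attribute access:
# >>> db = DAL('sqlite://x.db', lazy_tables=True)
# >>> db.define_table('thing', Field('name'))
# >>> db.thing # triggers lazy_define_table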
  6853. def as_dict(self, flat=False, sanitize=True, field_options=True):
  6854. dbname = db_uid = uri = None
  6855. if not sanitize:
  6856. uri, dbname, db_uid = (self._uri, self._dbname, self._db_uid)
  6857. db_as_dict = dict(items={}, tables=[], uri=uri, dbname=dbname,
  6858. db_uid=db_uid,
  6859. **dict([(k, getattr(self, "_" + k)) for
  6860. k in 'pool_size','folder','db_codec',
  6861. 'check_reserved','migrate','fake_migrate',
  6862. 'migrate_enabled','fake_migrate_all',
  6863. 'decode_credentials','driver_args',
  6864. 'adapter_args', 'attempts',
  6865. 'bigint_id','debug','lazy_tables',
  6866. 'do_connect']))
  6867. for table in self:
  6868. tablename = str(table)
  6869. db_as_dict["tables"].append(tablename)
  6870. db_as_dict["items"][tablename] = table.as_dict(flat=flat,
  6871. sanitize=sanitize,
  6872. field_options=field_options)
  6873. return db_as_dict
  6874. def as_xml(self, sanitize=True, field_options=True):
  6875. if not have_serializers:
  6876. raise ImportError("No xml serializers available")
  6877. d = self.as_dict(flat=True, sanitize=sanitize,
  6878. field_options=field_options)
  6879. return serializers.xml(d)
  6880. def as_json(self, sanitize=True, field_options=True):
  6881. if not have_serializers:
  6882. raise ImportError("No json serializers available")
  6883. d = self.as_dict(flat=True, sanitize=sanitize,
  6884. field_options=field_options)
  6885. return serializers.json(d)
  6886. def as_yaml(self, sanitize=True, field_options=True):
  6887. if not have_serializers:
  6888. raise ImportError("No YAML serializers available")
  6889. d = self.as_dict(flat=True, sanitize=sanitize,
  6890. field_options=field_options)
  6891. return serializers.yaml(d)
  6892. def __contains__(self, tablename):
  6893. try:
  6894. return tablename in self.tables
  6895. except AttributeError:
  6896. # The instance has no .tables attribute yet
  6897. return False
  6898. has_key = __contains__
  6899. def get(self,key,default=None):
  6900. return self.__dict__.get(key,default)
  6901. def __iter__(self):
  6902. for tablename in self.tables:
  6903. yield self[tablename]
  6904. def __getitem__(self, key):
  6905. return self.__getattr__(str(key))
  6906. def __getattr__(self, key):
  6907. if ogetattr(self,'_lazy_tables') and \
  6908. key in ogetattr(self,'_LAZY_TABLES'):
  6909. tablename, fields, args = self._LAZY_TABLES.pop(key)
  6910. return self.lazy_define_table(tablename,*fields,**args)
  6911. return ogetattr(self, key)
  6912. def __setitem__(self, key, value):
  6913. osetattr(self, str(key), value)
  6914. def __setattr__(self, key, value):
  6915. if key[:1]!='_' and key in self:
  6916. raise SyntaxError(
  6917. 'Object %s exists and cannot be redefined' % key)
  6918. osetattr(self,key,value)
  6919. __delitem__ = object.__delattr__
  6920. def __repr__(self):
  6921. if hasattr(self,'_uri'):
  6922. return '<DAL uri="%s">' % hide_password(str(self._uri))
  6923. else:
  6924. return '<DAL db_uid="%s">' % self._db_uid
  6925. def smart_query(self,fields,text):
  6926. return Set(self, smart_query(fields,text))
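# Example (a sketch; the table and the search text are assumptions):
# db.smart_query([db.person], 'person.name contains J').select()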
  6927. def __call__(self, query=None, ignore_common_filters=None):
  6928. if isinstance(query,Table):
  6929. query = self._adapter.id_query(query)
  6930. elif isinstance(query,Field):
  6931. query = query!=None
  6932. elif isinstance(query, dict):
  6933. icf = query.get("ignore_common_filters")
  6934. if icf: ignore_common_filters = icf
  6935. return Set(self, query, ignore_common_filters=ignore_common_filters)
  6936. def commit(self):
  6937. self._adapter.commit()
  6938. def rollback(self):
  6939. self._adapter.rollback()
  6940. def close(self):
  6941. self._adapter.close()
  6942. if self._db_uid in THREAD_LOCAL.db_instances:
  6943. db_group = THREAD_LOCAL.db_instances[self._db_uid]
  6944. db_group.remove(self)
  6945. if not db_group:
  6946. del THREAD_LOCAL.db_instances[self._db_uid]
  6947. def executesql(self, query, placeholders=None, as_dict=False,
  6948. fields=None, colnames=None):
  6949. """
placeholders is optional and defaults to None.
  6951. If using raw SQL with placeholders, placeholders may be
  6952. a sequence of values to be substituted in
  6953. or, (if supported by the DB driver), a dictionary with keys
  6954. matching named placeholders in your SQL.
Added 2009-12-05 "as_dict" optional argument. It defaults to
False. If using raw SQL it can be set to True, and the results
cursor returned by the DB driver will then be
  6958. converted to a sequence of dictionaries keyed with the db
  6959. field names. Tested with SQLite but should work with any database
  6960. since the cursor.description used to get field names is part of the
Python DB-API 2.0 specs. Results returned with as_dict=True are
  6962. the same as those returned when applying .to_list() to a DAL query.
  6963. [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
  6964. Added 2012-08-24 "fields" and "colnames" optional arguments. If either
  6965. is provided, the results cursor returned by the DB driver will be
  6966. converted to a DAL Rows object using the db._adapter.parse() method.
  6967. The "fields" argument is a list of DAL Field objects that match the
  6968. fields returned from the DB. The Field objects should be part of one or
  6969. more Table objects defined on the DAL object. The "fields" list can
  6970. include one or more DAL Table objects in addition to or instead of
  6971. including Field objects, or it can be just a single table (not in a
  6972. list). In that case, the Field objects will be extracted from the
  6973. table(s).
  6974. Instead of specifying the "fields" argument, the "colnames" argument
  6975. can be specified as a list of field names in tablename.fieldname format.
  6976. Again, these should represent tables and fields defined on the DAL
  6977. object.
  6978. It is also possible to specify both "fields" and the associated
  6979. "colnames". In that case, "fields" can also include DAL Expression
  6980. objects in addition to Field objects. For Field objects in "fields",
  6981. the associated "colnames" must still be in tablename.fieldname format.
  6982. For Expression objects in "fields", the associated "colnames" can
  6983. be any arbitrary labels.
  6984. Note, the DAL Table objects referred to by "fields" or "colnames" can
  6985. be dummy tables and do not have to represent any real tables in the
  6986. database. Also, note that the "fields" and "colnames" must be in the
  6987. same order as the fields in the results cursor returned from the DB.
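Example (a minimal sketch; the 'person' table and the raw SQL
below are illustrative assumptions, not part of this API):
rows = db.executesql('SELECT id, name FROM person;', as_dict=True)
rows = db.executesql('SELECT id, name FROM person;',
fields=[db.person.id, db.person.name]) # parsed into a Rows object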
  6988. """
  6989. adapter = self._adapter
  6990. if placeholders:
  6991. adapter.execute(query, placeholders)
  6992. else:
  6993. adapter.execute(query)
  6994. if as_dict:
  6995. if not hasattr(adapter.cursor,'description'):
  6996. raise RuntimeError("database does not support executesql(...,as_dict=True)")
  6997. # Non-DAL legacy db query, converts cursor results to dict.
  6998. # sequence of 7-item sequences. each sequence tells about a column.
  6999. # first item is always the field name according to Python Database API specs
  7000. columns = adapter.cursor.description
  7001. # reduce the column info down to just the field names
  7002. fields = [f[0] for f in columns]
  7003. # will hold our finished resultset in a list
  7004. data = adapter._fetchall()
  7005. # convert the list for each row into a dictionary so it's
  7006. # easier to work with. row['field_name'] rather than row[0]
  7007. return [dict(zip(fields,row)) for row in data]
  7008. try:
  7009. data = adapter._fetchall()
  7010. except:
  7011. return None
  7012. if fields or colnames:
  7013. fields = [] if fields is None else fields
  7014. if not isinstance(fields, list):
  7015. fields = [fields]
  7016. extracted_fields = []
  7017. for field in fields:
  7018. if isinstance(field, Table):
  7019. extracted_fields.extend([f for f in field])
  7020. else:
  7021. extracted_fields.append(field)
  7022. if not colnames:
  7023. colnames = ['%s.%s' % (f.tablename, f.name)
  7024. for f in extracted_fields]
  7025. data = adapter.parse(
  7026. data, fields=extracted_fields, colnames=colnames)
  7027. return data
  7028. def _remove_references_to(self, thistable):
  7029. for table in self:
  7030. table._referenced_by = [field for field in table._referenced_by
  7031. if not field.table==thistable]
  7032. def export_to_csv_file(self, ofile, *args, **kwargs):
step = int(kwargs.get('max_fetch_rows',500))
  7034. write_colnames = kwargs['write_colnames'] = \
  7035. kwargs.get("write_colnames", True)
  7036. for table in self.tables:
  7037. ofile.write('TABLE %s\r\n' % table)
  7038. query = self._adapter.id_query(self[table])
  7039. nrows = self(query).count()
  7040. kwargs['write_colnames'] = write_colnames
  7041. for k in range(0,nrows,step):
  7042. self(query).select(limitby=(k,k+step)).export_to_csv_file(
  7043. ofile, *args, **kwargs)
  7044. kwargs['write_colnames'] = False
  7045. ofile.write('\r\n\r\n')
  7046. ofile.write('END')
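# Example (a sketch; the file name is an assumption):
# db.export_to_csv_file(open('backup.csv', 'wb'))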
  7047. def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
  7048. unique='uuid', *args, **kwargs):
  7049. #if id_map is None: id_map={}
  7050. id_offset = {} # only used if id_map is None
  7051. for line in ifile:
  7052. line = line.strip()
  7053. if not line:
  7054. continue
  7055. elif line == 'END':
  7056. return
  7057. elif not line.startswith('TABLE ') or not line[6:] in self.tables:
  7058. raise SyntaxError('invalid file format')
  7059. else:
  7060. tablename = line[6:]
  7061. self[tablename].import_from_csv_file(
  7062. ifile, id_map, null, unique, id_offset, *args, **kwargs)
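# Example (a sketch; expects a file in the format written by
# export_to_csv_file above):
# db.import_from_csv_file(open('backup.csv', 'rb'))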
  7063. def DAL_unpickler(db_uid):
  7064. return DAL('<zombie>',db_uid=db_uid)
  7065. def DAL_pickler(db):
  7066. return DAL_unpickler, (db._db_uid,)
  7067. copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)
  7068. class SQLALL(object):
  7069. """
  7070. Helper class providing a comma-separated string having all the field names
  7071. (prefixed by table name and '.')
  7072. normally only called from within gluon.sql
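Example (a sketch; assumes a 'person' table with a 'name' field):
str(db.person.ALL) # -> 'person.id, person.name'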
  7073. """
  7074. def __init__(self, table):
  7075. self._table = table
  7076. def __str__(self):
  7077. return ', '.join([str(field) for field in self._table])
  7078. # class Reference(int):
  7079. class Reference(long):
  7080. def __allocate(self):
  7081. if not self._record:
  7082. self._record = self._table[int(self)]
  7083. if not self._record:
  7084. raise RuntimeError(
  7085. "Using a recursive select but encountered a broken reference: %s %d"%(self._table, int(self)))
def __getattr__(self, key, default=None):
if key == 'id':
return int(self)
self.__allocate()
return self._record.get(key, default)
  7091. def get(self, key, default=None):
  7092. return self.__getattr__(key, default)
  7093. def __setattr__(self, key, value):
  7094. if key.startswith('_'):
  7095. int.__setattr__(self, key, value)
  7096. return
  7097. self.__allocate()
  7098. self._record[key] = value
  7099. def __getitem__(self, key):
  7100. if key == 'id':
  7101. return int(self)
  7102. self.__allocate()
  7103. return self._record.get(key, None)
  7104. def __setitem__(self,key,value):
  7105. self.__allocate()
  7106. self._record[key] = value
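# Example (a sketch; a 'dog' table with a 'reference person' owner
# field is an assumption): given row = db.dog(1), row.owner is a
# Reference; accessing row.owner.name triggers the lazy recursive
# select performed by __allocate above.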
  7107. def Reference_unpickler(data):
  7108. return marshal.loads(data)
  7109. def Reference_pickler(data):
  7110. try:
  7111. marshal_dump = marshal.dumps(int(data))
  7112. except AttributeError:
  7113. marshal_dump = 'i%s' % struct.pack('<i', int(data))
  7114. return (Reference_unpickler, (marshal_dump,))
  7115. copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
  7116. class Table(object):
  7117. """
  7118. an instance of this class represents a database table
  7119. Example::
  7120. db = DAL(...)
  7121. db.define_table('users', Field('name'))
  7122. db.users.insert(name='me') # print db.users._insert(...) to see SQL
  7123. db.users.drop()
  7124. """
  7125. def __init__(
  7126. self,
  7127. db,
  7128. tablename,
  7129. *fields,
  7130. **args
  7131. ):
  7132. """
  7133. Initializes the table and performs checking on the provided fields.
Each table will automatically have an 'id' field.
  7135. If a field is of type Table, the fields (excluding 'id') from that table
  7136. will be used instead.
  7137. :raises SyntaxError: when a supplied field is of incorrect type.
  7138. """
  7139. self._actual = False # set to True by define_table()
  7140. self._tablename = tablename
  7141. self._sequence_name = args.get('sequence_name',None) or \
  7142. db and db._adapter.sequence_name(tablename)
  7143. self._trigger_name = args.get('trigger_name',None) or \
  7144. db and db._adapter.trigger_name(tablename)
  7145. self._common_filter = args.get('common_filter', None)
  7146. self._format = args.get('format',None)
  7147. self._singular = args.get(
  7148. 'singular',tablename.replace('_',' ').capitalize())
  7149. self._plural = args.get(
  7150. 'plural',pluralize(self._singular.lower()).capitalize())
# horrible but kept for backward compatibility of appadmin:
  7152. if 'primarykey' in args and args['primarykey']:
  7153. self._primarykey = args.get('primarykey', None)
  7154. self._before_insert = []
  7155. self._before_update = [Set.delete_uploaded_files]
  7156. self._before_delete = [Set.delete_uploaded_files]
  7157. self._after_insert = []
  7158. self._after_update = []
  7159. self._after_delete = []
  7160. fieldnames,newfields=set(),[]
  7161. if hasattr(self,'_primarykey'):
  7162. if not isinstance(self._primarykey,list):
  7163. raise SyntaxError(
  7164. "primarykey must be a list of fields from table '%s'" \
  7165. % tablename)
  7166. if len(self._primarykey)==1:
  7167. self._id = [f for f in fields if isinstance(f,Field) \
  7168. and f.name==self._primarykey[0]][0]
  7169. elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
  7170. field = Field('id', 'id')
  7171. newfields.append(field)
  7172. fieldnames.add('id')
  7173. self._id = field
  7174. virtual_fields = []
  7175. for field in fields:
  7176. if isinstance(field, (FieldMethod, FieldVirtual)):
  7177. virtual_fields.append(field)
  7178. elif isinstance(field, Field) and not field.name in fieldnames:
  7179. if field.db is not None:
  7180. field = copy.copy(field)
  7181. newfields.append(field)
  7182. fieldnames.add(field.name)
  7183. if field.type=='id':
  7184. self._id = field
  7185. elif isinstance(field, Table):
  7186. table = field
  7187. for field in table:
  7188. if not field.name in fieldnames and not field.type=='id':
  7189. t2 = not table._actual and self._tablename
  7190. field = field.clone(point_self_references_to=t2)
  7191. newfields.append(field)
  7192. fieldnames.add(field.name)
  7193. elif not isinstance(field, (Field, Table)):
  7194. raise SyntaxError(
  7195. 'define_table argument is not a Field or Table: %s' % field)
  7196. fields = newfields
  7197. self._db = db
  7198. tablename = tablename
  7199. self._fields = SQLCallableList()
  7200. self.virtualfields = []
  7201. fields = list(fields)
  7202. if db and db._adapter.uploads_in_blob==True:
  7203. uploadfields = [f.name for f in fields if f.type=='blob']
  7204. for field in fields:
  7205. fn = field.uploadfield
  7206. if isinstance(field, Field) and field.type == 'upload'\
  7207. and fn is True:
  7208. fn = field.uploadfield = '%s_blob' % field.name
  7209. if isinstance(fn,str) and not fn in uploadfields:
  7210. fields.append(Field(fn,'blob',default='',
  7211. writable=False,readable=False))
  7212. lower_fieldnames = set()
  7213. reserved = dir(Table) + ['fields']
  7214. for field in fields:
  7215. field_name = field.name
  7216. if db and db.check_reserved:
  7217. db.check_reserved_keyword(field_name)
  7218. elif field_name in reserved:
  7219. raise SyntaxError("field name %s not allowed" % field_name)
  7220. if field_name.lower() in lower_fieldnames:
  7221. raise SyntaxError("duplicate field %s in table %s" \
  7222. % (field_name, tablename))
  7223. else:
  7224. lower_fieldnames.add(field_name.lower())
  7225. self.fields.append(field_name)
  7226. self[field_name] = field
  7227. if field.type == 'id':
  7228. self['id'] = field
  7229. field.tablename = field._tablename = tablename
  7230. field.table = field._table = self
  7231. field.db = field._db = db
  7232. if db and not field.type in ('text', 'blob', 'json') and \
  7233. db._adapter.maxcharlength < field.length:
  7234. field.length = db._adapter.maxcharlength
  7235. self.ALL = SQLALL(self)
  7236. if hasattr(self,'_primarykey'):
  7237. for k in self._primarykey:
  7238. if k not in self.fields:
  7239. raise SyntaxError(
  7240. "primarykey must be a list of fields from table '%s " % tablename)
  7241. else:
  7242. self[k].notnull = True
  7243. for field in virtual_fields:
  7244. self[field.name] = field
  7245. @property
  7246. def fields(self):
  7247. return self._fields
  7248. def update(self,*args,**kwargs):
  7249. raise RuntimeError("Syntax Not Supported")
  7250. def _enable_record_versioning(self,
  7251. archive_db=None,
  7252. archive_name = '%(tablename)s_archive',
  7253. current_record = 'current_record',
  7254. is_active = 'is_active'):
  7255. archive_db = archive_db or self._db
  7256. archive_name = archive_name % dict(tablename=self._tablename)
  7257. if archive_name in archive_db.tables():
return # do not try to define the archive table if it already exists
  7259. fieldnames = self.fields()
  7260. field_type = self if archive_db is self._db else 'bigint'
  7261. archive_db.define_table(
  7262. archive_name,
  7263. Field(current_record,field_type),
  7264. *[field.clone(unique=False) for field in self])
  7265. self._before_update.append(
  7266. lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
  7267. archive_record(qset,fs,db[an],cn))
  7268. if is_active and is_active in fieldnames:
  7269. self._before_delete.append(
  7270. lambda qset: qset.update(is_active=False))
  7271. newquery = lambda query, t=self: t.is_active == True
  7272. query = self._common_filter
  7273. if query:
  7274. newquery = query & newquery
  7275. self._common_filter = newquery
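# Example (a sketch; assumes a 'person' table):
# db.person._enable_record_versioning() creates a 'person_archive'
# table and registers the _before_update/_before_delete callbacks above.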
  7276. def _validate(self,**vars):
  7277. errors = Row()
  7278. for key,value in vars.iteritems():
  7279. value,error = self[key].validate(value)
  7280. if error:
  7281. errors[key] = error
  7282. return errors
  7283. def _create_references(self):
  7284. db = self._db
  7285. pr = db._pending_references
  7286. self._referenced_by = []
  7287. for field in self:
  7288. fieldname = field.name
  7289. field_type = field.type
  7290. if isinstance(field_type,str) and field_type[:10] == 'reference ':
  7291. ref = field_type[10:].strip()
  7292. if not ref.split():
  7293. raise SyntaxError('Table: reference to nothing: %s' %ref)
  7294. refs = ref.split('.')
  7295. rtablename = refs[0]
  7296. if not rtablename in db:
  7297. pr[rtablename] = pr.get(rtablename,[]) + [field]
  7298. continue
  7299. rtable = db[rtablename]
  7300. if len(refs)==2:
  7301. rfieldname = refs[1]
  7302. if not hasattr(rtable,'_primarykey'):
  7303. raise SyntaxError(
  7304. 'keyed tables can only reference other keyed tables (for now)')
  7305. if rfieldname not in rtable.fields:
  7306. raise SyntaxError(
  7307. "invalid field '%s' for referenced table '%s' in table '%s'" \
  7308. % (rfieldname, rtablename, self._tablename))
  7309. rtable._referenced_by.append(field)
  7310. for referee in pr.get(self._tablename,[]):
  7311. self._referenced_by.append(referee)
  7312. def _filter_fields(self, record, id=False):
  7313. return dict([(k, v) for (k, v) in record.iteritems() if k
  7314. in self.fields and (self[k].type!='id' or id)])
  7315. def _build_query(self,key):
  7316. """ for keyed table only """
  7317. query = None
  7318. for k,v in key.iteritems():
  7319. if k in self._primarykey:
  7320. if query:
  7321. query = query & (self[k] == v)
  7322. else:
  7323. query = (self[k] == v)
  7324. else:
  7325. raise SyntaxError(
  7326. 'Field %s is not part of the primary key of %s' % \
  7327. (k,self._tablename))
  7328. return query
  7329. def __getitem__(self, key):
  7330. if not key:
  7331. return None
  7332. elif isinstance(key, dict):
  7333. """ for keyed table """
  7334. query = self._build_query(key)
  7335. rows = self._db(query).select()
  7336. if rows:
  7337. return rows[0]
  7338. return None
  7339. elif str(key).isdigit() or 'google' in DRIVERS and isinstance(key, Key):
  7340. return self._db(self._id == key).select(limitby=(0,1)).first()
  7341. elif key:
  7342. return ogetattr(self, str(key))
  7343. def __call__(self, key=DEFAULT, **kwargs):
  7344. for_update = kwargs.get('_for_update',False)
  7345. if '_for_update' in kwargs: del kwargs['_for_update']
  7346. orderby = kwargs.get('_orderby',None)
  7347. if '_orderby' in kwargs: del kwargs['_orderby']
  7348. if not key is DEFAULT:
  7349. if isinstance(key, Query):
  7350. record = self._db(key).select(
  7351. limitby=(0,1),for_update=for_update, orderby=orderby).first()
  7352. elif not str(key).isdigit():
  7353. record = None
  7354. else:
  7355. record = self._db(self._id == key).select(
  7356. limitby=(0,1),for_update=for_update, orderby=orderby).first()
  7357. if record:
  7358. for k,v in kwargs.iteritems():
  7359. if record[k]!=v: return None
  7360. return record
  7361. elif kwargs:
  7362. query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
  7363. return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby).first()
  7364. else:
  7365. return None
  7366. def __setitem__(self, key, value):
  7367. if isinstance(key, dict) and isinstance(value, dict):
  7368. """ option for keyed table """
  7369. if set(key.keys()) == set(self._primarykey):
  7370. value = self._filter_fields(value)
  7371. kv = {}
  7372. kv.update(value)
  7373. kv.update(key)
  7374. if not self.insert(**kv):
  7375. query = self._build_query(key)
  7376. self._db(query).update(**self._filter_fields(value))
  7377. else:
  7378. raise SyntaxError(
  7379. 'key must have all fields from primary key: %s'%\
  7380. (self._primarykey))
  7381. elif str(key).isdigit():
  7382. if key == 0:
  7383. self.insert(**self._filter_fields(value))
  7384. elif self._db(self._id == key)\
  7385. .update(**self._filter_fields(value)) is None:
  7386. raise SyntaxError('No such record: %s' % key)
  7387. else:
  7388. if isinstance(key, dict):
  7389. raise SyntaxError(
  7390. 'value must be a dictionary: %s' % value)
  7391. osetattr(self, str(key), value)
  7392. __getattr__ = __getitem__
  7393. def __setattr__(self, key, value):
  7394. if key[:1]!='_' and key in self:
  7395. raise SyntaxError('Object exists and cannot be redefined: %s' % key)
  7396. osetattr(self,key,value)
  7397. def __delitem__(self, key):
  7398. if isinstance(key, dict):
  7399. query = self._build_query(key)
  7400. if not self._db(query).delete():
  7401. raise SyntaxError('No such record: %s' % key)
  7402. elif not str(key).isdigit() or \
  7403. not self._db(self._id == key).delete():
  7404. raise SyntaxError('No such record: %s' % key)
  7405. def __contains__(self,key):
  7406. return hasattr(self,key)
  7407. has_key = __contains__
  7408. def items(self):
  7409. return self.__dict__.items()
  7410. def __iter__(self):
  7411. for fieldname in self.fields:
  7412. yield self[fieldname]
  7413. def iteritems(self):
  7414. return self.__dict__.iteritems()
  7415. def __repr__(self):
  7416. return '<Table %s (%s)>' % (self._tablename,','.join(self.fields()))
  7417. def __str__(self):
  7418. if hasattr(self,'_ot') and self._ot is not None:
  7419. if 'Oracle' in str(type(self._db._adapter)): # <<< patch
  7420. return '%s %s' % (self._ot, self._tablename) # <<< patch
  7421. return '%s AS %s' % (self._ot, self._tablename)
  7422. return self._tablename
  7423. def _drop(self, mode = ''):
  7424. return self._db._adapter._drop(self, mode)
  7425. def drop(self, mode = ''):
  7426. return self._db._adapter.drop(self,mode)
  7427. def _listify(self,fields,update=False):
  7428. new_fields = {} # format: new_fields[name] = (field,value)
  7429. # store all fields passed as input in new_fields
  7430. for name in fields:
  7431. if not name in self.fields:
  7432. if name != 'id':
  7433. raise SyntaxError(
  7434. 'Field %s does not belong to the table' % name)
  7435. else:
  7436. field = self[name]
  7437. value = fields[name]
  7438. if field.filter_in:
  7439. value = field.filter_in(value)
  7440. new_fields[name] = (field,value)
  7441. # check all fields that should be in the table but are not passed
  7442. to_compute = []
  7443. for ofield in self:
  7444. name = ofield.name
  7445. if not name in new_fields:
  7446. # if field is supposed to be computed, compute it!
  7447. if ofield.compute: # save those to compute for later
  7448. to_compute.append((name,ofield))
  7449. # if field is required, check its default value
  7450. elif not update and not ofield.default is None:
  7451. value = ofield.default
  7452. fields[name] = value
  7453. new_fields[name] = (ofield,value)
# if this is an update, use the update value instead
  7455. elif update and not ofield.update is None:
  7456. value = ofield.update
  7457. fields[name] = value
  7458. new_fields[name] = (ofield,value)
  7459. # if the field is still not there but it should, error
  7460. elif not update and ofield.required:
  7461. raise RuntimeError(
  7462. 'Table: missing required field: %s' % name)
  7463. # now deal with fields that are supposed to be computed
  7464. if to_compute:
  7465. row = Row(fields)
  7466. for name,ofield in to_compute:
  7467. # try compute it
  7468. try:
  7469. new_fields[name] = (ofield,ofield.compute(row))
  7470. except (KeyError, AttributeError):
# fail silently unless the field is required!
if ofield.required:
raise SyntaxError('unable to compute field: %s' % name)
  7474. return new_fields.values()
  7475. def _attempt_upload(self, fields):
  7476. for field in self:
  7477. if field.type=='upload' and field.name in fields:
  7478. value = fields[field.name]
  7479. if value and not isinstance(value,str):
  7480. if hasattr(value,'file') and hasattr(value,'filename'):
  7481. new_name = field.store(value.file,filename=value.filename)
  7482. elif hasattr(value,'read') and hasattr(value,'name'):
  7483. new_name = field.store(value,filename=value.name)
  7484. else:
  7485. raise RuntimeError("Unable to handle upload")
  7486. fields[field.name] = new_name
  7487. def _defaults(self, fields):
  7488. "If there are no fields/values specified, return table defaults"
  7489. if not fields:
  7490. fields = {}
  7491. for field in self:
  7492. if field.type != "id":
  7493. fields[field.name] = field.default
  7494. return fields
  7495. def _insert(self, **fields):
  7496. fields = self._defaults(fields)
  7497. return self._db._adapter._insert(self, self._listify(fields))
  7498. def insert(self, **fields):
  7499. fields = self._defaults(fields)
  7500. self._attempt_upload(fields)
  7501. if any(f(fields) for f in self._before_insert): return 0
  7502. ret = self._db._adapter.insert(self, self._listify(fields))
  7503. if ret and self._after_insert:
  7504. fields = Row(fields)
  7505. [f(fields,ret) for f in self._after_insert]
  7506. return ret
  7507. def validate_and_insert(self,**fields):
  7508. response = Row()
  7509. response.errors = Row()
  7510. new_fields = copy.copy(fields)
  7511. for key,value in fields.iteritems():
  7512. value,error = self[key].validate(value)
  7513. if error:
  7514. response.errors[key] = "%s" % error
  7515. else:
  7516. new_fields[key] = value
  7517. if not response.errors:
  7518. response.id = self.insert(**new_fields)
  7519. else:
  7520. response.id = None
  7521. return response
  7522. def update_or_insert(self, _key=DEFAULT, **values):
  7523. if _key is DEFAULT:
  7524. record = self(**values)
  7525. elif isinstance(_key,dict):
  7526. record = self(**_key)
  7527. else:
  7528. record = self(_key)
  7529. if record:
  7530. record.update_record(**values)
  7531. newid = None
  7532. else:
  7533. newid = self.insert(**values)
  7534. return newid
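# Example (a sketch; the table and values are assumptions):
# db.person.update_or_insert(db.person.name == 'James', name='James')
# returns the new id on insert, None if an existing record was updated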
  7535. def bulk_insert(self, items):
  7536. """
  7537. here items is a list of dictionaries
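Example (a sketch; the table and values are assumptions):
db.person.bulk_insert([dict(name='Alex'), dict(name='Bo')])
returns the ids of the newly inserted records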
  7538. """
  7539. items = [self._listify(item) for item in items]
  7540. if any(f(item) for item in items for f in self._before_insert):return 0
  7541. ret = self._db._adapter.bulk_insert(self,items)
  7542. ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
  7543. return ret
  7544. def _truncate(self, mode = None):
  7545. return self._db._adapter._truncate(self, mode)
  7546. def truncate(self, mode = None):
  7547. return self._db._adapter.truncate(self, mode)
  7548. def import_from_csv_file(
  7549. self,
  7550. csvfile,
  7551. id_map=None,
  7552. null='<NULL>',
  7553. unique='uuid',
  7554. id_offset=None, # id_offset used only when id_map is None
  7555. *args, **kwargs
  7556. ):
  7557. """
  7558. Import records from csv file.
  7559. Column headers must have same names as table fields.
  7560. Field 'id' is ignored.
If column names read 'table.field' the 'table.' prefix is ignored.
  7562. 'unique' argument is a field which must be unique
  7563. (typically a uuid field)
  7564. 'restore' argument is default False;
  7565. if set True will remove old values in table first.
'id_map' if set to None will not map ids;
the import will keep the id numbers in the restored table.
This assumes that there is a field of type 'id' that is
integer and in incrementing order.
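Example (a sketch; the file name is an assumption):
db.person.import_from_csv_file(open('people.csv', 'rb'))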
  7571. """
  7572. delimiter = kwargs.get('delimiter', ',')
  7573. quotechar = kwargs.get('quotechar', '"')
  7574. quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
  7575. restore = kwargs.get('restore', False)
  7576. if restore:
  7577. self._db[self].truncate()
  7578. reader = csv.reader(csvfile, delimiter=delimiter,
  7579. quotechar=quotechar, quoting=quoting)
  7580. colnames = None
  7581. if isinstance(id_map, dict):
  7582. if not self._tablename in id_map:
  7583. id_map[self._tablename] = {}
  7584. id_map_self = id_map[self._tablename]
  7585. def fix(field, value, id_map, id_offset):
  7586. list_reference_s='list:reference'
  7587. if value == null:
  7588. value = None
  7589. elif field.type=='blob':
  7590. value = base64.b64decode(value)
  7591. elif field.type=='double' or field.type=='float':
  7592. if not value.strip():
  7593. value = None
  7594. else:
  7595. value = float(value)
  7596. elif field.type in ('integer','bigint'):
  7597. if not value.strip():
  7598. value = None
  7599. else:
  7600. value = int(value)
  7601. elif field.type.startswith('list:string'):
  7602. value = bar_decode_string(value)
  7603. elif field.type.startswith(list_reference_s):
  7604. ref_table = field.type[len(list_reference_s):].strip()
  7605. if id_map is not None:
  7606. value = [id_map[ref_table][int(v)] \
  7607. for v in bar_decode_string(value)]
  7608. else:
  7609. value = [v for v in bar_decode_string(value)]
  7610. elif field.type.startswith('list:'):
  7611. value = bar_decode_integer(value)
  7612. elif id_map and field.type.startswith('reference'):
  7613. try:
  7614. value = id_map[field.type[9:].strip()][int(value)]
  7615. except KeyError:
  7616. pass
  7617. elif id_offset and field.type.startswith('reference'):
  7618. try:
  7619. value = id_offset[field.type[9:].strip()]+int(value)
  7620. except KeyError:
  7621. pass
  7622. return (field.name, value)
  7623. def is_id(colname):
  7624. if colname in self:
  7625. return self[colname].type == 'id'
  7626. else:
  7627. return False
  7628. first = True
  7629. unique_idx = None
  7630. for line in reader:
  7631. if not line:
  7632. break
  7633. if not colnames:
  7634. colnames = [x.split('.',1)[-1] for x in line][:len(line)]
  7635. cols, cid = [], None
  7636. for i,colname in enumerate(colnames):
  7637. if is_id(colname):
  7638. cid = i
  7639. else:
  7640. cols.append(i)
  7641. if colname == unique:
  7642. unique_idx = i
  7643. else:
  7644. items = [fix(self[colnames[i]], line[i], id_map, id_offset) \
  7645. for i in cols if colnames[i] in self.fields]
  7646. if not id_map and cid is not None and id_offset is not None and not unique_idx:
  7647. csv_id = int(line[cid])
  7648. curr_id = self.insert(**dict(items))
  7649. if first:
  7650. first = False
# If the first curr_id is bigger than csv_id,
# then we are not restoring but
# extending the db table with the csv table
  7654. if curr_id>csv_id:
  7655. id_offset[self._tablename] = curr_id-csv_id
  7656. else:
  7657. id_offset[self._tablename] = 0
  7658. # create new id until we get the same as old_id+offset
  7659. while curr_id<csv_id+id_offset[self._tablename]:
  7660. self._db(self._db[self][colnames[cid]] == curr_id).delete()
  7661. curr_id = self.insert(**dict(items))
# Validation. Check for a duplicate of the 'unique' field and,
# if present, update instead of insert.
  7664. elif not unique_idx:
  7665. new_id = self.insert(**dict(items))
  7666. else:
  7667. unique_value = line[unique_idx]
  7668. query = self._db[self][unique] == unique_value
  7669. record = self._db(query).select().first()
  7670. if record:
  7671. record.update_record(**dict(items))
  7672. new_id = record[self._id.name]
  7673. else:
  7674. new_id = self.insert(**dict(items))
  7675. if id_map and cid is not None:
  7676. id_map_self[int(line[cid])] = new_id
  7677. def as_dict(self, flat=False, sanitize=True, field_options=True):
  7678. tablename = str(self)
  7679. table_as_dict = dict(name=tablename, items={}, fields=[],
  7680. sequence_name=self._sequence_name,
  7681. trigger_name=self._trigger_name,
  7682. common_filter=self._common_filter, format=self._format,
  7683. singular=self._singular, plural=self._plural)
  7684. for field in self:
  7685. if (field.readable or field.writable) or (not sanitize):
  7686. table_as_dict["fields"].append(field.name)
  7687. table_as_dict["items"][field.name] = \
  7688. field.as_dict(flat=flat, sanitize=sanitize,
  7689. options=field_options)
  7690. return table_as_dict
  7691. def as_xml(self, sanitize=True, field_options=True):
  7692. if not have_serializers:
  7693. raise ImportError("No xml serializers available")
  7694. d = self.as_dict(flat=True, sanitize=sanitize,
  7695. field_options=field_options)
  7696. return serializers.xml(d)
  7697. def as_json(self, sanitize=True, field_options=True):
  7698. if not have_serializers:
  7699. raise ImportError("No json serializers available")
  7700. d = self.as_dict(flat=True, sanitize=sanitize,
  7701. field_options=field_options)
  7702. return serializers.json(d)
  7703. def as_yaml(self, sanitize=True, field_options=True):
  7704. if not have_serializers:
  7705. raise ImportError("No YAML serializers available")
  7706. d = self.as_dict(flat=True, sanitize=sanitize,
  7707. field_options=field_options)
  7708. return serializers.yaml(d)
  7709. def with_alias(self, alias):
  7710. return self._db._adapter.alias(self,alias)
  7711. def on(self, query):
  7712. return Expression(self._db,self._db._adapter.ON,self,query)
  7713. def archive_record(qset,fs,archive_table,current_record):
  7714. tablenames = qset.db._adapter.tables(qset.query)
  7715. if len(tablenames)!=1: raise RuntimeError("cannot update join")
  7716. table = qset.db[tablenames[0]]
  7717. for row in qset.select():
  7718. fields = archive_table._filter_fields(row)
  7719. fields[current_record] = row.id
  7720. archive_table.insert(**fields)
  7721. return False
  7722. class Expression(object):
  7723. def __init__(
  7724. self,
  7725. db,
  7726. op,
  7727. first=None,
  7728. second=None,
  7729. type=None,
  7730. **optional_args
  7731. ):
  7732. self.db = db
  7733. self.op = op
  7734. self.first = first
  7735. self.second = second
  7736. self._table = getattr(first,'_table',None)
  7737. ### self._tablename = first._tablename ## CHECK
  7738. if not type and first and hasattr(first,'type'):
  7739. self.type = first.type
  7740. else:
  7741. self.type = type
  7742. self.optional_args = optional_args
  7743. def sum(self):
  7744. db = self.db
  7745. return Expression(db, db._adapter.AGGREGATE, self, 'SUM', self.type)
  7746. def max(self):
  7747. db = self.db
  7748. return Expression(db, db._adapter.AGGREGATE, self, 'MAX', self.type)
  7749. def min(self):
  7750. db = self.db
  7751. return Expression(db, db._adapter.AGGREGATE, self, 'MIN', self.type)
  7752. def len(self):
  7753. db = self.db
  7754. return Expression(db, db._adapter.AGGREGATE, self, 'LENGTH', 'integer')
  7755. def avg(self):
  7756. db = self.db
  7757. return Expression(db, db._adapter.AGGREGATE, self, 'AVG', self.type)
  7758. def abs(self):
  7759. db = self.db
  7760. return Expression(db, db._adapter.AGGREGATE, self, 'ABS', self.type)
  7761. def lower(self):
  7762. db = self.db
  7763. return Expression(db, db._adapter.LOWER, self, None, self.type)
  7764. def upper(self):
  7765. db = self.db
  7766. return Expression(db, db._adapter.UPPER, self, None, self.type)
  7767. def year(self):
  7768. db = self.db
  7769. return Expression(db, db._adapter.EXTRACT, self, 'year', 'integer')
  7770. def month(self):
  7771. db = self.db
  7772. return Expression(db, db._adapter.EXTRACT, self, 'month', 'integer')
  7773. def day(self):
  7774. db = self.db
  7775. return Expression(db, db._adapter.EXTRACT, self, 'day', 'integer')
  7776. def hour(self):
  7777. db = self.db
  7778. return Expression(db, db._adapter.EXTRACT, self, 'hour', 'integer')
  7779. def minutes(self):
  7780. db = self.db
  7781. return Expression(db, db._adapter.EXTRACT, self, 'minute', 'integer')
  7782. def coalesce(self,*others):
  7783. db = self.db
  7784. return Expression(db, db._adapter.COALESCE, self, others, self.type)
  7785. def coalesce_zero(self):
  7786. db = self.db
  7787. return Expression(db, db._adapter.COALESCE_ZERO, self, None, self.type)
  7788. def seconds(self):
  7789. db = self.db
  7790. return Expression(db, db._adapter.EXTRACT, self, 'second', 'integer')
  7791. def epoch(self):
  7792. db = self.db
  7793. return Expression(db, db._adapter.EPOCH, self, None, 'integer')
  7794. def __getslice__(self, start, stop):
  7795. db = self.db
  7796. if start < 0:
  7797. pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
  7798. else:
  7799. pos0 = start + 1
  7800. if stop < 0:
  7801. length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
  7802. elif stop == sys.maxint:
  7803. length = self.len()
  7804. else:
  7805. length = '(%s - %s)' % (stop + 1, pos0)
  7806. return Expression(db,db._adapter.SUBSTRING,
  7807. self, (pos0, length), self.type)
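# Example (a sketch): db.person.name[:2] builds a SUBSTRING
# expression over the first two characters, usable in selects.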
  7808. def __getitem__(self, i):
  7809. return self[i:i + 1]
  7810. def __str__(self):
  7811. return self.db._adapter.expand(self,self.type)
  7812. def __or__(self, other): # for use in sortby
  7813. db = self.db
  7814. return Expression(db,db._adapter.COMMA,self,other,self.type)
  7815. def __invert__(self):
  7816. db = self.db
  7817. if hasattr(self,'_op') and self.op == db._adapter.INVERT:
  7818. return self.first
  7819. return Expression(db,db._adapter.INVERT,self,type=self.type)
  7820. def __add__(self, other):
  7821. db = self.db
  7822. return Expression(db,db._adapter.ADD,self,other,self.type)
  7823. def __sub__(self, other):
  7824. db = self.db
  7825. if self.type in ('integer','bigint'):
  7826. result_type = 'integer'
  7827. elif self.type in ['date','time','datetime','double','float']:
  7828. result_type = 'double'
  7829. else:
  7830. raise SyntaxError("subtraction operation not supported for type")
  7831. return Expression(db,db._adapter.SUB,self,other,result_type)
  7832. def __mul__(self, other):
  7833. db = self.db
  7834. return Expression(db,db._adapter.MUL,self,other,self.type)
  7835. def __div__(self, other):
  7836. db = self.db
  7837. return Expression(db,db._adapter.DIV,self,other,self.type)
  7838. def __mod__(self, other):
  7839. db = self.db
  7840. return Expression(db,db._adapter.MOD,self,other,self.type)
  7841. def __eq__(self, value):
  7842. db = self.db
  7843. return Query(db, db._adapter.EQ, self, value)
  7844. def __ne__(self, value):
  7845. db = self.db
  7846. return Query(db, db._adapter.NE, self, value)
  7847. def __lt__(self, value):
  7848. db = self.db
  7849. return Query(db, db._adapter.LT, self, value)
  7850. def __le__(self, value):
  7851. db = self.db
  7852. return Query(db, db._adapter.LE, self, value)
  7853. def __gt__(self, value):
  7854. db = self.db
  7855. return Query(db, db._adapter.GT, self, value)
  7856. def __ge__(self, value):
  7857. db = self.db
  7858. return Query(db, db._adapter.GE, self, value)
  7859. def like(self, value, case_sensitive=False):
  7860. db = self.db
  7861. op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
  7862. return Query(db, op, self, value)
  7863. def regexp(self, value):
  7864. db = self.db
  7865. return Query(db, db._adapter.REGEXP, self, value)
  7866. def belongs(self, *value):
  7867. """
  7868. Accepts the following inputs:
  7869. field.belongs(1,2)
  7870. field.belongs((1,2))
  7871. field.belongs(query)
  7872. Does NOT accept:
  7873. field.belongs(1)
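Example (a sketch; assumes a 'person' table):
db(db.person.id.belongs(1, 2)).select()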
  7874. """
  7875. db = self.db
  7876. if len(value) == 1:
  7877. value = value[0]
  7878. if isinstance(value,Query):
  7879. value = db(value)._select(value.first._table._id)
  7880. return Query(db, db._adapter.BELONGS, self, value)
  7881. def startswith(self, value):
  7882. db = self.db
  7883. if not self.type in ('string', 'text', 'json'):
  7884. raise SyntaxError("startswith used with incompatible field type")
  7885. return Query(db, db._adapter.STARTSWITH, self, value)
  7886. def endswith(self, value):
  7887. db = self.db
  7888. if not self.type in ('string', 'text', 'json'):
  7889. raise SyntaxError("endswith used with incompatible field type")
  7890. return Query(db, db._adapter.ENDSWITH, self, value)
  7891. def contains(self, value, all=False, case_sensitive=False):
  7892. """
The case_sensitive parameter is only useful for PostgreSQL.
For other RDBMSs it is ignored and contains is always case-insensitive.
For MongoDB and GAE contains is always case-sensitive.
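Example (a sketch; assumes a 'person' table with a string 'name'):
db(db.person.name.contains('am')).select()
db(db.person.name.contains(['am', 'jo'], all=False)).select() # OR of the two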
  7896. """
  7897. db = self.db
  7898. if isinstance(value,(list, tuple)):
  7899. subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
  7900. for v in value if str(v).strip()]
  7901. if not subqueries:
  7902. return self.contains('')
  7903. else:
  7904. return reduce(all and AND or OR,subqueries)
  7905. if not self.type in ('string', 'text', 'json') and not self.type.startswith('list:'):
  7906. raise SyntaxError("contains used with incompatible field type")
  7907. return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)
  7908. def with_alias(self, alias):
  7909. db = self.db
  7910. return Expression(db, db._adapter.AS, self, alias, self.type)
  7911. # GIS expressions
  7912. def st_asgeojson(self, precision=15, options=0, version=1):
  7913. return Expression(self.db, self.db._adapter.ST_ASGEOJSON, self,
  7914. dict(precision=precision, options=options,
  7915. version=version), 'string')
  7916. def st_astext(self):
  7917. db = self.db
  7918. return Expression(db, db._adapter.ST_ASTEXT, self, type='string')
  7919. def st_x(self):
  7920. db = self.db
  7921. return Expression(db, db._adapter.ST_X, self, type='string')
  7922. def st_y(self):
  7923. db = self.db
  7924. return Expression(db, db._adapter.ST_Y, self, type='string')
  7925. def st_distance(self, other):
  7926. db = self.db
  7927. return Expression(db,db._adapter.ST_DISTANCE,self,other, 'double')
  7928. def st_simplify(self, value):
  7929. db = self.db
  7930. return Expression(db, db._adapter.ST_SIMPLIFY, self, value, self.type)
  7931. # GIS queries
  7932. def st_contains(self, value):
  7933. db = self.db
  7934. return Query(db, db._adapter.ST_CONTAINS, self, value)
  7935. def st_equals(self, value):
  7936. db = self.db
  7937. return Query(db, db._adapter.ST_EQUALS, self, value)
  7938. def st_intersects(self, value):
  7939. db = self.db
  7940. return Query(db, db._adapter.ST_INTERSECTS, self, value)
  7941. def st_overlaps(self, value):
  7942. db = self.db
  7943. return Query(db, db._adapter.ST_OVERLAPS, self, value)
  7944. def st_touches(self, value):
  7945. db = self.db
  7946. return Query(db, db._adapter.ST_TOUCHES, self, value)
  7947. def st_within(self, value):
  7948. db = self.db
  7949. return Query(db, db._adapter.ST_WITHIN, self, value)
  7950. # for use in both Query and sortby
  7951. class SQLCustomType(object):
  7952. """
allows defining custom SQL types
  7954. Example::
  7955. decimal = SQLCustomType(
  7956. type ='double',
  7957. native ='integer',
  7958. encoder =(lambda x: int(float(x) * 100)),
  7959. decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
  7960. )
  7961. db.define_table(
  7962. 'example',
  7963. Field('value', type=decimal)
  7964. )
  7965. :param type: the web2py type (default = 'string')
  7966. :param native: the backend type
  7967. :param encoder: how to encode the value to store it in the backend
  7968. :param decoder: how to decode the value retrieved from the backend
  7969. :param validator: what validators to use ( default = None, will use the
  7970. default validator for type)
  7971. """
  7972. def __init__(
  7973. self,
  7974. type='string',
  7975. native=None,
  7976. encoder=None,
  7977. decoder=None,
  7978. validator=None,
  7979. _class=None,
  7980. ):
  7981. self.type = type
  7982. self.native = native
  7983. self.encoder = encoder or (lambda x: x)
  7984. self.decoder = decoder or (lambda x: x)
  7985. self.validator = validator
  7986. self._class = _class or type
  7987. def startswith(self, text=None):
  7988. try:
  7989. return self.type.startswith(self, text)
  7990. except TypeError:
  7991. return False
  7992. def __getslice__(self, a=0, b=100):
  7993. return None
  7994. def __getitem__(self, i):
  7995. return None
  7996. def __str__(self):
  7997. return self._class
  7998. class FieldVirtual(object):
  7999. def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
  8000. # for backward compatibility
(self.name, self.f) = (name, f) if f else ('unknown', name)
  8002. self.type = ftype
  8003. self.label = label or self.name.capitalize().replace('_',' ')
  8004. self.represent = IDENTITY
  8005. self.formatter = IDENTITY
  8006. self.comment = None
  8007. self.readable = True
  8008. self.writable = False
  8009. self.requires = None
  8010. self.widget = None
  8011. self.tablename = table_name
  8012. self.filter_out = None
  8013. class FieldMethod(object):
  8014. def __init__(self, name, f=None, handler=None):
  8015. # for backward compatibility
(self.name, self.f) = (name, f) if f else ('unknown', name)
  8017. self.handler = handler
  8018. def list_represent(x,r=None):
  8019. return ', '.join(str(y) for y in x or [])
  8020. class Field(Expression):
  8021. Virtual = FieldVirtual
  8022. Method = FieldMethod
  8023. Lazy = FieldMethod # for backward compatibility
  8024. """
  8025. an instance of this class represents a database field
  8026. example::
  8027. a = Field(name, 'string', length=32, default=None, required=False,
  8028. requires=IS_NOT_EMPTY(), ondelete='CASCADE',
  8029. notnull=False, unique=False,
widget=None, label=None, comment=None,
  8031. uploadfield=True, # True means store on disk,
  8032. # 'a_field_name' means store in this field in db
  8033. # False means file content will be discarded.
  8034. writable=True, readable=True, update=None, authorize=None,
  8035. autodelete=False, represent=None, uploadfolder=None,
  8036. uploadseparate=False # upload to separate directories by uuid_keys
  8037. # first 2 character and tablename.fieldname
  8038. # False - old behavior
  8039. # True - put uploaded file in
  8040. # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
  8041. # directory)
  8042. uploadfs=None # a pyfilesystem where to store upload
  8043. to be used as argument of DAL.define_table
  8044. allowed field types:
  8045. string, boolean, integer, double, text, blob,
  8046. date, time, datetime, upload, password
strings have a maximum length of Adapter.maxcharlength by default (512, or 255 for MySQL)
  8048. fields should have a default or they will be required in SQLFORMs
  8049. the requires argument is used to validate the field input in SQLFORMs
  8050. """
  8051. def __init__(
  8052. self,
  8053. fieldname,
  8054. type='string',
  8055. length=None,
  8056. default=DEFAULT,
  8057. required=False,
  8058. requires=DEFAULT,
  8059. ondelete='CASCADE',
  8060. notnull=False,
  8061. unique=False,
  8062. uploadfield=True,
  8063. widget=None,
  8064. label=None,
  8065. comment=None,
  8066. writable=True,
  8067. readable=True,
  8068. update=None,
  8069. authorize=None,
  8070. autodelete=False,
  8071. represent=None,
  8072. uploadfolder=None,
  8073. uploadseparate=False,
  8074. uploadfs=None,
  8075. compute=None,
  8076. custom_store=None,
  8077. custom_retrieve=None,
  8078. custom_retrieve_file_properties=None,
  8079. custom_delete=None,
  8080. filter_in = None,
  8081. filter_out = None,
  8082. custom_qualifier = None,
  8083. map_none = None,
  8084. ):
  8085. self._db = self.db = None # both for backward compatibility
  8086. self.op = None
  8087. self.first = None
  8088. self.second = None
  8089. self.name = fieldname = cleanup(fieldname)
  8090. if not isinstance(fieldname,str) or hasattr(Table,fieldname) or \
  8091. fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
  8092. raise SyntaxError('Field: invalid field name: %s' % fieldname)
  8093. self.type = type if not isinstance(type, (Table,Field)) else 'reference %s' % type
  8094. self.length = length if not length is None else DEFAULTLENGTH.get(self.type,512)
  8095. self.default = default if default!=DEFAULT else (update or None)
  8096. self.required = required # is this field required
  8097. self.ondelete = ondelete.upper() # this is for reference fields only
  8098. self.notnull = notnull
  8099. self.unique = unique
  8100. self.uploadfield = uploadfield
  8101. self.uploadfolder = uploadfolder
  8102. self.uploadseparate = uploadseparate
  8103. self.uploadfs = uploadfs
  8104. self.widget = widget
  8105. self.comment = comment
  8106. self.writable = writable
  8107. self.readable = readable
  8108. self.update = update
  8109. self.authorize = authorize
  8110. self.autodelete = autodelete
  8111. self.represent = list_represent if \
  8112. represent==None and type in ('list:integer','list:string') else represent
  8113. self.compute = compute
  8114. self.isattachment = True
  8115. self.custom_store = custom_store
  8116. self.custom_retrieve = custom_retrieve
  8117. self.custom_retrieve_file_properties = custom_retrieve_file_properties
  8118. self.custom_delete = custom_delete
  8119. self.filter_in = filter_in
  8120. self.filter_out = filter_out
  8121. self.custom_qualifier = custom_qualifier
  8122. self.label = label if label!=None else fieldname.replace('_',' ').title()
  8123. self.requires = requires if requires!=None else []
  8124. self.map_none = map_none
  8125. def set_attributes(self,*args,**attributes):
  8126. self.__dict__.update(*args,**attributes)
  8127. def clone(self,point_self_references_to=False,**args):
  8128. field = copy.copy(self)
  8129. if point_self_references_to and \
field.type == 'reference %s' % field._tablename:
  8131. field.type = 'reference %s' % point_self_references_to
  8132. field.__dict__.update(args)
  8133. return field
  8134. def store(self, file, filename=None, path=None):
  8135. if self.custom_store:
  8136. return self.custom_store(file,filename,path)
  8137. if isinstance(file, cgi.FieldStorage):
  8138. filename = filename or file.filename
  8139. file = file.file
  8140. elif not filename:
  8141. filename = file.name
  8142. filename = os.path.basename(filename.replace('/', os.sep)\
  8143. .replace('\\', os.sep))
  8144. m = REGEX_STORE_PATTERN.search(filename)
  8145. extension = m and m.group('e') or 'txt'
  8146. uuid_key = web2py_uuid().replace('-', '')[-16:]
  8147. encoded_filename = base64.b16encode(filename).lower()
  8148. newfilename = '%s.%s.%s.%s' % \
  8149. (self._tablename, self.name, uuid_key, encoded_filename)
  8150. newfilename = newfilename[:(self.length - 1 - len(extension))] + '.' + extension
  8151. self_uploadfield = self.uploadfield
  8152. if isinstance(self_uploadfield,Field):
  8153. blob_uploadfield_name = self_uploadfield.uploadfield
  8154. keys={self_uploadfield.name: newfilename,
  8155. blob_uploadfield_name: file.read()}
  8156. self_uploadfield.table.insert(**keys)
  8157. elif self_uploadfield == True:
  8158. if path:
  8159. pass
  8160. elif self.uploadfolder:
  8161. path = self.uploadfolder
  8162. elif self.db._adapter.folder:
  8163. path = pjoin(self.db._adapter.folder, '..', 'uploads')
  8164. else:
  8165. raise RuntimeError(
  8166. "you must specify a Field(...,uploadfolder=...)")
  8167. if self.uploadseparate:
  8168. if self.uploadfs:
  8169. raise RuntimeError("not supported")
  8170. path = pjoin(path,"%s.%s" %(self._tablename, self.name),
  8171. uuid_key[:2])
  8172. if not exists(path):
  8173. os.makedirs(path)
  8174. pathfilename = pjoin(path, newfilename)
  8175. if self.uploadfs:
  8176. dest_file = self.uploadfs.open(newfilename, 'wb')
  8177. else:
  8178. dest_file = open(pathfilename, 'wb')
  8179. try:
  8180. shutil.copyfileobj(file, dest_file)
  8181. except IOError:
  8182. raise IOError(
  8183. 'Unable to store file "%s" because invalid permissions, readonly file system, or filename too long' % pathfilename)
  8184. dest_file.close()
  8185. return newfilename
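# Example (a sketch; the field and file are assumptions):
# stored = db.person.photo.store(open('face.png', 'rb'),
# filename='face.png')
# the encoded name returned is what should be saved in the record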
  8186. def retrieve(self, name, path=None):
  8187. self_uploadfield = self.uploadfield
  8188. if self.custom_retrieve:
  8189. return self.custom_retrieve(name, path)
  8190. import http
  8191. if self.authorize or isinstance(self_uploadfield, str):
  8192. row = self.db(self == name).select().first()
  8193. if not row:
  8194. raise http.HTTP(404)
  8195. if self.authorize and not self.authorize(row):
  8196. raise http.HTTP(403)
  8197. m = REGEX_UPLOAD_PATTERN.match(name)
  8198. if not m or not self.isattachment:
  8199. raise TypeError('Can\'t retrieve %s' % name)
  8200. file_properties = self.retrieve_file_properties(name,path)
  8201. filename = file_properties['filename']
  8202. if isinstance(self_uploadfield, str): # ## if file is in DB
  8203. stream = StringIO.StringIO(row[self_uploadfield] or '')
  8204. elif isinstance(self_uploadfield,Field):
  8205. blob_uploadfield_name = self_uploadfield.uploadfield
  8206. query = self_uploadfield == name
  8207. data = self_uploadfield.table(query)[blob_uploadfield_name]
  8208. stream = StringIO.StringIO(data)
  8209. elif self.uploadfs:
  8210. # ## if file is on pyfilesystem
  8211. stream = self.uploadfs.open(name, 'rb')
  8212. else:
  8213. # ## if file is on regular filesystem
  8214. fullname = pjoin(file_properties['path'], name)
  8215. stream = open(fullname,'rb')
  8216. return (filename, stream)
  8217. def retrieve_file_properties(self, name, path=None):
  8218. self_uploadfield = self.uploadfield
  8219. if self.custom_retrieve_file_properties:
  8220. return self.custom_retrieve_file_properties(name, path)
  8221. try:
  8222. m = REGEX_UPLOAD_PATTERN.match(name)
  8223. if not m or not self.isattachment:
  8224. raise TypeError('Can\'t retrieve %s file properties' % name)
  8225. filename = base64.b16decode(m.group('name'), True)
  8226. filename = REGEX_CLEANUP_FN.sub('_', filename)
  8227. except (TypeError, AttributeError):
  8228. filename = name
  8229. if isinstance(self_uploadfield, str): # ## if file is in DB
  8230. return dict(path=None,filename=filename)
  8231. elif isinstance(self_uploadfield,Field):
  8232. return dict(path=None,filename=filename)
  8233. else:
  8234. # ## if file is on filesystem
  8235. if path:
  8236. pass
  8237. elif self.uploadfolder:
  8238. path = self.uploadfolder
  8239. else:
  8240. path = pjoin(self.db._adapter.folder, '..', 'uploads')
  8241. if self.uploadseparate:
  8242. t = m.group('table')
  8243. f = m.group('field')
  8244. u = m.group('uuidkey')
  8245. path = pjoin(path,"%s.%s" % (t,f),u[:2])
  8246. return dict(path=path,filename=filename)
    def formatter(self, value):
        requires = self.requires
        if value is None or not requires:
            return value or self.map_none
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        elif isinstance(requires, tuple):
            requires = list(requires)
        else:
            requires = copy.copy(requires)
        requires.reverse()
        for item in requires:
            if hasattr(item, 'formatter'):
                value = item.formatter(value)
        return value
    def validate(self, value):
        if not self.requires or self.requires == DEFAULT:
            return ((value if value != self.map_none else None), None)
        requires = self.requires
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        for validator in requires:
            (value, error) = validator(value)
            if error:
                return (value, error)
        return ((value if value != self.map_none else None), None)
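    # A minimal sketch of validate(); 'db.person.name' is hypothetical and
    # assumed to carry requires=IS_NOT_EMPTY():
    #
    #     value, error = db.person.name.validate('')
    #     # error is a validator message here; it is None when validation passes
    #
    # Validators run in order and the first error short-circuits the chain.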
    def count(self, distinct=None):
        return Expression(self.db, self.db._adapter.COUNT, self, distinct, 'integer')
    def as_dict(self, flat=False, sanitize=True, options=True):
        attrs = ('type', 'length', 'default', 'required',
                 'ondelete', 'notnull', 'unique', 'uploadfield',
                 'widget', 'label', 'comment', 'writable', 'readable',
                 'update', 'authorize', 'autodelete', 'represent',
                 'uploadfolder', 'uploadseparate', 'uploadfs',
                 'compute', 'custom_store', 'custom_retrieve',
                 'custom_retrieve_file_properties', 'custom_delete',
                 'filter_in', 'filter_out', 'custom_qualifier',
                 'map_none', 'name')
        SERIALIZABLE_TYPES = (int, long, basestring, dict, list,
                              float, tuple, bool, type(None))

        def flatten(obj):
            if flat:
                if isinstance(obj, flatten.__class__):
                    return str(type(obj))
                elif isinstance(obj, type):
                    try:
                        return str(obj).split("'")[1]
                    except IndexError:
                        return str(obj)
                elif not isinstance(obj, SERIALIZABLE_TYPES):
                    return str(obj)
                elif isinstance(obj, dict):
                    newobj = dict()
                    for k, v in obj.items():
                        newobj[k] = flatten(v)
                    return newobj
                elif isinstance(obj, (list, tuple, set)):
                    return [flatten(v) for v in obj]
                else:
                    return obj
            elif isinstance(obj, (dict, set)):
                return obj.copy()
            else: return obj

        def filter_requires(t, r, options=True):
            if sanitize and any([keyword in str(t).upper() for
                                 keyword in ("CRYPT", "IS_STRONG")]):
                return None
            if not isinstance(r, dict):
                if options and hasattr(r, "options"):
                    if callable(r.options):
                        r.options()
                newr = r.__dict__.copy()
            else:
                newr = r.copy()
            # remove options if not required
            if not options and "labels" in newr:
                for key in ("labels", "theset"):
                    if key in newr:
                        newr[key] = None
            for k, v in newr.items():
                if k == "other":
                    if isinstance(v, dict):
                        otype, other = v.popitem()
                    else:
                        otype = flatten(type(v))
                        other = v
                    newr[k] = {otype: filter_requires(otype, other,
                                                      options=options)}
                else:
                    newr[k] = flatten(v)
            return newr

        if isinstance(self.requires, (tuple, list, set)):
            requires = dict([(flatten(type(r)),
                              filter_requires(type(r), r,
                                              options=options)) for
                             r in self.requires])
        else:
            requires = {flatten(type(self.requires)):
                        filter_requires(type(self.requires),
                                        self.requires, options=options)}
        d = dict(colname="%s.%s" % (self.tablename, self.name),
                 requires=requires)
        d.update([(attr, flatten(getattr(self, attr))) for attr in attrs])
        return d
    def as_xml(self, sanitize=True, options=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize,
                         options=options)
        return xml(d)

    def as_json(self, sanitize=True, options=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize,
                         options=options)
        return json(d)

    def as_yaml(self, sanitize=True, options=True):
        if have_serializers:
            d = self.as_dict(flat=True, sanitize=sanitize,
                             options=options)
            return serializers.yaml(d)
        else:
            raise ImportError("No YAML serializers available")
    def __nonzero__(self):
        return True

    def __str__(self):
        try:
            return '%s.%s' % (self.tablename, self.name)
        except:
            return '<no table>.%s' % self.name
class Query(object):

    """
    A query object necessary to define a set.
    It can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()

    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ignore_common_filters=False,
        **optional_args
        ):
        self.db = self._db = db
        self.op = op
        self.first = first
        self.second = second
        self.ignore_common_filters = ignore_common_filters
        self.optional_args = optional_args

    def __repr__(self):
        return '<Query %s>' % BaseAdapter.expand(self.db._adapter, self)

    def __str__(self):
        return self.db._adapter.expand(self)

    def __and__(self, other):
        return Query(self.db, self.db._adapter.AND, self, other)

    def __or__(self, other):
        return Query(self.db, self.db._adapter.OR, self, other)

    def __invert__(self):
        if self.op == self.db._adapter.NOT:
            return self.first
        return Query(self.db, self.db._adapter.NOT, self)

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not (self == other)

    def case(self, t=1, f=0):
        return self.db._adapter.CASE(self, t, f)
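    # A hedged sketch of query composition ('db.person' is hypothetical):
    #
    #     q = (db.person.name == 'Max') & ~(db.person.birth < '1990-01-01')
    #     rows = db(q).select()
    #     # case() turns a query into a conditional expression:
    #     flag = (db.person.name == 'Max').case('yes', 'no')
    #     row = db(db.person).select(flag).first()
    #
    # & and | build AND/OR nodes; ~ adds a NOT node (or strips an existing one).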
    def as_dict(self, flat=False, sanitize=True):
        """Experimental: returns a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O.

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
                     "second":0}
        """

        SERIALIZABLE_TYPES = (tuple, dict, list, int, long, float,
                              basestring, type(None), bool)

        def loop(d):
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else: pass  # not callable or string
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else: newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else: return self.__dict__
    def as_xml(self, sanitize=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)
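    # A hedged round-trip sketch ('db.person' is hypothetical): the flat dict
    # produced by as_dict() can be passed back to db(...), which rebuilds the
    # Query through Set.parse()/Set.build() below:
    #
    #     d = (db.person.id > 0).as_dict(flat=True)
    #     rows = db(d).select()  # same records as db(db.person.id > 0).select()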
def xorify(orderby):
    if not orderby:
        return None
    orderby2 = orderby[0]
    for item in orderby[1:]:
        orderby2 = orderby2 | item
    return orderby2
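# A small sketch: xorify folds a list of orderby expressions into one
# '|'-joined expression ('db.person' is hypothetical):
#
#     xorify([db.person.name, ~db.person.birth])
#     # equivalent to db.person.name | ~db.person.birth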
def use_common_filters(query):
    return (query and hasattr(query, 'ignore_common_filters') and
            not query.ignore_common_filters)
class Set(object):

    """
    A Set represents a set of records in the database;
    the records are identified by the query=Query(...) object.
    Normally the Set is generated by DAL.__call__(Query(...))

    Given a set, for example
        set = db(db.users.name=='Max')
    you can:
        set.update(name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
    and take subsets:
        subset = set(db.users.id<5)
    """

    def __init__(self, db, query, ignore_common_filters=None):
        self.db = db
        self._db = db  # for backward compatibility
        self.dquery = None

        # if query is a dict, parse it
        if isinstance(query, dict):
            query = self.parse(query)

        if ignore_common_filters is not None and \
                use_common_filters(query) == ignore_common_filters:
            query = copy.copy(query)
            query.ignore_common_filters = ignore_common_filters
        self.query = query

    def __repr__(self):
        return '<Set %s>' % BaseAdapter.expand(self.db._adapter, self.query)

    def __call__(self, query, ignore_common_filters=False):
        if isinstance(query, Table):
            query = self.db._adapter.id_query(query)
        elif isinstance(query, str):
            query = Expression(self.db, query)
        elif isinstance(query, Field):
            query = query != None
        if self.query:
            return Set(self.db, self.query & query,
                       ignore_common_filters=ignore_common_filters)
        else:
            return Set(self.db, query,
                       ignore_common_filters=ignore_common_filters)
    def _count(self, distinct=None):
        return self.db._adapter._count(self.query, distinct)

    def _select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter._select(self.query, fields, attributes)

    def _delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        return db._adapter._delete(tablename, self.query)

    def _update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        fields = db[tablename]._listify(update_fields, update=True)
        return db._adapter._update(tablename, self.query, fields)
    def as_dict(self, flat=False, sanitize=True):
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else: return self.__dict__

    def as_xml(self, sanitize=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)
    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        self.dquery = dquery
        return self.build(self.dquery)

    def build(self, d):
        "Experimental: see .parse()"
        op, first, second = (d["op"], d["first"],
                             d.get("second", None))
        left = right = built = None
        if op in ("AND", "OR"):
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else: built = self.build(first) | self.build(second)
        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal operation (GT, EQ, LT, ...)
            for k, v in {"left": first, "right": second}.items():
                if isinstance(v, dict) and v.get("op"):
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left": left = v
                else: right = v
            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)
            if op == "EQ": built = left == right
            elif op == "NE": built = left != right
            elif op == "GT": built = left > right
            elif op == "GE": built = left >= right
            elif op == "LT": built = left < right
            elif op == "LE": built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            # expression as string
            elif not (left or right): built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)
        return built
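    # A hedged sketch of the dictionary format accepted by parse()/build()
    # ('db.person' is hypothetical):
    #
    #     d = {"op": "EQ",
    #          "first": {"tablename": "person", "fieldname": "name"},
    #          "second": "Max"}
    #     rows = db(d).select()  # Set.__init__ parses the dict;
    #                            # same as db(db.person.name == 'Max').select()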
    def isempty(self):
        return not self.select(limitby=(0, 1))

    def count(self, distinct=None, cache=None):
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            if len(key) > 200: key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self, distinct=distinct:
                    db._adapter.count(self.query, distinct)),
                time_expire)
        return db._adapter.count(self.query, distinct)
    def select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter.select(self.query, fields, attributes)

    def nested_select(self, *fields, **attributes):
        return Expression(self.db, self._select(*fields, **attributes))

    def delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        if any(f(self) for f in table._before_delete): return 0
        ret = db._adapter.delete(tablename, self.query)
        ret and [f(self) for f in table._after_delete]
        return ret
    def update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        if any(f(self, update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update(tablename, self.query, fields)
        ret and [f(self, update_fields) for f in table._after_update]
        return ret

    def update_naive(self, **update_fields):
        """
        Same as update but does not call table._before_update and _after_update
        """
        tablename = self.db._adapter.get_table(self.query)
        table = self.db[tablename]
        fields = table._listify(update_fields, update=True)
        if not fields: raise SyntaxError("No fields to update")
        ret = self.db._adapter.update(tablename, self.query, fields)
        return ret
    def validate_and_update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        for key, value in update_fields.iteritems():
            value, error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            if not any(f(self, new_fields) for f in table._before_update):
                fields = table._listify(new_fields, update=True)
                if not fields: raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename, self.query, fields)
                ret and [f(self, new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response
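    # A hedged usage sketch ('db.person' is hypothetical): values are run
    # through the field validators before the database is touched.
    #
    #     res = db(db.person.id == 1).validate_and_update(name='')
    #     if res.errors:
    #         print res.errors.name   # validator message; res.updated is None
    #     else:
    #         print res.updated       # number of records updated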
    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]
        # ## mind uploadfield==True means file is not in DB
        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False
class RecordUpdater(object):
    def __init__(self, colset, table, id):
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id

    def __call__(self, **fields):
        colset, db, tablename, id = self.colset, self.db, self.tablename, self.id
        table = db[tablename]
        newfields = fields or dict(colset)
        for fieldname in newfields.keys():
            if fieldname not in table.fields or table[fieldname].type == 'id':
                del newfields[fieldname]
        table._db(table._id == id, ignore_common_filters=True).update(**newfields)
        colset.update(newfields)
        return colset

class RecordDeleter(object):
    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id

    def __call__(self):
        return self.db(self.db[self.tablename]._id == self.id).delete()
class LazySet(object):
    def __init__(self, field, id):
        self.db, self.tablename, self.fieldname, self.id = \
            field.db, field._tablename, field.name, id

    def _getset(self):
        query = self.db[self.tablename][self.fieldname] == self.id
        return Set(self.db, query)

    def __repr__(self):
        return repr(self._getset())

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)

    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields, **attributes)

    def _delete(self):
        return self._getset()._delete()

    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)

    def isempty(self):
        return self._getset().isempty()

    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct, cache)

    def select(self, *fields, **attributes):
        return self._getset().select(*fields, **attributes)

    def nested_select(self, *fields, **attributes):
        return self._getset().nested_select(*fields, **attributes)

    def delete(self):
        return self._getset().delete()

    def update(self, **update_fields):
        return self._getset().update(**update_fields)

    def update_naive(self, **update_fields):
        return self._getset().update_naive(**update_fields)

    def validate_and_update(self, **update_fields):
        return self._getset().validate_and_update(**update_fields)

    def delete_uploaded_files(self, upload_fields=None):
        return self._getset().delete_uploaded_files(upload_fields)
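# A hedged sketch of where LazySet appears ('db.person'/'db.dog' are
# hypothetical, with db.dog.owner a reference to db.person): the backreference
# on a row yields a set whose query is only built when it is used.
#
#     row = db.person(1)
#     row.dog.select()   # roughly db(db.dog.owner == row.id).select()
#     row.dog.count()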
class VirtualCommand(object):
    def __init__(self, method, row):
        self.method = method
        self.row = row

    def __call__(self, *args, **kwargs):
        return self.method(self.row, *args, **kwargs)

def lazy_virtualfield(f):
    f.__lazy__ = True
    return f
class Rows(object):

    """
    A wrapper for the return value of a select. It basically represents a table.
    It has an iterator and each row is represented as a dictionary.
    """

    # ## TODO: this class still needs some work to care for ID/OID

    def __init__(
        self,
        db=None,
        records=[],
        colnames=[],
        compact=True,
        rawrows=None
        ):
        self.db = db
        self.records = records
        self.colnames = colnames
        self.compact = compact
        self.response = rawrows

    def __repr__(self):
        return '<Rows (%s)>' % len(self.records)

    def setvirtualfields(self, **keyed_virtualfields):
        """
        db.define_table('x', Field('number', 'integer'))
        if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]

        from gluon.dal import lazy_virtualfield

        class MyVirtualFields(object):
            # normal virtual field (backward compatible, discouraged)
            def normal_shift(self): return self.x.number+1
            # lazy virtual field (because of the @lazy_virtualfield decorator)
            @lazy_virtualfield
            def lazy_shift(instance, row, delta=4): return row.x.number+delta
        db.x.virtualfields.append(MyVirtualFields())

        for row in db(db.x).select():
            print row.number, row.normal_shift, row.lazy_shift(delta=7)
        """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename, virtualfields) in keyed_virtualfields.iteritems():
                attributes = dir(virtualfields)
                if tablename not in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                updated = False
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields, attribute)
                        if hasattr(method, '__lazy__'):
                            box[attribute] = VirtualCommand(method, row)
                        elif type(method) == types.MethodType:
                            if not updated:
                                virtualfields.__dict__.update(row)
                                updated = True
                            box[attribute] = method()
        return self
    def __and__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot & incompatible Rows objects')
        records = self.records + other.records
        return Rows(self.db, records, self.colnames)

    def __or__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot | incompatible Rows objects')
        records = self.records
        records += [record for record in other.records
                    if not record in records]
        return Rows(self.db, records, self.colnames)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db, self.records[a:b], self.colnames)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row
    def __iter__(self):
        """
        iterator over records
        """
        for i in xrange(len(self)):
            yield self[i]

    def __str__(self):
        """
        serializes the table into a csv file
        """
        s = StringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()

    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]
    def find(self, f, limitby=None):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
        """
        if not self:
            return Rows(self.db, [], self.colnames)
        records = []
        if limitby:
            a, b = limitby
        else:
            a, b = 0, len(self)
        k = 0
        for row in self:
            if f(row):
                if a <= k: records.append(row)
                k += 1
                if k == b: break
        return Rows(self.db, records, self.colnames)

    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the function f,
        and returns a new Rows object containing the removed elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i = 0
        while i < len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)

    def sort(self, f, reverse=False):
        """
        returns a list of sorted elements (not sorted in place)
        """
        rows = Rows(self.db, [], self.colnames, compact=False)
        rows.records = sorted(self, key=f, reverse=reverse)
        return rows
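    # A hedged sketch of in-memory filtering ('db.person' is hypothetical);
    # these methods operate on the already-fetched Rows without issuing new
    # queries:
    #
    #     rows = db(db.person).select()
    #     found = rows.find(lambda r: r.name.startswith('M'))
    #     removed = rows.exclude(lambda r: r.name.startswith('M'))  # mutates rows
    #     ordered = rows.sort(lambda r: r.name, reverse=True)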
    def group_by_value(self, field):
        """
        regroups the rows, by one of the fields
        """
        if not self.records:
            return {}
        key = str(field)
        grouped_row_group = dict()
        for row in self:
            value = row[key]
            if value not in grouped_row_group:
                grouped_row_group[value] = [row]
            else:
                grouped_row_group[value].append(row)
        return grouped_row_group
    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True,
                custom_types=None):
        """
        returns the data as a list or dictionary.
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields as strings (default True)
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str, custom_types) for item in self]
        else:
            items = [item for item in self]
        self.compact = oc  # restore the original compact setting
        return items
    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=True,
                custom_types=None):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)

        :param key: the name of the field to be used as dict key, normally the id
        :param compact: use the compact row representation (default True)
        :param storage_to_dict: when True returns a dict, otherwise a list (default True)
        :param datetime_to_str: convert datetime fields as strings (default True)
        """

        # test for multiple rows
        multi = False
        f = self.first()
        if f:
            multi = any([isinstance(v, f.__class__) for v in f.values()])
        if (not "." in key) and multi:
            # No key provided, default to int indices
            def new_key():
                i = 0
                while True:
                    yield i
                    i += 1
            key_generator = new_key()
            key = lambda r: key_generator.next()

        rows = self.as_list(compact, storage_to_dict, datetime_to_str, custom_types)
        if isinstance(key, str) and key.count('.') == 1:
            (table, field) = key.split('.')
            return dict([(r[table][field], r) for r in rows])
        elif isinstance(key, str):
            return dict([(r[key], r) for r in rows])
        else:
            return dict([(key(r), r) for r in rows])
    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        export data to csv, the first line contains the column names

        :param ofile: where the csv must be exported to
        :param null: how null values must be represented (default '<NULL>')
        :param delimiter: delimiter to separate values (default ',')
        :param quotechar: character to use to quote string values (default '"')
        :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
        :param represent: use the fields .represent value (default False)
        :param colnames: list of column names to use (default self.colnames)

        This will only work when exporting rows objects!
        DO NOT use this with db.export_to_csv()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames', True)
        # a proper csv starting with the column names
        if write_colnames:
            writer.writerow(colnames)

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as such
            - None values are replaced with the given representation (default <NULL>)
            """
            if value is None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value, Reference):
                return int(value)
            elif hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list, tuple)):  # for type='list:..'
                return bar_encode(value)
            return value

        for record in self:
            row = []
            for col in colnames:
                if not REGEX_TABLE_DOT_FIELD.match(col):
                    row.append(record._extra[col])
                else:
                    (t, f) = col.split('.')
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row, dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type == 'blob' and not value is None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        value = field.represent(value)
                    row.append(none_exception(value))
            writer.writerow(row)
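    # A hedged usage sketch ('db.person' is hypothetical):
    #
    #     ofile = open('/tmp/people.csv', 'wb')
    #     db(db.person).select().export_to_csv_file(ofile, delimiter=';',
    #                                               represent=True)
    #     ofile.close()
    #
    # str(rows) and the as_csv alias below use this same machinery with the
    # default options.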
    def xml(self, strict=False, row_name='row', rows_name='rows'):
        """
        serializes the table using sqlhtml.SQLTABLE (if present)
        """
        if strict:
            ncols = len(self.colnames)
            return '<%s>\n%s\n</%s>' % (rows_name,
                                        '\n'.join(row.as_xml(row_name=row_name,
                                                             colnames=self.colnames) for
                                                  row in self), rows_name)
        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()

    def as_xml(self, row_name='row', rows_name='rows'):
        return self.xml(strict=True, row_name=row_name, rows_name=rows_name)
    def as_json(self, mode='object', default=None):
        """
        serializes the table to a JSON list of objects
        """
        items = [record.as_json(mode=mode, default=default,
                                serialize=False,
                                colnames=self.colnames) for
                 record in self]
        if have_serializers:
            return serializers.json(items,
                                    default=default or
                                    serializers.custom_json)
        elif simplejson:
            return simplejson.dumps(items)
        else:
            raise RuntimeError("missing simplejson")

    # for consistent naming yet backwards compatible
    as_csv = __str__
    json = as_json
################################################################################
# dummy function used to define some doctests
################################################################################

def test_all():
    """

    >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True,notnull=True),\
              Field('jsonf', 'json'),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                        uploadf=None, integerf=5, doublef=3.14,\
                        jsonf={"j": True},\
                        datef=datetime.date(2001, 1, 1),\
                        timef=datetime.time(12, 30, 15),\
                        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth','date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name="Marco",birth='2005-06-22')
    >>> person_id = db.person.insert(name="Massimo",birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'

    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True

    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name="Max")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)
    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of a one-to-many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth','date'),\
              Field('owner',db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1
    >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of a many-to-many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
                              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
                              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45
    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)
    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """
################################################################################
# deprecated since the new DAL; here only for backward compatibility
################################################################################

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field  # was necessary in gluon/globals.py session.connect
DAL.Table = Table  # was necessary in gluon/globals.py session.connect
################################################################################
# Geodal utils
################################################################################

def geoPoint(x, y):
    return "POINT (%f %f)" % (x, y)

def geoLine(*line):
    return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)

def geoPolygon(*line):
    return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)
################################################################################
# run tests
################################################################################

if __name__ == '__main__':
    import doctest
    doctest.testmod()