
/gluon/dal.py

https://github.com/gokceneraslan/web2py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- Informix (9+ and SE)
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
... folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
... groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:
help(DAL)
help(Field)
"""
###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}
TIMINGSSIZE = 100

SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }

DEFAULT_URI = 'sqlite://dummy.db'
import re
import sys
import locale
import os
import types
import datetime
import threading
import time
import csv
import cgi
import copy
import socket
import logging
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform
PYTHON_VERSION = sys.version_info[0]
if PYTHON_VERSION == 2:
    import cPickle as pickle
    import cStringIO as StringIO
    import copy_reg as copyreg
    hashlib_md5 = hashlib.md5
    bytes, unicode = str, unicode
else:
    import pickle
    from io import StringIO as StringIO
    import copyreg
    long = int
    hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8'))
    bytes, unicode = bytes, str
CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define','actual_name'))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable',
     'orderby_on_limitby'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join

###################################################################################
# following checks allow the use of dal without web2py, as a standalone module
###################################################################################
try:
    from utils import web2py_uuid
except (ImportError, SystemError):
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False
    try:
        import json as simplejson
    except ImportError:
        try:
            import gluon.contrib.simplejson as simplejson
        except ImportError:
            simplejson = None

try:
    import validators
    have_validators = True
except (ImportError, SyntaxError):
    have_validators = False
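
# A minimal sketch of the standalone (non-web2py) usage enabled by the
# fallbacks above; the file name and folder are hypothetical, and the sqlite3
# driver detected below is assumed to be available:
#
#   >>> from dal import DAL, Field
#   >>> db = DAL('sqlite://standalone.db', folder='/tmp')
#   >>> db.define_table('note', Field('body', 'text'))
#   >>> db.note.insert(body='works without the rest of gluon')
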
LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

# internal representation of tables with field
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')
REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)') # was '(?<=[\:\/])([^:@/]+)(?=@.+)'

# list of drivers will be built on the fly
# and lists only what is available
DRIVERS = []
try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key  ### for belongs on ID
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')
    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql because they haven't fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        try:
            import pyodbc
        except ImportError:
            try:
                import contrib.pypyodbc as pyodbc
            except Exception as e:
                raise ImportError(str(e))
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
        DRIVERS.append('Ingres(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc')
    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')

    #####
    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
        from org.sqlite import JDBC  # required by java.sql; ensure we have it
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')
PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]
def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural
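
# A short illustration of the rule scan above; outputs follow directly from
# PLURALIZE_RULES (first matching search pattern wins, '$' is the catch-all):
#
#   >>> pluralize('child')
#   'children'
#   >>> pluralize('leaf')
#   'leaves'
#   >>> pluralize('box')
#   'boxes'
#   >>> pluralize('person')
#   'persons'
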
def hide_password(uri):
    if isinstance(uri,(list,tuple)):
        return [hide_password(item) for item in uri]
    return REGEX_NOPASSWD.sub('******',uri)
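
# Sketch of the masking above: REGEX_NOPASSWD swallows everything between the
# scheme and the '@' (credentials included); the URI is hypothetical:
#
#   >>> hide_password('postgres://user:secret@localhost/test')
#   'postgres:******@localhost/test'
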
def OR(a,b):
    return a|b

def AND(a,b):
    return a&b

def IDENTITY(x): return x

def varquote_aux(name,quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name

def quote_keyword(a,keyword='timestamp'):
    regex = re.compile('\.keyword(?=\w)')
    a = regex.sub('."%s"' % keyword,a)
    return a
if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(**kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                    % self.name)
###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return
    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        if (os.path.isabs(self.folder) and
            isinstance(self, UseDatabaseStoredFile) and
            self.folder.startswith(os.getcwd())):
            self.folder = os.path.relpath(self.folder, os.getcwd())

        # folder creation is intentionally disabled: as the docstring says,
        # the folder must already exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)
    def after_connection_hook(self):
        """hook for the after_connection parameter"""
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

    def after_connection(self):
        """ this is supposed to be overloaded by adapters """
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self, 'connection', None) is not None:
            return
        if f is None:
            f = self.connector

        # if not hasattr(self, "driver") or self.driver is None:
        #     LOGGER.debug("Skipping connection since there's no driver")
        #     return

        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection_hook()
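
    # A minimal sketch of the pool life cycle implemented above (the URI and
    # pool_size are hypothetical; DAL is defined later in this file):
    #
    #   >>> db = DAL('sqlite://storage.sqlite', pool_size=5)
    #
    # On close(action='commit') the connection is parked in
    # ConnectionPool.POOLS[uri] if the pool is not full; the next reconnect()
    # for the same URI pops it, verifies it with 'SELECT 1;' when
    # check_active_connection is set, and only opens a brand new connection
    # when no pooled one survives the check.
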
###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):
    native_json = False
    driver = None
    driver_name = None
    drivers = ()  # list of drivers from which to pick
    connection = None
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True
    dbpath = None
    folder = None

    TRUE = 'T'
    FALSE = 'F'
    T_SEP = ' '
    QUOTE_TEMPLATE = '"%s"'

    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'json': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(..., bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
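
    # Sketch: how the templates above are instantiated when a field is
    # rendered (values follow from the dict; 'person (id)' is hypothetical):
    #
    #   >>> BaseAdapter.types['string'] % dict(length=512)
    #   'CHAR(512)'
    #   >>> BaseAdapter.types['reference'] % dict(foreign_key='person (id)',
    #   ...                                       on_delete_action='CASCADE')
    #   'INTEGER REFERENCES person (id) ON DELETE CASCADE'
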
    def isOperationalError(self,exception):
        if not hasattr(self.driver, "OperationalError"):
            return None
        return isinstance(exception, self.driver.OperationalError)

    def id_query(self, table):
        # note: != builds a Query object via operator overloading,
        # so this must not be rewritten as "is not None"
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)
    def find_driver(self,adapter_args,uri=None):
        self.adapter_args = adapter_args
        if getattr(self, 'driver', None) is not None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError("driver %s not available" % request_driver)
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError("no driver available %s" % str(self.drivers))
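
    # Sketch of the URI parsing above: an explicit driver can be requested
    # between the engine name and '://' (URIs hypothetical):
    #
    #   >>> 'postgres:psycopg2://u:p@host/db'.split('://',1)[0].split(':')
    #   ['postgres', 'psycopg2']
    #
    # so 'postgres:psycopg2://...' demands psycopg2, while plain
    # 'postgres://...' falls back to the first importable name in self.drivers.
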
    def log(self, message, table=None):
        """ Logs migrations

        It will not log changes if logfile is not specified. Defaults
        to sql.log
        """
        isabs = None
        logfilename = self.adapter_args.get('logfile','sql.log')
        writelog = bool(logfilename)
        if writelog:
            isabs = os.path.isabs(logfilename)
        if table and table._dbt and writelog and self.folder:
            if isabs:
                table._loggername = logfilename
            else:
                table._loggername = pjoin(self.folder, logfilename)
            logfile = self.file_open(table._loggername, 'a')
            logfile.write(message)
            self.file_close(logfile)
    def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={},do_connect=True, after_connection=None):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self._after_connection = after_connection
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def varquote(self,name):
        return name
    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                        constraint_name = constraint_name, # should be quoted
                                        foreign_key = '%s (%s)' % (rtablename,
                                                                   rfieldname),
                                        table_name = tablename,
                                        field_name = field_name,
                                        on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                if not hasattr(self,'srid'):
                    raise RuntimeError('Adapter does not support geometry')
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError(
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name))
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2  # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError('Field: unknown field type: %s for %s' % \
                    (field_type, field_name))
            else:
                ftype = types[field_type]\
                     % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier

            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)

            if field.notnull and not field.default is None:
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                    field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                    table_name = tablename,
                    field_name=', '.join(fkeys),
                    foreign_table = rtablename,
                    foreign_key = ', '.join(pkeys),
                    on_delete_action = field.ondelete)

        if getattr(table,'_primarykey',None):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))

        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                self.log('timestamp: %s\n%s\n'
                         % (datetime.datetime.today().isoformat(),
                            query), table)
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                pickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    self.log('faked!\n', table)
                else:
                    self.log('success!\n', table)
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = pickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                raise RuntimeError('File %s appears corrupted' % table._dbt)
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, None,
                                   fake_migrate=fake_migrate)
        return query
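
    # Sketch: for the docstring's ``person`` table on SQLite, the query
    # assembled above comes out roughly as (derived from self.types and
    # DEFAULTLENGTH; exact whitespace may differ):
    #
    #   CREATE TABLE person(
    #       id INTEGER PRIMARY KEY AUTOINCREMENT,
    #       name CHAR(512)
    #   );
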
    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        # logfile is deprecated (moved to adapter.log method)
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unknown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        new_add = self.concat_add(tablename)

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                             (tablename, key,
                              sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" %
                              dict(schema=schema, table=tablename, field=key,) ]
                elif self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not (key in table.fields and
                             isinstance(table[key].type, SQLCustomType)) \
                    and not sql_fields[key]['type'].startswith('reference')\
                    and not sql_fields[key]['type'].startswith('double')\
                    and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if self.dbengine in ('firebird',):
                    drop_expr = 'ALTER TABLE %s DROP %s;'
                else:
                    drop_expr = 'ALTER TABLE %s DROP COLUMN %s;'
                key_tmp = key + '__tmp'
                query = ['ALTER TABLE %s ADD %s %s;' % (t, key_tmp, tt),
                         'UPDATE %s SET %s=%s;' % (t, key_tmp, key),
                         drop_expr % (t, key),
                         'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                         'UPDATE %s SET %s=%s;' % (t, key, key_tmp),
                         drop_expr % (t, key_tmp)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                self.log('timestamp: %s\n'
                         % datetime.datetime.today().isoformat(), table)
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    self.log(sub_query + '\n', table)
                    if fake_migrate:
                        if db._adapter.commit_on_alter_table:
                            self.save_dbt(table,sql_fields_current)
                        self.log('faked!\n', table)
                    else:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow multiple alter table
                        # in one transaction so we must commit partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            self.save_dbt(table,sql_fields_current)
                            self.log('success!\n', table)
            elif metadata_change:
                self.save_dbt(table,sql_fields_current)

        if metadata_change and not (query and db._adapter.commit_on_alter_table):
            db.commit()
            self.save_dbt(table,sql_fields_current)
            self.log('success!\n', table)
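
    # Sketch of the rename-via-temp-column sequence built above when a
    # column's SQL type changes (table and types hypothetical):
    #
    #   ALTER TABLE person ADD name__tmp TEXT;
    #   UPDATE person SET name__tmp=name;
    #   ALTER TABLE person DROP COLUMN name;
    #   ALTER TABLE person ADD name TEXT;
    #   UPDATE person SET name=name__tmp;
    #   ALTER TABLE person DROP COLUMN name__tmp;
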
    def save_dbt(self,table, sql_fields_current):
        tfile = self.file_open(table._dbt, 'w')
        pickle.dump(sql_fields_current, tfile)
        self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def LENGTH(self, first):
        return "LENGTH(%s)" % self.expand(first)

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key
    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                self.log(query + '\n', table)
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            self.log('success!\n', table)

    def _insert(self, table, fields):
        if fields:
            keys = ','.join(f.name for f, v in fields)
            values = ','.join(self.expand(v, f.type) for f, v in fields)
            return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)
        else:
            return self._insert_empty(table)

    def _insert_empty(self, table):
        return 'INSERT INTO %s DEFAULT VALUES;' % table

    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception:
            e = sys.exc_info()[1]
            if hasattr(table,'_on_insert_error'):
                return table._on_insert_error(table,fields,e)
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                         if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]
    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif not second:
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case-sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case-insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self,first,second,case_sensitive=False):
        if first.type in ('string','text', 'json'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT('%',Expression(
                    None,self.REPLACE(second,('%','%%'))),'%'))
            else:
                second = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            if isinstance(second,Expression):
                second = Expression(None,self.CONCAT(
                    '%|',Expression(None,self.REPLACE(
                        Expression(None,self.REPLACE(
                            second,('%','%%'))),('|','||'))),'|%'))
            else:
                second = '%|'+str(second).replace('%','%%')\
                    .replace('|','||')+'|%'
        op = case_sensitive and self.LIKE or self.ILIKE
        return op(first,second)
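
    # Sketch: for a list:string field, CONTAINS escapes '%' and '|' in the
    # needle and wraps it in the '|' separators used by the serialized list,
    # so (field and value hypothetical):
    #
    #   person.tags.contains('a|b')
    #
    # delegates to ILIKE with the pattern '%|a||b|%'.
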
    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s < None" % first)
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s <= None" % first)
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s > None" % first)
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self,first,second=None):
        if second is None:
            raise RuntimeError("Cannot compare %s >= None" % first)
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def is_numerical_type(self, ftype):
        return ftype in ('integer','boolean','double','bigint') or \
            ftype.startswith('decimal')
    def REPLACE(self, first, args):
        # unpack inside the body (tuple parameters are a SyntaxError on Python 3)
        second, third = args
        return 'REPLACE(%s,%s,%s)' % (self.expand(first,'string'),
                                      self.expand(second,'string'),
                                      self.expand(third,'string'))

    def CONCAT(self, *items):
        return '(%s)' % ' || '.join(self.expand(x,'string') for x in items)

    def ADD(self, first, second):
        if self.is_numerical_type(first.type):
            return '(%s + %s)' % (self.expand(first),
                                  self.expand(second, first.type))
        else:
            return self.CONCAT(first, second)

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        if use_common_filters(second):
            second = self.common_filter(second,[first._tablename])
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))
    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            out = '%s.%s' % (expression.table._tablename, expression.name)
            if field_type == 'string' and not expression.type in (
                    'string','text','json','password'):
                out = 'CAST(%s AS %s)' % (out, self.types['text'])
            return out
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            optional_args = expression.optional_args or {}
            if not second is None:
                out = op(first, second, **optional_args)
            elif not first is None:
                out = op(first,**optional_args)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                out = '(%s)' % op
            else:
                out = op()
            return out
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                            for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)
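
    # Sketch: expand() recurses through a Query/Expression tree; the
    # docstring's query
    #
    #   (person.name=='James') & (person.name.startswith('J'))
    #
    # renders via EQ, STARTSWITH and AND as
    #
    #   ((person.name = 'James') AND (person.name LIKE 'J%'))
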
    def table_alias(self,name):
        return str(name if isinstance(name,Table) else self.db[name])

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._ot or other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other
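
    # Sketch: web2py exposes this through Table.with_alias, which lets the
    # same table appear twice in one query (the ``boss`` field is hypothetical):
    #
    #   >>> manager = db.person.with_alias('manager')
    #   >>> db().select(db.person.name, manager.name,
    #   ...             left=manager.on(manager.id==db.person.boss))
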
    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=' '):
        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                self.log(query + '\n', table)
                self.execute(query)
            table._db.commit()
            self.log('success!\n', table)
        finally:
            pass
    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name,
                                     self.expand(value, field.type)) \
                          for (field, value) in fields])
        tablename = "%s" % self.db[tablename]
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        try:
            self.execute(sql)
        except Exception:
            e = sys.exc_info()[1]
            table = self.db[tablename]
            if hasattr(table,'_on_update_error'):
                return table._on_update_error(table,query,fields,e)
            raise e
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)
        ### special code to handle CASCADE in SQLite & SpatiaLite
        db = self.db
        table = db[tablename]
        if self.dbengine in ('sqlite', 'spatialite') and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]
        ### end special code to handle CASCADE in SQLite & SpatiaLite
        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None
        ### special code to handle CASCADE in SQLite & SpatiaLite
        if self.dbengine in ('sqlite', 'spatialite') and counter:
            for field in table._referenced_by:
                if field.type=='reference '+table._tablename \
                        and field.ondelete=='CASCADE':
                    db(field.belongs(deleted)).delete()
        ### end special code to handle CASCADE in SQLite & SpatiaLite
        return counter
    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames)==1:
            return tablenames[0]
        elif len(tablenames)<1:
            raise RuntimeError("No table selected")
        else:
            raise RuntimeError("Too many tables selected")

    def expand_all(self, fields, tablenames):
        db = self.db
        new_fields = []
        append = new_fields.append
        for item in fields:
            if isinstance(item,SQLALL):
                new_fields += item._table
            elif isinstance(item,str):
                if REGEX_TABLE_DOT_FIELD.match(item):
                    tablename,fieldname = item.split('.')
                    append(db[tablename][fieldname])
                else:
                    append(Expression(db,lambda item=item:item))
            else:
                append(item)
        # ## if no fields specified take them all from the requested tables
        if not new_fields:
            for table in tablenames:
                for field in db[table]:
                    append(field)
        return new_fields
  1359. def _select(self, query, fields, attributes):
  1360. tables = self.tables
  1361. for key in set(attributes.keys())-SELECT_ARGS:
  1362. raise SyntaxError('invalid select attribute: %s' % key)
  1363. args_get = attributes.get
  1364. tablenames = tables(query)
  1365. tablenames_for_common_filters = tablenames
  1366. for field in fields:
  1367. if isinstance(field, basestring) \
  1368. and REGEX_TABLE_DOT_FIELD.match(field):
  1369. tn,fn = field.split('.')
  1370. field = self.db[tn][fn]
  1371. for tablename in tables(field):
  1372. if not tablename in tablenames:
  1373. tablenames.append(tablename)
  1374. if len(tablenames) < 1:
  1375. raise SyntaxError('Set: no tables selected')
  1376. self._colnames = map(self.expand, fields)
  1377. def geoexpand(field):
  1378. if isinstance(field.type,str) and field.type.startswith('geometry'):
  1379. field = field.st_astext()
  1380. return self.expand(field)
  1381. sql_f = ', '.join(map(geoexpand, fields))
  1382. sql_o = ''
  1383. sql_s = ''
  1384. left = args_get('left', False)
  1385. inner_join = args_get('join', False)
  1386. distinct = args_get('distinct', False)
  1387. groupby = args_get('groupby', False)
  1388. orderby = args_get('orderby', False)
  1389. having = args_get('having', False)
  1390. limitby = args_get('limitby', False)
  1391. orderby_on_limitby = args_get('orderby_on_limitby', True)
  1392. for_update = args_get('for_update', False)
  1393. if self.can_select_for_update is False and for_update is True:
  1394. raise SyntaxError('invalid select attribute: for_update')
  1395. if distinct is True:
  1396. sql_s += 'DISTINCT'
  1397. elif distinct:
  1398. sql_s += 'DISTINCT ON (%s)' % distinct
  1399. if inner_join:
  1400. icommand = self.JOIN()
  1401. if not isinstance(inner_join, (tuple, list)):
  1402. inner_join = [inner_join]
  1403. ijoint = [t._tablename for t in inner_join
  1404. if not isinstance(t,Expression)]
  1405. ijoinon = [t for t in inner_join if isinstance(t, Expression)]
  1406. itables_to_merge={} #issue 490
  1407. [itables_to_merge.update(
  1408. dict.fromkeys(tables(t))) for t in ijoinon]
  1409. ijoinont = [t.first._tablename for t in ijoinon]
  1410. [itables_to_merge.pop(t) for t in ijoinont
  1411. if t in itables_to_merge] #issue 490
  1412. iimportant_tablenames = ijoint + ijoinont + itables_to_merge.keys()
  1413. iexcluded = [t for t in tablenames
  1414. if not t in iimportant_tablenames]
  1415. if left:
  1416. join = attributes['left']
  1417. command = self.LEFT_JOIN()
  1418. if not isinstance(join, (tuple, list)):
  1419. join = [join]
  1420. joint = [t._tablename for t in join
  1421. if not isinstance(t, Expression)]
  1422. joinon = [t for t in join if isinstance(t, Expression)]
  1423. #patch join+left patch (solves problem with ordering in left joins)
  1424. tables_to_merge={}
  1425. [tables_to_merge.update(
  1426. dict.fromkeys(tables(t))) for t in joinon]
  1427. joinont = [t.first._tablename for t in joinon]
  1428. [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
  1429. tablenames_for_common_filters = [t for t in tablenames
  1430. if not t in joinont ]
  1431. important_tablenames = joint + joinont + tables_to_merge.keys()
  1432. excluded = [t for t in tablenames
  1433. if not t in important_tablenames ]
  1434. else:
  1435. excluded = tablenames
  1436. if use_common_filters(query):
  1437. query = self.common_filter(query,tablenames_for_common_filters)
  1438. sql_w = ' WHERE ' + self.expand(query) if query else ''
  1439. if inner_join and not left:
  1440. sql_t = ', '.join([self.table_alias(t) for t in iexcluded + \
  1441. itables_to_merge.keys()])
  1442. for t in ijoinon:
  1443. sql_t += ' %s %s' % (icommand, t)
  1444. elif not inner_join and left:
  1445. sql_t = ', '.join([self.table_alias(t) for t in excluded + \
  1446. tables_to_merge.keys()])
  1447. if joint:
  1448. sql_t += ' %s %s' % (command,
  1449. ','.join([self.table_alias(t) for t in joint]))
  1450. for t in joinon:
  1451. sql_t += ' %s %s' % (command, t)
  1452. elif inner_join and left:
  1453. all_tables_in_query = set(important_tablenames + \
  1454. iimportant_tablenames + \
  1455. tablenames)
  1456. tables_in_joinon = set(joinont + ijoinont)
  1457. tables_not_in_joinon = \
  1458. all_tables_in_query.difference(tables_in_joinon)
  1459. sql_t = ','.join([self.table_alias(t) for t in tables_not_in_joinon])
  1460. for t in ijoinon:
  1461. sql_t += ' %s %s' % (icommand, t)
  1462. if joint:
  1463. sql_t += ' %s %s' % (command,
  1464. ','.join([self.table_alias(t) for t in joint]))
  1465. for t in joinon:
  1466. sql_t += ' %s %s' % (command, t)
  1467. else:
  1468. sql_t = ', '.join(self.table_alias(t) for t in tablenames)
  1469. if groupby:
  1470. if isinstance(groupby, (list, tuple)):
  1471. groupby = xorify(groupby)
  1472. sql_o += ' GROUP BY %s' % self.expand(groupby)
  1473. if having:
  1474. sql_o += ' HAVING %s' % attributes['having']
  1475. if orderby:
  1476. if isinstance(orderby, (list, tuple)):
  1477. orderby = xorify(orderby)
  1478. if str(orderby) == '<random>':
  1479. sql_o += ' ORDER BY %s' % self.RANDOM()
  1480. else:
  1481. sql_o += ' ORDER BY %s' % self.expand(orderby)
  1482. if (limitby and not groupby and tablenames and orderby_on_limitby):
1483. sql_o += ' ORDER BY %s' % ', '.join(
    ['%s.%s' % (t, x) for t in tablenames
     for x in (hasattr(self.db[t], '_primarykey') and
               self.db[t]._primarykey or [self.db[t]._id.name])])
1484. # Oracle does not support LIMIT/OFFSET; its adapter overrides select_limitby
  1485. sql = self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)
  1486. if for_update and self.can_select_for_update is True:
  1487. sql = sql.rstrip(';') + ' FOR UPDATE;'
  1488. return sql
  1489. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  1490. if limitby:
  1491. (lmin, lmax) = limitby
  1492. sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
  1493. return 'SELECT %s %s FROM %s%s%s;' % \
  1494. (sql_s, sql_f, sql_t, sql_w, sql_o)
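# For example, limitby=(10, 30) renders here as 'LIMIT 20 OFFSET 10';
# adapters for engines without LIMIT/OFFSET (Oracle, MSSQL, ...) override
# this method with their own pagination idiom (see below).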
  1495. def _fetchall(self):
  1496. return self.cursor.fetchall()
  1497. def _select_aux(self,sql,fields,attributes):
  1498. args_get = attributes.get
  1499. cache = args_get('cache',None)
  1500. if not cache:
  1501. self.execute(sql)
  1502. rows = self._fetchall()
  1503. else:
  1504. (cache_model, time_expire) = cache
  1505. key = self.uri + '/' + sql + '/rows'
  1506. if len(key)>200: key = hashlib_md5(key).hexdigest()
  1507. def _select_aux2():
  1508. self.execute(sql)
  1509. return self._fetchall()
  1510. rows = cache_model(key,_select_aux2,time_expire)
  1511. if isinstance(rows,tuple):
  1512. rows = list(rows)
  1513. limitby = args_get('limitby', None) or (0,)
  1514. rows = self.rowslice(rows,limitby[0],None)
  1515. processor = args_get('processor',self.parse)
  1516. cacheable = args_get('cacheable',False)
  1517. return processor(rows,fields,self._colnames,cacheable=cacheable)
  1518. def select(self, query, fields, attributes):
  1519. """
  1520. Always returns a Rows object, possibly empty.
  1521. """
  1522. sql = self._select(query, fields, attributes)
  1523. cache = attributes.get('cache', None)
  1524. if cache and attributes.get('cacheable',False):
  1525. del attributes['cache']
  1526. (cache_model, time_expire) = cache
  1527. key = self.uri + '/' + sql
  1528. if len(key)>200: key = hashlib_md5(key).hexdigest()
  1529. args = (sql,fields,attributes)
  1530. return cache_model(
  1531. key,
  1532. lambda self=self,args=args:self._select_aux(*args),
  1533. time_expire)
  1534. else:
  1535. return self._select_aux(sql,fields,attributes)
  1536. def _count(self, query, distinct=None):
  1537. tablenames = self.tables(query)
  1538. if query:
  1539. if use_common_filters(query):
  1540. query = self.common_filter(query, tablenames)
  1541. sql_w = ' WHERE ' + self.expand(query)
  1542. else:
  1543. sql_w = ''
  1544. sql_t = ','.join(self.table_alias(t) for t in tablenames)
  1545. if distinct:
  1546. if isinstance(distinct,(list, tuple)):
  1547. distinct = xorify(distinct)
  1548. sql_d = self.expand(distinct)
  1549. return 'SELECT count(DISTINCT %s) FROM %s%s;' % \
  1550. (sql_d, sql_t, sql_w)
  1551. return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)
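# Illustrative output, assuming a 'person' table:
#   _count(db.person.id > 0) -> "SELECT count(*) FROM person WHERE (person.id > 0);"
#   _count(query, distinct=db.person.name) wraps the field in count(DISTINCT ...).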
  1552. def count(self, query, distinct=None):
  1553. self.execute(self._count(query, distinct))
  1554. return self.cursor.fetchone()[0]
  1555. def tables(self, *queries):
  1556. tables = set()
  1557. for query in queries:
  1558. if isinstance(query, Field):
  1559. tables.add(query.tablename)
  1560. elif isinstance(query, (Expression, Query)):
  1561. if not query.first is None:
  1562. tables = tables.union(self.tables(query.first))
  1563. if not query.second is None:
  1564. tables = tables.union(self.tables(query.second))
  1565. return list(tables)
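# tables() walks a Query/Expression tree and collects the referenced table
# names, e.g. tables(db.person.name == 'James') -> ['person']; order is not
# guaranteed because a set is used internally.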
  1566. def commit(self):
  1567. if self.connection: return self.connection.commit()
  1568. def rollback(self):
  1569. if self.connection: return self.connection.rollback()
  1570. def close_connection(self):
  1571. if self.connection: return self.connection.close()
  1572. def distributed_transaction_begin(self, key):
  1573. return
  1574. def prepare(self, key):
  1575. if self.connection: self.connection.prepare()
  1576. def commit_prepared(self, key):
  1577. if self.connection: self.connection.commit()
  1578. def rollback_prepared(self, key):
  1579. if self.connection: self.connection.rollback()
  1580. def concat_add(self, tablename):
  1581. return ', ADD '
  1582. def constraint_name(self, table, fieldname):
  1583. return '%s_%s__constraint' % (table,fieldname)
  1584. def create_sequence_and_triggers(self, query, table, **args):
  1585. self.execute(query)
  1586. def log_execute(self, *a, **b):
  1587. if not self.connection: return None
  1588. command = a[0]
  1589. if hasattr(self,'filter_sql_command'):
  1590. command = self.filter_sql_command(command)
  1591. if self.db._debug:
  1592. LOGGER.debug('SQL: %s' % command)
  1593. self.db._lastsql = command
  1594. t0 = time.time()
  1595. ret = self.cursor.execute(command, *a[1:], **b)
  1596. self.db._timings.append((command,time.time()-t0))
  1597. del self.db._timings[:-TIMINGSSIZE]
  1598. return ret
  1599. def execute(self, *a, **b):
  1600. return self.log_execute(*a, **b)
  1601. def represent(self, obj, fieldtype):
  1602. field_is_type = fieldtype.startswith
  1603. if isinstance(obj, CALLABLETYPES):
  1604. obj = obj()
  1605. if isinstance(fieldtype, SQLCustomType):
  1606. value = fieldtype.encoder(obj)
  1607. if fieldtype.type in ('string','text', 'json'):
  1608. return self.adapt(value)
  1609. return value
  1610. if isinstance(obj, (Expression, Field)):
  1611. return str(obj)
  1612. if field_is_type('list:'):
  1613. if not obj:
  1614. obj = []
  1615. elif not isinstance(obj, (list, tuple)):
  1616. obj = [obj]
  1617. if field_is_type('list:string'):
  1618. obj = map(str,obj)
  1619. else:
  1620. obj = map(int,[o for o in obj if o != ''])
  1621. # we don't want to bar_encode json objects
  1622. if isinstance(obj, (list, tuple)) and (not fieldtype == "json"):
  1623. obj = bar_encode(obj)
  1624. if obj is None:
  1625. return 'NULL'
  1626. if obj == '' and not fieldtype[:2] in ['st', 'te', 'js', 'pa', 'up']:
  1627. return 'NULL'
  1628. r = self.represent_exceptions(obj, fieldtype)
  1629. if not r is None:
  1630. return r
  1631. if fieldtype == 'boolean':
  1632. if obj and not str(obj)[:1].upper() in '0F':
  1633. return self.smart_adapt(self.TRUE)
  1634. else:
  1635. return self.smart_adapt(self.FALSE)
  1636. if fieldtype == 'id' or fieldtype == 'integer':
  1637. return str(long(obj))
  1638. if field_is_type('decimal'):
  1639. return str(obj)
  1640. elif field_is_type('reference'): # reference
  1641. if fieldtype.find('.')>0:
  1642. return repr(obj)
  1643. elif isinstance(obj, (Row, Reference)):
  1644. return str(obj['id'])
  1645. return str(long(obj))
  1646. elif fieldtype == 'double':
  1647. return repr(float(obj))
  1648. if isinstance(obj, unicode):
  1649. obj = obj.encode(self.db_codec)
  1650. if fieldtype == 'blob':
  1651. obj = base64.b64encode(str(obj))
  1652. elif fieldtype == 'date':
  1653. if isinstance(obj, (datetime.date, datetime.datetime)):
  1654. obj = obj.isoformat()[:10]
  1655. else:
  1656. obj = str(obj)
  1657. elif fieldtype == 'datetime':
  1658. if isinstance(obj, datetime.datetime):
  1659. obj = obj.isoformat(self.T_SEP)[:19]
  1660. elif isinstance(obj, datetime.date):
  1661. obj = obj.isoformat()[:10]+self.T_SEP+'00:00:00'
  1662. else:
  1663. obj = str(obj)
  1664. elif fieldtype == 'time':
  1665. if isinstance(obj, datetime.time):
1666. obj = obj.isoformat()[:8] # HH:MM:SS; [:10] would keep a stray fraction digit
  1667. else:
  1668. obj = str(obj)
  1669. elif fieldtype == 'json':
  1670. if not self.native_json:
  1671. if have_serializers:
  1672. obj = serializers.json(obj)
  1673. elif simplejson:
  1674. obj = simplejson.dumps(obj)
  1675. else:
  1676. raise RuntimeError("missing simplejson")
  1677. if not isinstance(obj,bytes):
  1678. obj = bytes(obj)
  1679. try:
  1680. obj.decode(self.db_codec)
  1681. except:
  1682. obj = obj.decode('latin1').encode(self.db_codec)
  1683. return self.adapt(obj)
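# Illustrative cases (final quoting is delegated to the adapter's adapt()):
#   represent(None, 'string') -> 'NULL'
#   represent(True, 'boolean') -> the adapter's TRUE literal
#   represent(datetime.date(2012, 3, 4), 'date') -> adapt('2012-03-04')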
  1684. def represent_exceptions(self, obj, fieldtype):
  1685. return None
  1686. def lastrowid(self, table):
  1687. return None
  1688. def rowslice(self, rows, minimum=0, maximum=None):
  1689. """
  1690. By default this function does nothing;
  1691. overload when db does not do slicing.
  1692. """
  1693. return rows
  1694. def parse_value(self, value, field_type, blob_decode=True):
  1695. if field_type != 'blob' and isinstance(value, str):
  1696. try:
  1697. value = value.decode(self.db._db_codec)
  1698. except Exception:
  1699. pass
  1700. if isinstance(value, unicode):
  1701. value = value.encode('utf-8')
  1702. if isinstance(field_type, SQLCustomType):
  1703. value = field_type.decoder(value)
  1704. if not isinstance(field_type, str) or value is None:
  1705. return value
  1706. elif field_type in ('string', 'text', 'password', 'upload', 'dict'):
  1707. return value
  1708. elif field_type.startswith('geo'):
  1709. return value
  1710. elif field_type == 'blob' and not blob_decode:
  1711. return value
  1712. else:
  1713. key = REGEX_TYPE.match(field_type).group(0)
  1714. return self.parsemap[key](value,field_type)
  1715. def parse_reference(self, value, field_type):
  1716. referee = field_type[10:].strip()
  1717. if not '.' in referee:
  1718. value = Reference(value)
  1719. value._table, value._record = self.db[referee], None
  1720. return value
  1721. def parse_boolean(self, value, field_type):
  1722. return value == self.TRUE or str(value)[:1].lower() == 't'
  1723. def parse_date(self, value, field_type):
  1724. if isinstance(value, datetime.datetime):
  1725. return value.date()
  1726. if not isinstance(value, (datetime.date,datetime.datetime)):
  1727. (y, m, d) = map(int, str(value)[:10].strip().split('-'))
  1728. value = datetime.date(y, m, d)
  1729. return value
  1730. def parse_time(self, value, field_type):
  1731. if not isinstance(value, datetime.time):
  1732. time_items = map(int,str(value)[:8].strip().split(':')[:3])
  1733. if len(time_items) == 3:
  1734. (h, mi, s) = time_items
  1735. else:
  1736. (h, mi, s) = time_items + [0]
  1737. value = datetime.time(h, mi, s)
  1738. return value
  1739. def parse_datetime(self, value, field_type):
  1740. if not isinstance(value, datetime.datetime):
  1741. value = str(value)
  1742. date_part,time_part,timezone = value[:10],value[11:19],value[19:]
  1743. if '+' in timezone:
  1744. ms,tz = timezone.split('+')
  1745. h,m = tz.split(':')
  1746. dt = datetime.timedelta(seconds=3600*int(h)+60*int(m))
  1747. elif '-' in timezone:
  1748. ms,tz = timezone.split('-')
  1749. h,m = tz.split(':')
  1750. dt = -datetime.timedelta(seconds=3600*int(h)+60*int(m))
  1751. else:
  1752. dt = None
  1753. (y, m, d) = map(int,date_part.split('-'))
  1754. time_parts = time_part and time_part.split(':')[:3] or (0,0,0)
  1755. while len(time_parts)<3: time_parts.append(0)
  1756. time_items = map(int,time_parts)
  1757. (h, mi, s) = time_items
  1758. value = datetime.datetime(y, m, d, h, mi, s)
  1759. if dt:
  1760. value = value + dt
  1761. return value
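# Hedged example of the behavior above: '2012-03-04 10:30:00+02:00' parses
# to the naive datetime 2012-03-04 12:30:00, i.e. the UTC offset is folded
# into the value rather than kept as tzinfo.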
  1762. def parse_blob(self, value, field_type):
  1763. return base64.b64decode(str(value))
  1764. def parse_decimal(self, value, field_type):
  1765. decimals = int(field_type[8:-1].split(',')[-1])
  1766. if self.dbengine in ('sqlite', 'spatialite'):
  1767. value = ('%.' + str(decimals) + 'f') % value
  1768. if not isinstance(value, decimal.Decimal):
  1769. value = decimal.Decimal(str(value))
  1770. return value
  1771. def parse_list_integers(self, value, field_type):
  1772. if not isinstance(self, NoSQLAdapter):
  1773. value = bar_decode_integer(value)
  1774. return value
  1775. def parse_list_references(self, value, field_type):
  1776. if not isinstance(self, NoSQLAdapter):
  1777. value = bar_decode_integer(value)
  1778. return [self.parse_reference(r, field_type[5:]) for r in value]
  1779. def parse_list_strings(self, value, field_type):
  1780. if not isinstance(self, NoSQLAdapter):
  1781. value = bar_decode_string(value)
  1782. return value
  1783. def parse_id(self, value, field_type):
  1784. return long(value)
  1785. def parse_integer(self, value, field_type):
  1786. return long(value)
  1787. def parse_double(self, value, field_type):
  1788. return float(value)
  1789. def parse_json(self, value, field_type):
  1790. if not self.native_json:
  1791. if not isinstance(value, basestring):
  1792. raise RuntimeError('json data not a string')
  1793. if isinstance(value, unicode):
  1794. value = value.encode('utf-8')
  1795. if have_serializers:
  1796. value = serializers.loads_json(value)
  1797. elif simplejson:
  1798. value = simplejson.loads(value)
  1799. else:
  1800. raise RuntimeError("missing simplejson")
  1801. return value
  1802. def build_parsemap(self):
  1803. self.parsemap = {
  1804. 'id':self.parse_id,
  1805. 'integer':self.parse_integer,
  1806. 'bigint':self.parse_integer,
  1807. 'float':self.parse_double,
  1808. 'double':self.parse_double,
  1809. 'reference':self.parse_reference,
  1810. 'boolean':self.parse_boolean,
  1811. 'date':self.parse_date,
  1812. 'time':self.parse_time,
  1813. 'datetime':self.parse_datetime,
  1814. 'blob':self.parse_blob,
  1815. 'decimal':self.parse_decimal,
  1816. 'json':self.parse_json,
  1817. 'list:integer':self.parse_list_integers,
  1818. 'list:reference':self.parse_list_references,
  1819. 'list:string':self.parse_list_strings,
  1820. }
  1821. def parse(self, rows, fields, colnames, blob_decode=True,
  1822. cacheable = False):
  1823. db = self.db
  1824. virtualtables = []
  1825. new_rows = []
  1826. tmps = []
  1827. for colname in colnames:
  1828. if not REGEX_TABLE_DOT_FIELD.match(colname):
  1829. tmps.append(None)
  1830. else:
  1831. (tablename, fieldname) = colname.split('.')
  1832. table = db[tablename]
  1833. field = table[fieldname]
  1834. ft = field.type
  1835. tmps.append((tablename,fieldname,table,field,ft))
  1836. for (i,row) in enumerate(rows):
  1837. new_row = Row()
  1838. for (j,colname) in enumerate(colnames):
  1839. value = row[j]
  1840. tmp = tmps[j]
  1841. if tmp:
  1842. (tablename,fieldname,table,field,ft) = tmp
  1843. if tablename in new_row:
  1844. colset = new_row[tablename]
  1845. else:
  1846. colset = new_row[tablename] = Row()
  1847. if tablename not in virtualtables:
  1848. virtualtables.append(tablename)
  1849. value = self.parse_value(value,ft,blob_decode)
  1850. if field.filter_out:
  1851. value = field.filter_out(value)
  1852. colset[fieldname] = value
  1853. # for backward compatibility
  1854. if ft=='id' and fieldname!='id' and \
  1855. not 'id' in table.fields:
  1856. colset['id'] = value
  1857. if ft == 'id' and not cacheable:
  1858. # temporary hack to deal with
  1859. # GoogleDatastoreAdapter
  1860. # references
  1861. if isinstance(self, GoogleDatastoreAdapter):
  1862. id = value.key().id_or_name()
  1863. colset[fieldname] = id
  1864. colset.gae_item = value
  1865. else:
  1866. id = value
  1867. colset.update_record = RecordUpdater(colset,table,id)
  1868. colset.delete_record = RecordDeleter(table,id)
  1869. for rfield in table._referenced_by:
  1870. referee_link = db._referee_name and \
  1871. db._referee_name % dict(
  1872. table=rfield.tablename,field=rfield.name)
  1873. if referee_link and not referee_link in colset:
  1874. colset[referee_link] = LazySet(rfield,id)
  1875. else:
  1876. if not '_extra' in new_row:
  1877. new_row['_extra'] = Row()
  1878. new_row['_extra'][colname] = \
  1879. self.parse_value(value,
  1880. fields[j].type,blob_decode)
  1881. new_column_name = \
  1882. REGEX_SELECT_AS_PARSER.search(colname)
  1883. if not new_column_name is None:
  1884. column_name = new_column_name.groups(0)
  1885. setattr(new_row,column_name[0],value)
  1886. new_rows.append(new_row)
  1887. rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
  1888. for tablename in virtualtables:
  1889. table = db[tablename]
  1890. fields_virtual = [(f,v) for (f,v) in table.iteritems()
  1891. if isinstance(v,FieldVirtual)]
  1892. fields_lazy = [(f,v) for (f,v) in table.iteritems()
  1893. if isinstance(v,FieldMethod)]
  1894. if fields_virtual or fields_lazy:
  1895. for row in rowsobj.records:
  1896. box = row[tablename]
  1897. for f,v in fields_virtual:
  1898. box[f] = v.f(row)
  1899. for f,v in fields_lazy:
  1900. box[f] = (v.handler or VirtualCommand)(v.f,row)
  1901. ### old style virtual fields
  1902. for item in table.virtualfields:
  1903. try:
  1904. rowsobj = rowsobj.setvirtualfields(**{tablename:item})
  1905. except (KeyError, AttributeError):
1906. # avoid breaking virtualfields on partial selects
  1907. pass
  1908. return rowsobj
  1909. def common_filter(self, query, tablenames):
  1910. tenant_fieldname = self.db._request_tenant
  1911. for tablename in tablenames:
  1912. table = self.db[tablename]
  1913. # deal with user provided filters
  1914. if table._common_filter != None:
  1915. query = query & table._common_filter(query)
  1916. # deal with multi_tenant filters
  1917. if tenant_fieldname in table:
  1918. default = table[tenant_fieldname].default
  1919. if not default is None:
  1920. newquery = table[tenant_fieldname] == default
  1921. if query is None:
  1922. query = newquery
  1923. else:
  1924. query = query & newquery
  1925. return query
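# Example with a hypothetical table definition:
#   db.define_table('post', Field('active', 'boolean'),
#                   common_filter=lambda query: db.post.active == True)
# after which every select/count touching 'post' is AND-ed with that filter
# unless the caller opts out with ignore_common_filters.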
  1926. def CASE(self,query,t,f):
  1927. def represent(x):
  1928. types = {type(True):'boolean',type(0):'integer',type(1.0):'double'}
  1929. if x is None: return 'NULL'
  1930. elif isinstance(x,Expression): return str(x)
  1931. else: return self.represent(x,types.get(type(x),'string'))
  1932. return Expression(self.db,'CASE WHEN %s THEN %s ELSE %s END' % \
  1933. (self.expand(query),represent(t),represent(f)))
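# Sketch of the emitted SQL: CASE(db.person.id > 10, 'big', 'small') expands
# to "CASE WHEN (person.id > 10) THEN 'big' ELSE 'small' END"; the literals
# pass through represent(), so quoting is adapter-specific.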
  1934. ###################################################################################
  1935. # List of all the available adapters; they all extend BaseAdapter.
  1936. ###################################################################################
  1937. class SQLiteAdapter(BaseAdapter):
  1938. drivers = ('sqlite2','sqlite3')
1939. can_select_for_update = None # emulated in select() via BEGIN IMMEDIATE TRANSACTION
  1940. def EXTRACT(self,field,what):
  1941. return "web2py_extract('%s',%s)" % (what, self.expand(field))
  1942. @staticmethod
  1943. def web2py_extract(lookup, s):
  1944. table = {
  1945. 'year': (0, 4),
  1946. 'month': (5, 7),
  1947. 'day': (8, 10),
  1948. 'hour': (11, 13),
  1949. 'minute': (14, 16),
  1950. 'second': (17, 19),
  1951. }
  1952. try:
  1953. if lookup != 'epoch':
  1954. (i, j) = table[lookup]
  1955. return int(s[i:j])
  1956. else:
  1957. return time.mktime(datetime.datetime.strptime(s, '%Y-%m-%d %H:%M:%S').timetuple())
  1958. except:
  1959. return None
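# e.g. web2py_extract('year', '2012-03-04 10:20:30') -> 2012, while
# web2py_extract('epoch', ...) -> a Unix timestamp; the function is
# registered in after_connection() as a SQLite UDF so EXTRACT() works on
# the ISO strings SQLite stores.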
  1960. @staticmethod
  1961. def web2py_regexp(expression, item):
  1962. return re.compile(expression).search(item) is not None
  1963. def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
  1964. credential_decoder=IDENTITY, driver_args={},
  1965. adapter_args={}, do_connect=True, after_connection=None):
  1966. self.db = db
  1967. self.dbengine = "sqlite"
  1968. self.uri = uri
  1969. if do_connect: self.find_driver(adapter_args)
  1970. self.pool_size = 0
  1971. self.folder = folder
  1972. self.db_codec = db_codec
  1973. self._after_connection = after_connection
  1974. self.find_or_make_work_folder()
  1975. path_encoding = sys.getfilesystemencoding() \
  1976. or locale.getdefaultlocale()[1] or 'utf8'
  1977. if uri.startswith('sqlite:memory'):
  1978. self.dbpath = ':memory:'
  1979. else:
  1980. self.dbpath = uri.split('://',1)[1]
  1981. if self.dbpath[0] != '/':
  1982. if PYTHON_VERSION == 2:
  1983. self.dbpath = pjoin(
  1984. self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
  1985. else:
  1986. self.dbpath = pjoin(self.folder, self.dbpath)
  1987. if not 'check_same_thread' in driver_args:
  1988. driver_args['check_same_thread'] = False
  1989. if not 'detect_types' in driver_args and do_connect:
  1990. driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
  1991. def connector(dbpath=self.dbpath, driver_args=driver_args):
  1992. return self.driver.Connection(dbpath, **driver_args)
  1993. self.connector = connector
  1994. if do_connect: self.reconnect()
  1995. def after_connection(self):
  1996. self.connection.create_function('web2py_extract', 2,
  1997. SQLiteAdapter.web2py_extract)
  1998. self.connection.create_function("REGEXP", 2,
  1999. SQLiteAdapter.web2py_regexp)
  2000. def _truncate(self, table, mode=''):
  2001. tablename = table._tablename
  2002. return ['DELETE FROM %s;' % tablename,
  2003. "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]
  2004. def lastrowid(self, table):
  2005. return self.cursor.lastrowid
  2006. def REGEXP(self,first,second):
  2007. return '(%s REGEXP %s)' % (self.expand(first),
  2008. self.expand(second,'string'))
  2009. def select(self, query, fields, attributes):
  2010. """
  2011. Simulate SELECT ... FOR UPDATE with BEGIN IMMEDIATE TRANSACTION.
  2012. Note that the entire database, rather than one record, is locked
  2013. (it will be locked eventually anyway by the following UPDATE).
  2014. """
  2015. if attributes.get('for_update', False) and not 'cache' in attributes:
  2016. self.execute('BEGIN IMMEDIATE TRANSACTION;')
  2017. return super(SQLiteAdapter, self).select(query, fields, attributes)
  2018. class SpatiaLiteAdapter(SQLiteAdapter):
  2019. drivers = ('sqlite3','sqlite2')
  2020. types = copy.copy(BaseAdapter.types)
  2021. types.update(geometry='GEOMETRY')
  2022. def __init__(self, db, uri, pool_size=0, folder=None, db_codec ='UTF-8',
  2023. credential_decoder=IDENTITY, driver_args={},
  2024. adapter_args={}, do_connect=True, srid=4326, after_connection=None):
  2025. self.db = db
  2026. self.dbengine = "spatialite"
  2027. self.uri = uri
  2028. if do_connect: self.find_driver(adapter_args)
  2029. self.pool_size = 0
  2030. self.folder = folder
  2031. self.db_codec = db_codec
  2032. self._after_connection = after_connection
  2033. self.find_or_make_work_folder()
  2034. self.srid = srid
  2035. path_encoding = sys.getfilesystemencoding() \
  2036. or locale.getdefaultlocale()[1] or 'utf8'
  2037. if uri.startswith('spatialite:memory'):
  2038. self.dbpath = ':memory:'
  2039. else:
  2040. self.dbpath = uri.split('://',1)[1]
  2041. if self.dbpath[0] != '/':
  2042. self.dbpath = pjoin(
  2043. self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
  2044. if not 'check_same_thread' in driver_args:
  2045. driver_args['check_same_thread'] = False
  2046. if not 'detect_types' in driver_args and do_connect:
  2047. driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
  2048. def connector(dbpath=self.dbpath, driver_args=driver_args):
  2049. return self.driver.Connection(dbpath, **driver_args)
  2050. self.connector = connector
  2051. if do_connect: self.reconnect()
  2052. def after_connection(self):
  2053. self.connection.enable_load_extension(True)
  2054. # for Windows, rename libspatialite-2.dll to libspatialite.dll
  2055. # Linux uses libspatialite.so
  2056. # Mac OS X uses libspatialite.dylib
  2057. libspatialite = SPATIALLIBS[platform.system()]
  2058. self.execute(r'SELECT load_extension("%s");' % libspatialite)
  2059. self.connection.create_function('web2py_extract', 2,
  2060. SQLiteAdapter.web2py_extract)
  2061. self.connection.create_function("REGEXP", 2,
  2062. SQLiteAdapter.web2py_regexp)
  2063. # GIS functions
  2064. def ST_ASGEOJSON(self, first, second):
  2065. return 'AsGeoJSON(%s,%s,%s)' %(self.expand(first),
  2066. second['precision'], second['options'])
  2067. def ST_ASTEXT(self, first):
  2068. return 'AsText(%s)' %(self.expand(first))
  2069. def ST_CONTAINS(self, first, second):
  2070. return 'Contains(%s,%s)' %(self.expand(first),
  2071. self.expand(second, first.type))
  2072. def ST_DISTANCE(self, first, second):
  2073. return 'Distance(%s,%s)' %(self.expand(first),
  2074. self.expand(second, first.type))
  2075. def ST_EQUALS(self, first, second):
  2076. return 'Equals(%s,%s)' %(self.expand(first),
  2077. self.expand(second, first.type))
  2078. def ST_INTERSECTS(self, first, second):
  2079. return 'Intersects(%s,%s)' %(self.expand(first),
  2080. self.expand(second, first.type))
  2081. def ST_OVERLAPS(self, first, second):
  2082. return 'Overlaps(%s,%s)' %(self.expand(first),
  2083. self.expand(second, first.type))
  2084. def ST_SIMPLIFY(self, first, second):
  2085. return 'Simplify(%s,%s)' %(self.expand(first),
  2086. self.expand(second, 'double'))
  2087. def ST_TOUCHES(self, first, second):
  2088. return 'Touches(%s,%s)' %(self.expand(first),
  2089. self.expand(second, first.type))
  2090. def ST_WITHIN(self, first, second):
  2091. return 'Within(%s,%s)' %(self.expand(first),
  2092. self.expand(second, first.type))
  2093. def represent(self, obj, fieldtype):
  2094. field_is_type = fieldtype.startswith
  2095. if field_is_type('geo'):
  2096. srid = 4326 # Spatialite default srid for geometry
  2097. geotype, parms = fieldtype[:-1].split('(')
  2098. parms = parms.split(',')
  2099. if len(parms) >= 2:
  2100. schema, srid = parms[:2]
  2101. # if field_is_type('geometry'):
  2102. value = "ST_GeomFromText('%s',%s)" %(obj, srid)
  2103. # elif field_is_type('geography'):
  2104. # value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
  2105. # else:
  2106. # raise SyntaxError, 'Invalid field type %s' %fieldtype
  2107. return value
  2108. return BaseAdapter.represent(self, obj, fieldtype)
  2109. class JDBCSQLiteAdapter(SQLiteAdapter):
  2110. drivers = ('zxJDBC_sqlite',)
  2111. def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
  2112. credential_decoder=IDENTITY, driver_args={},
  2113. adapter_args={}, do_connect=True, after_connection=None):
  2114. self.db = db
  2115. self.dbengine = "sqlite"
  2116. self.uri = uri
  2117. if do_connect: self.find_driver(adapter_args)
  2118. self.pool_size = pool_size
  2119. self.folder = folder
  2120. self.db_codec = db_codec
  2121. self._after_connection = after_connection
  2122. self.find_or_make_work_folder()
  2123. path_encoding = sys.getfilesystemencoding() \
  2124. or locale.getdefaultlocale()[1] or 'utf8'
  2125. if uri.startswith('sqlite:memory'):
  2126. self.dbpath = ':memory:'
  2127. else:
  2128. self.dbpath = uri.split('://',1)[1]
  2129. if self.dbpath[0] != '/':
  2130. self.dbpath = pjoin(
  2131. self.folder.decode(path_encoding).encode('utf8'), self.dbpath)
  2132. def connector(dbpath=self.dbpath,driver_args=driver_args):
  2133. return self.driver.connect(
  2134. self.driver.getConnection('jdbc:sqlite:'+dbpath),
  2135. **driver_args)
  2136. self.connector = connector
  2137. if do_connect: self.reconnect()
  2138. def after_connection(self):
  2139. # FIXME http://www.zentus.com/sqlitejdbc/custom_functions.html for UDFs
  2140. self.connection.create_function('web2py_extract', 2,
  2141. SQLiteAdapter.web2py_extract)
  2142. def execute(self, a):
  2143. return self.log_execute(a)
  2144. class MySQLAdapter(BaseAdapter):
  2145. drivers = ('MySQLdb','pymysql')
  2146. commit_on_alter_table = True
  2147. support_distributed_transaction = True
  2148. types = {
  2149. 'boolean': 'CHAR(1)',
  2150. 'string': 'VARCHAR(%(length)s)',
  2151. 'text': 'LONGTEXT',
  2152. 'json': 'LONGTEXT',
  2153. 'password': 'VARCHAR(%(length)s)',
  2154. 'blob': 'LONGBLOB',
  2155. 'upload': 'VARCHAR(%(length)s)',
  2156. 'integer': 'INT',
  2157. 'bigint': 'BIGINT',
  2158. 'float': 'FLOAT',
  2159. 'double': 'DOUBLE',
  2160. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2161. 'date': 'DATE',
  2162. 'time': 'TIME',
  2163. 'datetime': 'DATETIME',
  2164. 'id': 'INT AUTO_INCREMENT NOT NULL',
  2165. 'reference': 'INT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2166. 'list:integer': 'LONGTEXT',
  2167. 'list:string': 'LONGTEXT',
  2168. 'list:reference': 'LONGTEXT',
  2169. 'big-id': 'BIGINT AUTO_INCREMENT NOT NULL',
  2170. 'big-reference': 'BIGINT, INDEX %(index_name)s (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2171. }
  2172. QUOTE_TEMPLATE = "`%s`"
  2173. def varquote(self,name):
  2174. return varquote_aux(name,'`%s`')
  2175. def RANDOM(self):
  2176. return 'RAND()'
  2177. def SUBSTRING(self,field,parameters):
  2178. return 'SUBSTRING(%s,%s,%s)' % (self.expand(field),
  2179. parameters[0], parameters[1])
  2180. def EPOCH(self, first):
  2181. return "UNIX_TIMESTAMP(%s)" % self.expand(first)
  2182. def CONCAT(self, *items):
  2183. return 'CONCAT(%s)' % ','.join(self.expand(x,'string') for x in items)
  2184. def REGEXP(self,first,second):
  2185. return '(%s REGEXP %s)' % (self.expand(first),
  2186. self.expand(second,'string'))
  2187. def _drop(self,table,mode):
2188. # this breaks referential integrity, but without it MySQL will not drop the table
  2189. return ['SET FOREIGN_KEY_CHECKS=0;','DROP TABLE %s;' % table,
  2190. 'SET FOREIGN_KEY_CHECKS=1;']
  2191. def _insert_empty(self, table):
  2192. return 'INSERT INTO %s VALUES (DEFAULT);' % table
  2193. def distributed_transaction_begin(self,key):
  2194. self.execute('XA START;')
  2195. def prepare(self,key):
  2196. self.execute("XA END;")
  2197. self.execute("XA PREPARE;")
2198. def commit_prepared(self,key):
  2199. self.execute("XA COMMIT;")
  2200. def rollback_prepared(self,key):
  2201. self.execute("XA ROLLBACK;")
  2202. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
  2203. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2204. credential_decoder=IDENTITY, driver_args={},
  2205. adapter_args={}, do_connect=True, after_connection=None):
  2206. self.db = db
  2207. self.dbengine = "mysql"
  2208. self.uri = uri
  2209. if do_connect: self.find_driver(adapter_args,uri)
  2210. self.pool_size = pool_size
  2211. self.folder = folder
  2212. self.db_codec = db_codec
  2213. self._after_connection = after_connection
  2214. self.find_or_make_work_folder()
  2215. ruri = uri.split('://',1)[1]
  2216. m = self.REGEX_URI.match(ruri)
  2217. if not m:
  2218. raise SyntaxError(
  2219. "Invalid URI string in DAL: %s" % self.uri)
  2220. user = credential_decoder(m.group('user'))
  2221. if not user:
  2222. raise SyntaxError('User required')
  2223. password = credential_decoder(m.group('password'))
  2224. if not password:
  2225. password = ''
  2226. host = m.group('host')
  2227. if not host:
  2228. raise SyntaxError('Host name required')
  2229. db = m.group('db')
  2230. if not db:
  2231. raise SyntaxError('Database name required')
  2232. port = int(m.group('port') or '3306')
  2233. charset = m.group('charset') or 'utf8'
  2234. driver_args.update(db=db,
  2235. user=credential_decoder(user),
  2236. passwd=credential_decoder(password),
  2237. host=host,
  2238. port=port,
  2239. charset=charset)
  2240. def connector(driver_args=driver_args):
  2241. return self.driver.connect(**driver_args)
  2242. self.connector = connector
  2243. if do_connect: self.reconnect()
  2244. def after_connection(self):
  2245. self.execute('SET FOREIGN_KEY_CHECKS=1;')
  2246. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  2247. def lastrowid(self,table):
  2248. self.execute('select last_insert_id();')
  2249. return int(self.cursor.fetchone()[0])
  2250. class PostgreSQLAdapter(BaseAdapter):
  2251. drivers = ('psycopg2','pg8000')
  2252. support_distributed_transaction = True
  2253. types = {
  2254. 'boolean': 'CHAR(1)',
  2255. 'string': 'VARCHAR(%(length)s)',
  2256. 'text': 'TEXT',
  2257. 'json': 'TEXT',
  2258. 'password': 'VARCHAR(%(length)s)',
  2259. 'blob': 'BYTEA',
  2260. 'upload': 'VARCHAR(%(length)s)',
  2261. 'integer': 'INTEGER',
  2262. 'bigint': 'BIGINT',
  2263. 'float': 'FLOAT',
  2264. 'double': 'FLOAT8',
  2265. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2266. 'date': 'DATE',
  2267. 'time': 'TIME',
  2268. 'datetime': 'TIMESTAMP',
  2269. 'id': 'SERIAL PRIMARY KEY',
  2270. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2271. 'list:integer': 'TEXT',
  2272. 'list:string': 'TEXT',
  2273. 'list:reference': 'TEXT',
  2274. 'geometry': 'GEOMETRY',
  2275. 'geography': 'GEOGRAPHY',
  2276. 'big-id': 'BIGSERIAL PRIMARY KEY',
  2277. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2278. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2279. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2280. }
  2281. QUOTE_TEMPLATE = '%s'
  2282. def varquote(self,name):
  2283. return varquote_aux(name,'"%s"')
  2284. def adapt(self,obj):
  2285. if self.driver_name == 'psycopg2':
  2286. return psycopg2_adapt(obj).getquoted()
  2287. elif self.driver_name == 'pg8000':
  2288. return "'%s'" % str(obj).replace("%","%%").replace("'","''")
  2289. else:
  2290. return "'%s'" % str(obj).replace("'","''")
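# e.g. in the plain-string fallback adapt("O'Brien") -> "'O''Brien'";
# psycopg2 instead delegates quoting to psycopg2_adapt(...).getquoted().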
  2291. def sequence_name(self,table):
  2292. return '%s_id_Seq' % table
  2293. def RANDOM(self):
  2294. return 'RANDOM()'
  2295. def ADD(self, first, second):
  2296. t = first.type
  2297. if t in ('text','string','password', 'json', 'upload','blob'):
  2298. return '(%s || %s)' % (self.expand(first), self.expand(second, t))
  2299. else:
  2300. return '(%s + %s)' % (self.expand(first), self.expand(second, t))
  2301. def distributed_transaction_begin(self,key):
  2302. return
  2303. def prepare(self,key):
  2304. self.execute("PREPARE TRANSACTION '%s';" % key)
  2305. def commit_prepared(self,key):
  2306. self.execute("COMMIT PREPARED '%s';" % key)
  2307. def rollback_prepared(self,key):
  2308. self.execute("ROLLBACK PREPARED '%s';" % key)
  2309. def create_sequence_and_triggers(self, query, table, **args):
  2310. # following lines should only be executed if table._sequence_name does not exist
  2311. # self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
  2312. # self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
  2313. # % (table._tablename, table._fieldname, table._sequence_name))
  2314. self.execute(query)
  2315. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
  2316. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2317. credential_decoder=IDENTITY, driver_args={},
  2318. adapter_args={}, do_connect=True, srid=4326,
  2319. after_connection=None):
  2320. self.db = db
  2321. self.dbengine = "postgres"
  2322. self.uri = uri
  2323. if do_connect: self.find_driver(adapter_args,uri)
  2324. self.pool_size = pool_size
  2325. self.folder = folder
  2326. self.db_codec = db_codec
  2327. self._after_connection = after_connection
  2328. self.srid = srid
  2329. self.find_or_make_work_folder()
  2330. ruri = uri.split('://',1)[1]
  2331. m = self.REGEX_URI.match(ruri)
  2332. if not m:
  2333. raise SyntaxError("Invalid URI string in DAL")
  2334. user = credential_decoder(m.group('user'))
  2335. if not user:
  2336. raise SyntaxError('User required')
  2337. password = credential_decoder(m.group('password'))
  2338. if not password:
  2339. password = ''
  2340. host = m.group('host')
  2341. if not host:
  2342. raise SyntaxError('Host name required')
  2343. db = m.group('db')
  2344. if not db:
  2345. raise SyntaxError('Database name required')
  2346. port = m.group('port') or '5432'
  2347. sslmode = m.group('sslmode')
  2348. if sslmode:
  2349. msg = ("dbname='%s' user='%s' host='%s' "
  2350. "port=%s password='%s' sslmode='%s'") \
  2351. % (db, user, host, port, password, sslmode)
  2352. else:
  2353. msg = ("dbname='%s' user='%s' host='%s' "
  2354. "port=%s password='%s'") \
  2355. % (db, user, host, port, password)
2356. # choose driver according to uri
  2357. if self.driver:
  2358. self.__version__ = "%s %s" % (self.driver.__name__,
  2359. self.driver.__version__)
  2360. else:
  2361. self.__version__ = None
  2362. def connector(msg=msg,driver_args=driver_args):
  2363. return self.driver.connect(msg,**driver_args)
  2364. self.connector = connector
  2365. if do_connect: self.reconnect()
  2366. def after_connection(self):
  2367. self.connection.set_client_encoding('UTF8')
  2368. self.execute("SET standard_conforming_strings=on;")
  2369. self.try_json()
  2370. def lastrowid(self,table):
  2371. self.execute("select currval('%s')" % table._sequence_name)
  2372. return int(self.cursor.fetchone()[0])
  2373. def try_json(self):
  2374. # check JSON data type support
  2375. # (to be added to after_connection)
  2376. if self.driver_name == "pg8000":
  2377. supports_json = self.connection.server_version >= "9.2.0"
  2378. elif (self.driver_name == "psycopg2") and \
  2379. (self.driver.__version__ >= "2.0.12"):
  2380. supports_json = self.connection.server_version >= 90200
  2381. elif self.driver_name == "zxJDBC":
  2382. supports_json = self.connection.dbversion >= "9.2.0"
  2383. else: supports_json = None
  2384. if supports_json:
  2385. self.types["json"] = "JSON"
  2386. self.native_json = True
  2387. else: LOGGER.debug("Your database version does not support the JSON data type (using TEXT instead)")
  2388. def LIKE(self,first,second):
  2389. args = (self.expand(first), self.expand(second,'string'))
  2390. if not first.type in ('string', 'text', 'json'):
  2391. return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
  2392. else:
  2393. return '(%s LIKE %s)' % args
  2394. def ILIKE(self,first,second):
  2395. args = (self.expand(first), self.expand(second,'string'))
  2396. if not first.type in ('string', 'text', 'json'):
  2397. return '(CAST(%s AS CHAR(%s)) LIKE %s)' % (args[0], first.length, args[1])
  2398. else:
  2399. return '(%s ILIKE %s)' % args
  2400. def REGEXP(self,first,second):
  2401. return '(%s ~ %s)' % (self.expand(first),
  2402. self.expand(second,'string'))
  2403. def STARTSWITH(self,first,second):
  2404. return '(%s ILIKE %s)' % (self.expand(first),
  2405. self.expand(second+'%','string'))
  2406. def ENDSWITH(self,first,second):
  2407. return '(%s ILIKE %s)' % (self.expand(first),
  2408. self.expand('%'+second,'string'))
  2409. # GIS functions
  2410. def ST_ASGEOJSON(self, first, second):
  2411. """
  2412. http://postgis.org/docs/ST_AsGeoJSON.html
  2413. """
  2414. return 'ST_AsGeoJSON(%s,%s,%s,%s)' %(second['version'],
  2415. self.expand(first), second['precision'], second['options'])
  2416. def ST_ASTEXT(self, first):
  2417. """
  2418. http://postgis.org/docs/ST_AsText.html
  2419. """
  2420. return 'ST_AsText(%s)' %(self.expand(first))
  2421. def ST_X(self, first):
  2422. """
  2423. http://postgis.org/docs/ST_X.html
  2424. """
  2425. return 'ST_X(%s)' %(self.expand(first))
  2426. def ST_Y(self, first):
  2427. """
  2428. http://postgis.org/docs/ST_Y.html
  2429. """
  2430. return 'ST_Y(%s)' %(self.expand(first))
  2431. def ST_CONTAINS(self, first, second):
  2432. """
  2433. http://postgis.org/docs/ST_Contains.html
  2434. """
  2435. return 'ST_Contains(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2436. def ST_DISTANCE(self, first, second):
  2437. """
  2438. http://postgis.org/docs/ST_Distance.html
  2439. """
  2440. return 'ST_Distance(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2441. def ST_EQUALS(self, first, second):
  2442. """
  2443. http://postgis.org/docs/ST_Equals.html
  2444. """
  2445. return 'ST_Equals(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2446. def ST_INTERSECTS(self, first, second):
  2447. """
  2448. http://postgis.org/docs/ST_Intersects.html
  2449. """
  2450. return 'ST_Intersects(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2451. def ST_OVERLAPS(self, first, second):
  2452. """
  2453. http://postgis.org/docs/ST_Overlaps.html
  2454. """
  2455. return 'ST_Overlaps(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2456. def ST_SIMPLIFY(self, first, second):
  2457. """
  2458. http://postgis.org/docs/ST_Simplify.html
  2459. """
  2460. return 'ST_Simplify(%s,%s)' %(self.expand(first), self.expand(second, 'double'))
  2461. def ST_TOUCHES(self, first, second):
  2462. """
  2463. http://postgis.org/docs/ST_Touches.html
  2464. """
  2465. return 'ST_Touches(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2466. def ST_WITHIN(self, first, second):
  2467. """
  2468. http://postgis.org/docs/ST_Within.html
  2469. """
  2470. return 'ST_Within(%s,%s)' %(self.expand(first), self.expand(second, first.type))
  2471. def represent(self, obj, fieldtype):
  2472. field_is_type = fieldtype.startswith
  2473. if field_is_type('geo'):
  2474. srid = 4326 # postGIS default srid for geometry
  2475. geotype, parms = fieldtype[:-1].split('(')
  2476. parms = parms.split(',')
  2477. if len(parms) >= 2:
  2478. schema, srid = parms[:2]
  2479. if field_is_type('geometry'):
  2480. value = "ST_GeomFromText('%s',%s)" %(obj, srid)
  2481. elif field_is_type('geography'):
  2482. value = "ST_GeogFromText('SRID=%s;%s')" %(srid, obj)
  2483. # else:
  2484. # raise SyntaxError('Invalid field type %s' %fieldtype)
  2485. return value
  2486. return BaseAdapter.represent(self, obj, fieldtype)
  2487. class NewPostgreSQLAdapter(PostgreSQLAdapter):
  2488. drivers = ('psycopg2','pg8000')
  2489. types = {
  2490. 'boolean': 'CHAR(1)',
  2491. 'string': 'VARCHAR(%(length)s)',
  2492. 'text': 'TEXT',
  2493. 'json': 'TEXT',
  2494. 'password': 'VARCHAR(%(length)s)',
  2495. 'blob': 'BYTEA',
  2496. 'upload': 'VARCHAR(%(length)s)',
  2497. 'integer': 'INTEGER',
  2498. 'bigint': 'BIGINT',
  2499. 'float': 'FLOAT',
  2500. 'double': 'FLOAT8',
  2501. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2502. 'date': 'DATE',
  2503. 'time': 'TIME',
  2504. 'datetime': 'TIMESTAMP',
  2505. 'id': 'SERIAL PRIMARY KEY',
  2506. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2507. 'list:integer': 'BIGINT[]',
  2508. 'list:string': 'TEXT[]',
  2509. 'list:reference': 'BIGINT[]',
  2510. 'geometry': 'GEOMETRY',
  2511. 'geography': 'GEOGRAPHY',
  2512. 'big-id': 'BIGSERIAL PRIMARY KEY',
  2513. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2514. }
  2515. def parse_list_integers(self, value, field_type):
  2516. return value
  2517. def parse_list_references(self, value, field_type):
  2518. return [self.parse_reference(r, field_type[5:]) for r in value]
  2519. def parse_list_strings(self, value, field_type):
  2520. return value
  2521. def represent(self, obj, fieldtype):
  2522. field_is_type = fieldtype.startswith
  2523. if field_is_type('list:'):
  2524. if not obj:
  2525. obj = []
  2526. elif not isinstance(obj, (list, tuple)):
  2527. obj = [obj]
  2528. if field_is_type('list:string'):
  2529. obj = map(str,obj)
  2530. else:
  2531. obj = map(int,obj)
  2532. return 'ARRAY[%s]' % ','.join(repr(item) for item in obj)
  2533. return BaseAdapter.represent(self, obj, fieldtype)
  2534. class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
  2535. drivers = ('zxJDBC',)
  2536. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
  2537. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2538. credential_decoder=IDENTITY, driver_args={},
  2539. adapter_args={}, do_connect=True, after_connection=None ):
  2540. self.db = db
  2541. self.dbengine = "postgres"
  2542. self.uri = uri
  2543. if do_connect: self.find_driver(adapter_args,uri)
  2544. self.pool_size = pool_size
  2545. self.folder = folder
  2546. self.db_codec = db_codec
  2547. self._after_connection = after_connection
  2548. self.find_or_make_work_folder()
  2549. ruri = uri.split('://',1)[1]
  2550. m = self.REGEX_URI.match(ruri)
  2551. if not m:
  2552. raise SyntaxError("Invalid URI string in DAL")
  2553. user = credential_decoder(m.group('user'))
  2554. if not user:
  2555. raise SyntaxError('User required')
  2556. password = credential_decoder(m.group('password'))
  2557. if not password:
  2558. password = ''
  2559. host = m.group('host')
  2560. if not host:
  2561. raise SyntaxError('Host name required')
  2562. db = m.group('db')
  2563. if not db:
  2564. raise SyntaxError('Database name required')
  2565. port = m.group('port') or '5432'
  2566. msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
  2567. def connector(msg=msg,driver_args=driver_args):
  2568. return self.driver.connect(*msg,**driver_args)
  2569. self.connector = connector
  2570. if do_connect: self.reconnect()
  2571. def after_connection(self):
  2572. self.connection.set_client_encoding('UTF8')
  2573. self.execute('BEGIN;')
  2574. self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
  2575. self.try_json()
  2576. class OracleAdapter(BaseAdapter):
  2577. drivers = ('cx_Oracle',)
  2578. commit_on_alter_table = False
  2579. types = {
  2580. 'boolean': 'CHAR(1)',
  2581. 'string': 'VARCHAR2(%(length)s)',
  2582. 'text': 'CLOB',
  2583. 'json': 'CLOB',
  2584. 'password': 'VARCHAR2(%(length)s)',
  2585. 'blob': 'CLOB',
  2586. 'upload': 'VARCHAR2(%(length)s)',
  2587. 'integer': 'INT',
  2588. 'bigint': 'NUMBER',
  2589. 'float': 'FLOAT',
  2590. 'double': 'BINARY_DOUBLE',
  2591. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2592. 'date': 'DATE',
  2593. 'time': 'CHAR(8)',
  2594. 'datetime': 'DATE',
  2595. 'id': 'NUMBER PRIMARY KEY',
  2596. 'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2597. 'list:integer': 'CLOB',
  2598. 'list:string': 'CLOB',
  2599. 'list:reference': 'CLOB',
  2600. 'big-id': 'NUMBER PRIMARY KEY',
  2601. 'big-reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2602. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2603. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2604. }
  2605. def sequence_name(self,tablename):
  2606. return '%s_sequence' % tablename
  2607. def trigger_name(self,tablename):
  2608. return '%s_trigger' % tablename
  2609. def LEFT_JOIN(self):
  2610. return 'LEFT OUTER JOIN'
  2611. def RANDOM(self):
  2612. return 'dbms_random.value'
  2613. def NOT_NULL(self,default,field_type):
  2614. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  2615. def _drop(self,table,mode):
  2616. sequence_name = table._sequence_name
  2617. return ['DROP TABLE %s %s;' % (table, mode), 'DROP SEQUENCE %s;' % sequence_name]
  2618. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2619. if limitby:
  2620. (lmin, lmax) = limitby
  2621. if len(sql_w) > 1:
  2622. sql_w_row = sql_w + ' AND w_row > %i' % lmin
  2623. else:
  2624. sql_w_row = 'WHERE w_row > %i' % lmin
2625. return ('SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row '
    'FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;') % \
    (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
  2626. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
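# Oracle has no LIMIT/OFFSET, so pagination is emulated with nested ROWNUM
# queries: the inner query caps rows at lmax, and the outer one skips the
# first lmin rows via the synthesized w_row column.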
  2627. def constraint_name(self, tablename, fieldname):
  2628. constraint_name = BaseAdapter.constraint_name(self, tablename, fieldname)
  2629. if len(constraint_name)>30:
  2630. constraint_name = '%s_%s__constraint' % (tablename[:10], fieldname[:7])
  2631. return constraint_name
  2632. def represent_exceptions(self, obj, fieldtype):
  2633. if fieldtype == 'blob':
  2634. obj = base64.b64encode(str(obj))
  2635. return ":CLOB('%s')" % obj
  2636. elif fieldtype == 'date':
  2637. if isinstance(obj, (datetime.date, datetime.datetime)):
  2638. obj = obj.isoformat()[:10]
  2639. else:
  2640. obj = str(obj)
  2641. return "to_date('%s','yyyy-mm-dd')" % obj
  2642. elif fieldtype == 'datetime':
  2643. if isinstance(obj, datetime.datetime):
  2644. obj = obj.isoformat()[:19].replace('T',' ')
  2645. elif isinstance(obj, datetime.date):
  2646. obj = obj.isoformat()[:10]+' 00:00:00'
  2647. else:
  2648. obj = str(obj)
  2649. return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
  2650. return None
  2651. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2652. credential_decoder=IDENTITY, driver_args={},
  2653. adapter_args={}, do_connect=True, after_connection=None):
  2654. self.db = db
  2655. self.dbengine = "oracle"
  2656. self.uri = uri
  2657. if do_connect: self.find_driver(adapter_args,uri)
  2658. self.pool_size = pool_size
  2659. self.folder = folder
  2660. self.db_codec = db_codec
  2661. self._after_connection = after_connection
  2662. self.find_or_make_work_folder()
  2663. ruri = uri.split('://',1)[1]
  2664. if not 'threaded' in driver_args:
  2665. driver_args['threaded']=True
  2666. def connector(uri=ruri,driver_args=driver_args):
  2667. return self.driver.connect(uri,**driver_args)
  2668. self.connector = connector
  2669. if do_connect: self.reconnect()
  2670. def after_connection(self):
  2671. self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
  2672. self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
  2673. oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
  2674. def execute(self, command, args=None):
  2675. args = args or []
  2676. i = 1
  2677. while True:
  2678. m = self.oracle_fix.match(command)
  2679. if not m:
  2680. break
  2681. command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
  2682. args.append(m.group('clob')[6:-2].replace("''", "'"))
  2683. i += 1
  2684. if command[-1:]==';':
  2685. command = command[:-1]
  2686. return self.log_execute(command, args)
  2687. def create_sequence_and_triggers(self, query, table, **args):
  2688. tablename = table._tablename
  2689. id_name = table._id.name
  2690. sequence_name = table._sequence_name
  2691. trigger_name = table._trigger_name
  2692. self.execute(query)
  2693. self.execute('CREATE SEQUENCE %s START WITH 1 INCREMENT BY 1 NOMAXVALUE MINVALUE -1;' % sequence_name)
  2694. self.execute("""
  2695. CREATE OR REPLACE TRIGGER %(trigger_name)s BEFORE INSERT ON %(tablename)s FOR EACH ROW
  2696. DECLARE
  2697. curr_val NUMBER;
  2698. diff_val NUMBER;
  2699. PRAGMA autonomous_transaction;
  2700. BEGIN
  2701. IF :NEW.%(id)s IS NOT NULL THEN
  2702. EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
  2703. diff_val := :NEW.%(id)s - curr_val - 1;
  2704. IF diff_val != 0 THEN
  2705. EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by '|| diff_val;
  2706. EXECUTE IMMEDIATE 'SELECT %(sequence_name)s.nextval FROM dual' INTO curr_val;
  2707. EXECUTE IMMEDIATE 'alter sequence %(sequence_name)s increment by 1';
  2708. END IF;
  2709. END IF;
  2710. SELECT %(sequence_name)s.nextval INTO :NEW.%(id)s FROM DUAL;
  2711. END;
  2712. """ % dict(trigger_name=trigger_name, tablename=tablename,
  2713. sequence_name=sequence_name,id=id_name))
  2714. def lastrowid(self,table):
  2715. sequence_name = table._sequence_name
  2716. self.execute('SELECT %s.currval FROM dual;' % sequence_name)
  2717. return long(self.cursor.fetchone()[0])
  2718. #def parse_value(self, value, field_type, blob_decode=True):
  2719. # if blob_decode and isinstance(value, cx_Oracle.LOB):
  2720. # try:
  2721. # value = value.read()
  2722. # except self.driver.ProgrammingError:
  2723. # # After a subsequent fetch the LOB value is not valid anymore
  2724. # pass
  2725. # return BaseAdapter.parse_value(self, value, field_type, blob_decode)
  2726. def _fetchall(self):
  2727. if any(x[1]==cx_Oracle.CLOB for x in self.cursor.description):
  2728. return [tuple([(c.read() if type(c) == cx_Oracle.LOB else c) \
  2729. for c in r]) for r in self.cursor]
  2730. else:
  2731. return self.cursor.fetchall()
  2732. class MSSQLAdapter(BaseAdapter):
  2733. drivers = ('pyodbc',)
  2734. T_SEP = 'T'
  2735. QUOTE_TEMPLATE = "[%s]"
  2736. types = {
  2737. 'boolean': 'BIT',
  2738. 'string': 'VARCHAR(%(length)s)',
  2739. 'text': 'TEXT',
  2740. 'json': 'TEXT',
  2741. 'password': 'VARCHAR(%(length)s)',
  2742. 'blob': 'IMAGE',
  2743. 'upload': 'VARCHAR(%(length)s)',
  2744. 'integer': 'INT',
  2745. 'bigint': 'BIGINT',
  2746. 'float': 'FLOAT',
  2747. 'double': 'FLOAT',
  2748. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2749. 'date': 'DATETIME',
  2750. 'time': 'CHAR(8)',
  2751. 'datetime': 'DATETIME',
  2752. 'id': 'INT IDENTITY PRIMARY KEY',
  2753. 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2754. 'list:integer': 'TEXT',
  2755. 'list:string': 'TEXT',
  2756. 'list:reference': 'TEXT',
  2757. 'geometry': 'geometry',
  2758. 'geography': 'geography',
  2759. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  2760. 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2761. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2762. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2763. }
  2764. def concat_add(self,tablename):
  2765. return '; ALTER TABLE %s ADD ' % tablename
  2766. def varquote(self,name):
  2767. return varquote_aux(name,'[%s]')
  2768. def EXTRACT(self,field,what):
  2769. return "DATEPART(%s,%s)" % (what, self.expand(field))
  2770. def LEFT_JOIN(self):
  2771. return 'LEFT OUTER JOIN'
  2772. def RANDOM(self):
  2773. return 'NEWID()'
  2774. def ALLOW_NULL(self):
  2775. return ' NULL'
  2776. def SUBSTRING(self,field,parameters):
  2777. return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])
  2778. def PRIMARY_KEY(self,key):
  2779. return 'PRIMARY KEY CLUSTERED (%s)' % key
  2780. def AGGREGATE(self, first, what):
  2781. if what == 'LENGTH':
  2782. what = 'LEN'
  2783. return "%s(%s)" % (what, self.expand(first))
  2784. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2785. if limitby:
  2786. (lmin, lmax) = limitby
  2787. sql_s += ' TOP %i' % lmax
  2788. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
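# MSSQL only applies TOP lmax here; the lower bound lmin is enforced
# client-side by this adapter's rowslice() override further down.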
  2789. TRUE = 1
  2790. FALSE = 0
  2791. REGEX_DSN = re.compile('^(?P<dsn>.+)$')
  2792. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$')
  2793. REGEX_ARGPATTERN = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
  2794. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  2795. credential_decoder=IDENTITY, driver_args={},
  2796. adapter_args={}, do_connect=True, srid=4326,
  2797. after_connection=None):
  2798. self.db = db
  2799. self.dbengine = "mssql"
  2800. self.uri = uri
  2801. if do_connect: self.find_driver(adapter_args,uri)
  2802. self.pool_size = pool_size
  2803. self.folder = folder
  2804. self.db_codec = db_codec
  2805. self._after_connection = after_connection
  2806. self.srid = srid
  2807. self.find_or_make_work_folder()
  2808. # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
  2809. ruri = uri.split('://',1)[1]
  2810. if '@' not in ruri:
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Could not parse DSN from URI: %s' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('Invalid DSN in MSSQL URI: %s' % self.uri)
                raise e
  2823. # was cnxn = 'DSN=%s' % dsn
  2824. cnxn = dsn
  2825. else:
  2826. m = self.REGEX_URI.match(ruri)
  2827. if not m:
  2828. raise SyntaxError(
  2829. "Invalid URI string in DAL: %s" % self.uri)
  2830. user = credential_decoder(m.group('user'))
  2831. if not user:
  2832. raise SyntaxError('User required')
  2833. password = credential_decoder(m.group('password'))
  2834. if not password:
  2835. password = ''
  2836. host = m.group('host')
  2837. if not host:
  2838. raise SyntaxError('Host name required')
  2839. db = m.group('db')
  2840. if not db:
  2841. raise SyntaxError('Database name required')
  2842. port = m.group('port') or '1433'
  2843. # Parse the optional url name-value arg pairs after the '?'
  2844. # (in the form of arg1=value1&arg2=value2&...)
  2845. # Default values (drivers like FreeTDS insist on uppercase parameter keys)
  2846. argsdict = { 'DRIVER':'{SQL Server}' }
  2847. urlargs = m.group('urlargs') or ''
  2848. for argmatch in self.REGEX_ARGPATTERN.finditer(urlargs):
  2849. argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
  2850. urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.iteritems()])
  2851. cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
  2852. % (host, port, db, user, password, urlargs)
  2853. def connector(cnxn=cnxn,driver_args=driver_args):
  2854. return self.driver.connect(cnxn,**driver_args)
  2855. self.connector = connector
  2856. if do_connect: self.reconnect()
  2857. def lastrowid(self,table):
  2858. #self.execute('SELECT @@IDENTITY;')
  2859. self.execute('SELECT SCOPE_IDENTITY();')
  2860. return long(self.cursor.fetchone()[0])
  2861. def rowslice(self,rows,minimum=0,maximum=None):
  2862. if maximum is None:
  2863. return rows[minimum:]
  2864. return rows[minimum:maximum]
  2865. def EPOCH(self, first):
  2866. return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
  2867. def CONCAT(self, *items):
  2868. return '(%s)' % ' + '.join(self.expand(x,'string') for x in items)
  2869. # GIS Spatial Extensions
  2870. # No STAsGeoJSON in MSSQL
  2871. def ST_ASTEXT(self, first):
  2872. return '%s.STAsText()' %(self.expand(first))
  2873. def ST_CONTAINS(self, first, second):
  2874. return '%s.STContains(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2875. def ST_DISTANCE(self, first, second):
  2876. return '%s.STDistance(%s)' %(self.expand(first), self.expand(second, first.type))
  2877. def ST_EQUALS(self, first, second):
  2878. return '%s.STEquals(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2879. def ST_INTERSECTS(self, first, second):
  2880. return '%s.STIntersects(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2881. def ST_OVERLAPS(self, first, second):
  2882. return '%s.STOverlaps(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2883. # no STSimplify in MSSQL
  2884. def ST_TOUCHES(self, first, second):
  2885. return '%s.STTouches(%s)=1' %(self.expand(first), self.expand(second, first.type))
  2886. def ST_WITHIN(self, first, second):
  2887. return '%s.STWithin(%s)=1' %(self.expand(first), self.expand(second, first.type))
    def represent(self, obj, fieldtype):
        field_is_type = fieldtype.startswith
        if field_is_type('geometry'):
            srid = 0 # MS SQL default srid for geometry
            # a bare 'geometry' type has no '(...)' part; splitting it
            # unconditionally would raise ValueError
            if '(' in fieldtype:
                geotype, parms = fieldtype[:-1].split('(')
                if parms:
                    srid = parms
            return "geometry::STGeomFromText('%s',%s)" % (obj, srid)
        elif field_is_type('geography'):
            srid = 4326 # MS SQL default srid for geography
            if '(' in fieldtype:
                geotype, parms = fieldtype[:-1].split('(')
                if parms:
                    srid = parms
            return "geography::STGeomFromText('%s',%s)" % (obj, srid)
        return BaseAdapter.represent(self, obj, fieldtype)
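    # Example (illustrative; exact output depends on the field definition):
    # for a hypothetical Field('loc', 'geometry()') an inserted WKT value is
    # rendered as
    #
    #     geometry::STGeomFromText('POINT(-95.3 29.7)',0)
    #
    # while a 'geography' field defaults to srid 4326 instead of 0.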
class MSSQL3Adapter(MSSQLAdapter):
    """Experimental support for server-side pagination in MSSQL,
    emulating OFFSET with ROW_NUMBER()."""
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if lmin == 0:
                sql_s += ' TOP %i' % lmax
                return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
            lmin += 1
            sql_o_inner = sql_o[sql_o.find('ORDER BY ')+9:]
            sql_g_inner = sql_o[:sql_o.find('ORDER BY ')]
            sql_f_outer = ['f_%s' % f for f in range(len(sql_f.split(',')))]
            sql_f_inner = [f for f in sql_f.split(',')]
            sql_f_iproxy = ['%s AS %s' % (o, n) for (o, n) in zip(sql_f_inner, sql_f_outer)]
            sql_f_iproxy = ', '.join(sql_f_iproxy)
            sql_f_oproxy = ', '.join(sql_f_outer)
            # the window function must order by the original ORDER BY clause
            # (sql_o_inner), not by the field list
            return 'SELECT %s %s FROM (SELECT %s ROW_NUMBER() OVER (ORDER BY %s) AS w_row, %s FROM %s%s%s) TMP WHERE w_row BETWEEN %i AND %s;' % (sql_s, sql_f_oproxy, sql_s, sql_o_inner, sql_f_iproxy, sql_t, sql_w, sql_g_inner, lmin, lmax)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  2924. def rowslice(self,rows,minimum=0,maximum=None):
  2925. return rows
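    # Sketch of the emulation above for a hypothetical query with
    # limitby=(10, 30) ordered by mytable.id; the generated SQL is roughly
    #
    #     SELECT f_0, f_1 FROM
    #         (SELECT ROW_NUMBER() OVER (ORDER BY mytable.id) AS w_row,
    #                 mytable.id AS f_0, mytable.name AS f_1
    #          FROM mytable) TMP
    #     WHERE w_row BETWEEN 11 AND 30;
    #
    # The window function already applies the offset, which is why rowslice()
    # is a no-op here.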
  2926. class MSSQL2Adapter(MSSQLAdapter):
  2927. drivers = ('pyodbc',)
  2928. types = {
  2929. 'boolean': 'CHAR(1)',
  2930. 'string': 'NVARCHAR(%(length)s)',
  2931. 'text': 'NTEXT',
  2932. 'json': 'NTEXT',
  2933. 'password': 'NVARCHAR(%(length)s)',
  2934. 'blob': 'IMAGE',
  2935. 'upload': 'NVARCHAR(%(length)s)',
  2936. 'integer': 'INT',
  2937. 'bigint': 'BIGINT',
  2938. 'float': 'FLOAT',
  2939. 'double': 'FLOAT',
  2940. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  2941. 'date': 'DATETIME',
  2942. 'time': 'CHAR(8)',
  2943. 'datetime': 'DATETIME',
  2944. 'id': 'INT IDENTITY PRIMARY KEY',
  2945. 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2946. 'list:integer': 'NTEXT',
  2947. 'list:string': 'NTEXT',
  2948. 'list:reference': 'NTEXT',
  2949. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  2950. 'big-reference': 'BIGINT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2951. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2952. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  2953. }
  2954. def represent(self, obj, fieldtype):
  2955. value = BaseAdapter.represent(self, obj, fieldtype)
  2956. if fieldtype in ('string','text', 'json') and value[:1]=="'":
  2957. value = 'N'+value
  2958. return value
  2959. def execute(self,a):
  2960. return self.log_execute(a.decode('utf8'))
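    # Sketch: this adapter maps strings to NVARCHAR/NTEXT, so literals need
    # the N prefix to be treated as Unicode by SQL Server. represent() above
    # turns, e.g.,
    #
    #     represent('caffè', 'string')  ->  N'caffè'
    #
    # where a bare quoted literal would be downcast to the connection's code
    # page and lose non-Latin characters.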
  2961. class VerticaAdapter(MSSQLAdapter):
  2962. drivers = ('pyodbc',)
  2963. T_SEP = ' '
  2964. types = {
  2965. 'boolean': 'BOOLEAN',
  2966. 'string': 'VARCHAR(%(length)s)',
  2967. 'text': 'BYTEA',
  2968. 'json': 'VARCHAR(%(length)s)',
  2969. 'password': 'VARCHAR(%(length)s)',
  2970. 'blob': 'BYTEA',
  2971. 'upload': 'VARCHAR(%(length)s)',
  2972. 'integer': 'INT',
  2973. 'bigint': 'BIGINT',
  2974. 'float': 'FLOAT',
  2975. 'double': 'DOUBLE PRECISION',
  2976. 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
  2977. 'date': 'DATE',
  2978. 'time': 'TIME',
  2979. 'datetime': 'DATETIME',
  2980. 'id': 'IDENTITY',
  2981. 'reference': 'INT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2982. 'list:integer': 'BYTEA',
  2983. 'list:string': 'BYTEA',
  2984. 'list:reference': 'BYTEA',
  2985. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  2986. }
  2987. def EXTRACT(self, first, what):
  2988. return "DATE_PART('%s', TIMESTAMP %s)" % (what, self.expand(first))
  2989. def _truncate(self, table, mode=''):
  2990. tablename = table._tablename
  2991. return ['TRUNCATE %s %s;' % (tablename, mode or '')]
  2992. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  2993. if limitby:
  2994. (lmin, lmax) = limitby
  2995. sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
  2996. return 'SELECT %s %s FROM %s%s%s;' % \
  2997. (sql_s, sql_f, sql_t, sql_w, sql_o)
  2998. def lastrowid(self,table):
  2999. self.execute('SELECT LAST_INSERT_ID();')
  3000. return long(self.cursor.fetchone()[0])
  3001. def execute(self, a):
  3002. return self.log_execute(a)
  3003. class SybaseAdapter(MSSQLAdapter):
  3004. drivers = ('Sybase',)
  3005. types = {
  3006. 'boolean': 'BIT',
  3007. 'string': 'CHAR VARYING(%(length)s)',
  3008. 'text': 'TEXT',
  3009. 'json': 'TEXT',
  3010. 'password': 'CHAR VARYING(%(length)s)',
  3011. 'blob': 'IMAGE',
  3012. 'upload': 'CHAR VARYING(%(length)s)',
  3013. 'integer': 'INT',
  3014. 'bigint': 'BIGINT',
  3015. 'float': 'FLOAT',
  3016. 'double': 'FLOAT',
  3017. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3018. 'date': 'DATETIME',
  3019. 'time': 'CHAR(8)',
  3020. 'datetime': 'DATETIME',
  3021. 'id': 'INT IDENTITY PRIMARY KEY',
  3022. 'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3023. 'list:integer': 'TEXT',
  3024. 'list:string': 'TEXT',
  3025. 'list:reference': 'TEXT',
  3026. 'geometry': 'geometry',
  3027. 'geography': 'geography',
  3028. 'big-id': 'BIGINT IDENTITY PRIMARY KEY',
  3029. 'big-reference': 'BIGINT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3030. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3031. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  3032. }
  3033. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3034. credential_decoder=IDENTITY, driver_args={},
  3035. adapter_args={}, do_connect=True, srid=4326,
  3036. after_connection=None):
  3037. self.db = db
  3038. self.dbengine = "sybase"
  3039. self.uri = uri
  3040. if do_connect: self.find_driver(adapter_args,uri)
  3041. self.pool_size = pool_size
  3042. self.folder = folder
  3043. self.db_codec = db_codec
  3044. self._after_connection = after_connection
  3045. self.srid = srid
  3046. self.find_or_make_work_folder()
  3047. # ## read: http://bytes.com/groups/python/460325-cx_oracle-utf8
  3048. ruri = uri.split('://',1)[1]
  3049. if '@' not in ruri:
            try:
                m = self.REGEX_DSN.match(ruri)
                if not m:
                    raise SyntaxError(
                        'Could not parse DSN from URI: %s' % self.uri)
                dsn = m.group('dsn')
                if not dsn:
                    raise SyntaxError('DSN required')
            except SyntaxError:
                e = sys.exc_info()[1]
                LOGGER.error('Invalid DSN in Sybase URI: %s' % self.uri)
                raise e
  3062. else:
            # match against the scheme-stripped ruri, not the full uri
            m = self.REGEX_URI.match(ruri)
  3064. if not m:
  3065. raise SyntaxError(
  3066. "Invalid URI string in DAL: %s" % self.uri)
  3067. user = credential_decoder(m.group('user'))
  3068. if not user:
  3069. raise SyntaxError('User required')
  3070. password = credential_decoder(m.group('password'))
  3071. if not password:
  3072. password = ''
  3073. host = m.group('host')
  3074. if not host:
  3075. raise SyntaxError('Host name required')
  3076. db = m.group('db')
  3077. if not db:
  3078. raise SyntaxError('Database name required')
  3079. port = m.group('port') or '1433'
  3080. dsn = 'sybase:host=%s:%s;dbname=%s' % (host,port,db)
  3081. driver_args.update(user = credential_decoder(user),
  3082. password = credential_decoder(password))
  3083. def connector(dsn=dsn,driver_args=driver_args):
  3084. return self.driver.connect(dsn,**driver_args)
  3085. self.connector = connector
  3086. if do_connect: self.reconnect()
  3087. class FireBirdAdapter(BaseAdapter):
  3088. drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
  3089. commit_on_alter_table = False
  3090. support_distributed_transaction = True
  3091. types = {
  3092. 'boolean': 'CHAR(1)',
  3093. 'string': 'VARCHAR(%(length)s)',
  3094. 'text': 'BLOB SUB_TYPE 1',
  3095. 'json': 'BLOB SUB_TYPE 1',
  3096. 'password': 'VARCHAR(%(length)s)',
  3097. 'blob': 'BLOB SUB_TYPE 0',
  3098. 'upload': 'VARCHAR(%(length)s)',
  3099. 'integer': 'INTEGER',
  3100. 'bigint': 'BIGINT',
  3101. 'float': 'FLOAT',
  3102. 'double': 'DOUBLE PRECISION',
  3103. 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
  3104. 'date': 'DATE',
  3105. 'time': 'TIME',
  3106. 'datetime': 'TIMESTAMP',
  3107. 'id': 'INTEGER PRIMARY KEY',
  3108. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3109. 'list:integer': 'BLOB SUB_TYPE 1',
  3110. 'list:string': 'BLOB SUB_TYPE 1',
  3111. 'list:reference': 'BLOB SUB_TYPE 1',
  3112. 'big-id': 'BIGINT PRIMARY KEY',
  3113. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3114. }
  3115. def sequence_name(self,tablename):
  3116. return 'genid_%s' % tablename
  3117. def trigger_name(self,tablename):
  3118. return 'trg_id_%s' % tablename
  3119. def RANDOM(self):
  3120. return 'RAND()'
  3121. def EPOCH(self, first):
  3122. return "DATEDIFF(second, '1970-01-01 00:00:00', %s)" % self.expand(first)
  3123. def NOT_NULL(self,default,field_type):
  3124. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  3125. def SUBSTRING(self,field,parameters):
  3126. return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
  3127. def LENGTH(self, first):
  3128. return "CHAR_LENGTH(%s)" % self.expand(first)
  3129. def CONTAINS(self,first,second,case_sensitive=False):
  3130. if first.type.startswith('list:'):
  3131. second = Expression(None,self.CONCAT('|',Expression(
  3132. None,self.REPLACE(second,('|','||'))),'|'))
  3133. return '(%s CONTAINING %s)' % (self.expand(first),
  3134. self.expand(second, 'string'))
  3135. def _drop(self,table,mode):
  3136. sequence_name = table._sequence_name
  3137. return ['DROP TABLE %s %s;' % (table, mode), 'DROP GENERATOR %s;' % sequence_name]
  3138. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3139. if limitby:
  3140. (lmin, lmax) = limitby
  3141. sql_s = ' FIRST %i SKIP %i %s' % (lmax - lmin, lmin, sql_s)
  3142. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
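    # Sketch: Firebird pagination puts both page size and offset before the
    # field list, so limitby=(10, 30) renders roughly as
    #
    #     SELECT FIRST 20 SKIP 10 mytable.id, mytable.name FROM mytable;
    #
    # i.e. FIRST is lmax - lmin and SKIP is lmin, unlike the TOP-then-slice
    # strategy of the MSSQL adapter above.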
  3143. def _truncate(self,table,mode = ''):
  3144. return ['DELETE FROM %s;' % table._tablename,
  3145. 'SET GENERATOR %s TO 0;' % table._sequence_name]
  3146. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$')
  3147. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3148. credential_decoder=IDENTITY, driver_args={},
  3149. adapter_args={}, do_connect=True, after_connection=None):
  3150. self.db = db
  3151. self.dbengine = "firebird"
  3152. self.uri = uri
  3153. if do_connect: self.find_driver(adapter_args,uri)
  3154. self.pool_size = pool_size
  3155. self.folder = folder
  3156. self.db_codec = db_codec
  3157. self._after_connection = after_connection
  3158. self.find_or_make_work_folder()
  3159. ruri = uri.split('://',1)[1]
  3160. m = self.REGEX_URI.match(ruri)
  3161. if not m:
  3162. raise SyntaxError("Invalid URI string in DAL: %s" % self.uri)
  3163. user = credential_decoder(m.group('user'))
  3164. if not user:
  3165. raise SyntaxError('User required')
  3166. password = credential_decoder(m.group('password'))
  3167. if not password:
  3168. password = ''
  3169. host = m.group('host')
  3170. if not host:
  3171. raise SyntaxError('Host name required')
  3172. port = int(m.group('port') or 3050)
  3173. db = m.group('db')
  3174. if not db:
  3175. raise SyntaxError('Database name required')
  3176. charset = m.group('charset') or 'UTF8'
  3177. driver_args.update(dsn='%s/%s:%s' % (host,port,db),
  3178. user = credential_decoder(user),
  3179. password = credential_decoder(password),
  3180. charset = charset)
  3181. def connector(driver_args=driver_args):
  3182. return self.driver.connect(**driver_args)
  3183. self.connector = connector
  3184. if do_connect: self.reconnect()
  3185. def create_sequence_and_triggers(self, query, table, **args):
  3186. tablename = table._tablename
  3187. sequence_name = table._sequence_name
  3188. trigger_name = table._trigger_name
  3189. self.execute(query)
  3190. self.execute('create generator %s;' % sequence_name)
  3191. self.execute('set generator %s to 0;' % sequence_name)
  3192. self.execute('create trigger %s for %s active before insert position 0 as\nbegin\nif(new.id is null) then\nbegin\nnew.id = gen_id(%s, 1);\nend\nend;' % (trigger_name, tablename, sequence_name))
  3193. def lastrowid(self,table):
  3194. sequence_name = table._sequence_name
  3195. self.execute('SELECT gen_id(%s, 0) FROM rdb$database' % sequence_name)
  3196. return long(self.cursor.fetchone()[0])
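    # Illustrative DDL produced by create_sequence_and_triggers() above for a
    # hypothetical table 'mytable' (Firebird has no autoincrement column, so
    # ids come from a generator filled in by a BEFORE INSERT trigger):
    #
    #     create generator genid_mytable;
    #     set generator genid_mytable to 0;
    #     create trigger trg_id_mytable for mytable active before insert
    #     position 0 as
    #     begin
    #       if(new.id is null) then
    #       begin
    #         new.id = gen_id(genid_mytable, 1);
    #       end
    #     end;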
  3197. class FireBirdEmbeddedAdapter(FireBirdAdapter):
  3198. drivers = ('kinterbasdb','firebirdsql','fdb','pyodbc')
  3199. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$')
  3200. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3201. credential_decoder=IDENTITY, driver_args={},
  3202. adapter_args={}, do_connect=True, after_connection=None):
  3203. self.db = db
  3204. self.dbengine = "firebird"
  3205. self.uri = uri
  3206. if do_connect: self.find_driver(adapter_args,uri)
  3207. self.pool_size = pool_size
  3208. self.folder = folder
  3209. self.db_codec = db_codec
  3210. self._after_connection = after_connection
  3211. self.find_or_make_work_folder()
  3212. ruri = uri.split('://',1)[1]
  3213. m = self.REGEX_URI.match(ruri)
  3214. if not m:
  3215. raise SyntaxError(
  3216. "Invalid URI string in DAL: %s" % self.uri)
  3217. user = credential_decoder(m.group('user'))
  3218. if not user:
  3219. raise SyntaxError('User required')
  3220. password = credential_decoder(m.group('password'))
  3221. if not password:
  3222. password = ''
  3223. pathdb = m.group('path')
  3224. if not pathdb:
  3225. raise SyntaxError('Path required')
  3226. charset = m.group('charset')
  3227. if not charset:
  3228. charset = 'UTF8'
  3229. host = ''
  3230. driver_args.update(host=host,
  3231. database=pathdb,
  3232. user=credential_decoder(user),
  3233. password=credential_decoder(password),
  3234. charset=charset)
  3235. def connector(driver_args=driver_args):
  3236. return self.driver.connect(**driver_args)
  3237. self.connector = connector
  3238. if do_connect: self.reconnect()
  3239. class InformixAdapter(BaseAdapter):
  3240. drivers = ('informixdb',)
  3241. types = {
  3242. 'boolean': 'CHAR(1)',
  3243. 'string': 'VARCHAR(%(length)s)',
  3244. 'text': 'BLOB SUB_TYPE 1',
  3245. 'json': 'BLOB SUB_TYPE 1',
  3246. 'password': 'VARCHAR(%(length)s)',
  3247. 'blob': 'BLOB SUB_TYPE 0',
  3248. 'upload': 'VARCHAR(%(length)s)',
  3249. 'integer': 'INTEGER',
  3250. 'bigint': 'BIGINT',
  3251. 'float': 'FLOAT',
  3252. 'double': 'DOUBLE PRECISION',
  3253. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3254. 'date': 'DATE',
  3255. 'time': 'CHAR(8)',
  3256. 'datetime': 'DATETIME',
  3257. 'id': 'SERIAL',
  3258. 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3259. 'list:integer': 'BLOB SUB_TYPE 1',
  3260. 'list:string': 'BLOB SUB_TYPE 1',
  3261. 'list:reference': 'BLOB SUB_TYPE 1',
  3262. 'big-id': 'BIGSERIAL',
  3263. 'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3264. 'reference FK': 'REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s CONSTRAINT FK_%(table_name)s_%(field_name)s',
  3265. 'reference TFK': 'FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s CONSTRAINT TFK_%(table_name)s_%(field_name)s',
  3266. }
  3267. def RANDOM(self):
  3268. return 'Random()'
  3269. def NOT_NULL(self,default,field_type):
  3270. return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
  3271. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3272. if limitby:
  3273. (lmin, lmax) = limitby
  3274. fetch_amt = lmax - lmin
  3275. dbms_version = int(self.connection.dbms_version.split('.')[0])
  3276. if lmin and (dbms_version >= 10):
  3277. # Requires Informix 10.0+
  3278. sql_s += ' SKIP %d' % (lmin, )
  3279. if fetch_amt and (dbms_version >= 9):
  3280. # Requires Informix 9.0+
  3281. sql_s += ' FIRST %d' % (fetch_amt, )
  3282. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
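    # Sketch: Informix gates each pagination keyword on the engine version
    # reported by the driver, so limitby=(10, 30) against an 11.x server
    # renders roughly as
    #
    #     SELECT SKIP 10 FIRST 20 mytable.id FROM mytable;
    #
    # while a 9.x server only gets FIRST (no offset support).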
  3283. def represent_exceptions(self, obj, fieldtype):
  3284. if fieldtype == 'date':
  3285. if isinstance(obj, (datetime.date, datetime.datetime)):
  3286. obj = obj.isoformat()[:10]
  3287. else:
  3288. obj = str(obj)
  3289. return "to_date('%s','%%Y-%%m-%%d')" % obj
  3290. elif fieldtype == 'datetime':
  3291. if isinstance(obj, datetime.datetime):
  3292. obj = obj.isoformat()[:19].replace('T',' ')
  3293. elif isinstance(obj, datetime.date):
  3294. obj = obj.isoformat()[:10]+' 00:00:00'
  3295. else:
  3296. obj = str(obj)
  3297. return "to_date('%s','%%Y-%%m-%%d %%H:%%M:%%S')" % obj
  3298. return None
  3299. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$')
  3300. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3301. credential_decoder=IDENTITY, driver_args={},
  3302. adapter_args={}, do_connect=True, after_connection=None):
  3303. self.db = db
  3304. self.dbengine = "informix"
  3305. self.uri = uri
  3306. if do_connect: self.find_driver(adapter_args,uri)
  3307. self.pool_size = pool_size
  3308. self.folder = folder
  3309. self.db_codec = db_codec
  3310. self._after_connection = after_connection
  3311. self.find_or_make_work_folder()
  3312. ruri = uri.split('://',1)[1]
  3313. m = self.REGEX_URI.match(ruri)
  3314. if not m:
  3315. raise SyntaxError(
  3316. "Invalid URI string in DAL: %s" % self.uri)
  3317. user = credential_decoder(m.group('user'))
  3318. if not user:
  3319. raise SyntaxError('User required')
  3320. password = credential_decoder(m.group('password'))
  3321. if not password:
  3322. password = ''
  3323. host = m.group('host')
  3324. if not host:
  3325. raise SyntaxError('Host name required')
  3326. db = m.group('db')
  3327. if not db:
  3328. raise SyntaxError('Database name required')
  3329. user = credential_decoder(user)
  3330. password = credential_decoder(password)
  3331. dsn = '%s@%s' % (db,host)
  3332. driver_args.update(user=user,password=password,autocommit=True)
  3333. def connector(dsn=dsn,driver_args=driver_args):
  3334. return self.driver.connect(dsn,**driver_args)
  3335. self.connector = connector
  3336. if do_connect: self.reconnect()
  3337. def execute(self,command):
  3338. if command[-1:]==';':
  3339. command = command[:-1]
  3340. return self.log_execute(command)
  3341. def lastrowid(self,table):
  3342. return self.cursor.sqlerrd[1]
class InformixSEAdapter(InformixAdapter):
    """Work in progress: Informix SE lacks SKIP/FIRST, so limitby is
    applied client-side via rowslice()."""
  3345. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3346. return 'SELECT %s %s FROM %s%s%s;' % \
  3347. (sql_s, sql_f, sql_t, sql_w, sql_o)
  3348. def rowslice(self,rows,minimum=0,maximum=None):
  3349. if maximum is None:
  3350. return rows[minimum:]
  3351. return rows[minimum:maximum]
  3352. class DB2Adapter(BaseAdapter):
  3353. drivers = ('pyodbc',)
  3354. types = {
  3355. 'boolean': 'CHAR(1)',
  3356. 'string': 'VARCHAR(%(length)s)',
  3357. 'text': 'CLOB',
  3358. 'json': 'CLOB',
  3359. 'password': 'VARCHAR(%(length)s)',
  3360. 'blob': 'BLOB',
  3361. 'upload': 'VARCHAR(%(length)s)',
  3362. 'integer': 'INT',
  3363. 'bigint': 'BIGINT',
  3364. 'float': 'REAL',
  3365. 'double': 'DOUBLE',
  3366. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3367. 'date': 'DATE',
  3368. 'time': 'TIME',
  3369. 'datetime': 'TIMESTAMP',
  3370. 'id': 'INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
  3371. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3372. 'list:integer': 'CLOB',
  3373. 'list:string': 'CLOB',
  3374. 'list:reference': 'CLOB',
  3375. 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
  3376. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3377. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3378. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
  3379. }
  3380. def LEFT_JOIN(self):
  3381. return 'LEFT OUTER JOIN'
  3382. def RANDOM(self):
  3383. return 'RAND()'
  3384. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3385. if limitby:
  3386. (lmin, lmax) = limitby
  3387. sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
  3388. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3389. def represent_exceptions(self, obj, fieldtype):
  3390. if fieldtype == 'blob':
  3391. obj = base64.b64encode(str(obj))
  3392. return "BLOB('%s')" % obj
  3393. elif fieldtype == 'datetime':
  3394. if isinstance(obj, datetime.datetime):
  3395. obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
  3396. elif isinstance(obj, datetime.date):
  3397. obj = obj.isoformat()[:10]+'-00.00.00'
  3398. return "'%s'" % obj
  3399. return None
  3400. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3401. credential_decoder=IDENTITY, driver_args={},
  3402. adapter_args={}, do_connect=True, after_connection=None):
  3403. self.db = db
  3404. self.dbengine = "db2"
  3405. self.uri = uri
  3406. if do_connect: self.find_driver(adapter_args,uri)
  3407. self.pool_size = pool_size
  3408. self.folder = folder
  3409. self.db_codec = db_codec
  3410. self._after_connection = after_connection
  3411. self.find_or_make_work_folder()
  3412. ruri = uri.split('://', 1)[1]
  3413. def connector(cnxn=ruri,driver_args=driver_args):
  3414. return self.driver.connect(cnxn,**driver_args)
  3415. self.connector = connector
  3416. if do_connect: self.reconnect()
  3417. def execute(self,command):
  3418. if command[-1:]==';':
  3419. command = command[:-1]
  3420. return self.log_execute(command)
  3421. def lastrowid(self,table):
  3422. self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
  3423. return long(self.cursor.fetchone()[0])
  3424. def rowslice(self,rows,minimum=0,maximum=None):
  3425. if maximum is None:
  3426. return rows[minimum:]
  3427. return rows[minimum:maximum]
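    # Sketch of the DB2 strategy: FETCH FIRST caps the scan at lmax rows and
    # rowslice() trims the offset client-side, so limitby=(10, 30) becomes
    # roughly
    #
    #     SELECT mytable.id FROM mytable FETCH FIRST 30 ROWS ONLY;
    #
    # followed by rows[10:30] in Python.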
  3428. class TeradataAdapter(BaseAdapter):
  3429. drivers = ('pyodbc',)
  3430. types = {
  3431. 'boolean': 'CHAR(1)',
  3432. 'string': 'VARCHAR(%(length)s)',
  3433. 'text': 'CLOB',
  3434. 'json': 'CLOB',
  3435. 'password': 'VARCHAR(%(length)s)',
  3436. 'blob': 'BLOB',
  3437. 'upload': 'VARCHAR(%(length)s)',
  3438. 'integer': 'INT',
  3439. 'bigint': 'BIGINT',
  3440. 'float': 'REAL',
  3441. 'double': 'DOUBLE',
  3442. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3443. 'date': 'DATE',
  3444. 'time': 'TIME',
  3445. 'datetime': 'TIMESTAMP',
  3446. # Modified Constraint syntax for Teradata.
  3447. # Teradata does not support ON DELETE.
  3448. 'id': 'INT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
  3449. 'reference': 'INT',
  3450. 'list:integer': 'CLOB',
  3451. 'list:string': 'CLOB',
  3452. 'list:reference': 'CLOB',
  3453. 'big-id': 'BIGINT GENERATED ALWAYS AS IDENTITY', # Teradata Specific
  3454. 'big-reference': 'BIGINT',
  3455. 'reference FK': ' REFERENCES %(foreign_key)s',
  3456. 'reference TFK': ' FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s)',
  3457. }
  3458. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3459. credential_decoder=IDENTITY, driver_args={},
  3460. adapter_args={}, do_connect=True, after_connection=None):
  3461. self.db = db
  3462. self.dbengine = "teradata"
  3463. self.uri = uri
  3464. if do_connect: self.find_driver(adapter_args,uri)
  3465. self.pool_size = pool_size
  3466. self.folder = folder
  3467. self.db_codec = db_codec
  3468. self._after_connection = after_connection
  3469. self.find_or_make_work_folder()
  3470. ruri = uri.split('://', 1)[1]
  3471. def connector(cnxn=ruri,driver_args=driver_args):
  3472. return self.driver.connect(cnxn,**driver_args)
  3473. self.connector = connector
  3474. if do_connect: self.reconnect()
  3475. def LEFT_JOIN(self):
  3476. return 'LEFT OUTER JOIN'
  3477. # Similar to MSSQL, Teradata can't specify a range (for Pageby)
  3478. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3479. if limitby:
  3480. (lmin, lmax) = limitby
  3481. sql_s += ' TOP %i' % lmax
  3482. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3483. def _truncate(self, table, mode=''):
  3484. tablename = table._tablename
  3485. return ['DELETE FROM %s ALL;' % (tablename)]
  3486. INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name
  3487. # (ANSI-SQL wants this form of name
  3488. # to be a delimited identifier)
  3489. class IngresAdapter(BaseAdapter):
  3490. drivers = ('pyodbc',)
  3491. types = {
  3492. 'boolean': 'CHAR(1)',
  3493. 'string': 'VARCHAR(%(length)s)',
  3494. 'text': 'CLOB',
  3495. 'json': 'CLOB',
  3496. 'password': 'VARCHAR(%(length)s)', ## Not sure what this contains utf8 or nvarchar. Or even bytes?
  3497. 'blob': 'BLOB',
  3498. 'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
  3499. 'integer': 'INTEGER4', # or int8...
  3500. 'bigint': 'BIGINT',
  3501. 'float': 'FLOAT',
  3502. 'double': 'FLOAT8',
  3503. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3504. 'date': 'ANSIDATE',
  3505. 'time': 'TIME WITHOUT TIME ZONE',
  3506. 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
  3507. 'id': 'int not null unique with default next value for %s' % INGRES_SEQNAME,
  3508. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3509. 'list:integer': 'CLOB',
  3510. 'list:string': 'CLOB',
  3511. 'list:reference': 'CLOB',
  3512. 'big-id': 'bigint not null unique with default next value for %s' % INGRES_SEQNAME,
  3513. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3514. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3515. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
  3516. }
  3517. def LEFT_JOIN(self):
  3518. return 'LEFT OUTER JOIN'
  3519. def RANDOM(self):
  3520. return 'RANDOM()'
  3521. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3522. if limitby:
  3523. (lmin, lmax) = limitby
  3524. fetch_amt = lmax - lmin
  3525. if fetch_amt:
  3526. sql_s += ' FIRST %d ' % (fetch_amt, )
  3527. if lmin:
  3528. # Requires Ingres 9.2+
  3529. sql_o += ' OFFSET %d' % (lmin, )
  3530. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3531. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3532. credential_decoder=IDENTITY, driver_args={},
  3533. adapter_args={}, do_connect=True, after_connection=None):
  3534. self.db = db
  3535. self.dbengine = "ingres"
  3536. self._driver = pyodbc
  3537. self.uri = uri
  3538. if do_connect: self.find_driver(adapter_args,uri)
  3539. self.pool_size = pool_size
  3540. self.folder = folder
  3541. self.db_codec = db_codec
  3542. self._after_connection = after_connection
  3543. self.find_or_make_work_folder()
  3544. connstr = uri.split(':', 1)[1]
  3545. # Simple URI processing
  3546. connstr = connstr.lstrip()
  3547. while connstr.startswith('/'):
  3548. connstr = connstr[1:]
  3549. if '=' in connstr:
  3550. # Assume we have a regular ODBC connection string and just use it
  3551. ruri = connstr
  3552. else:
  3553. # Assume only (local) dbname is passed in with OS auth
  3554. database_name = connstr
  3555. default_driver_name = 'Ingres'
  3556. vnode = '(local)'
  3557. servertype = 'ingres'
  3558. ruri = 'Driver={%s};Server=%s;Database=%s' % (default_driver_name, vnode, database_name)
  3559. def connector(cnxn=ruri,driver_args=driver_args):
  3560. return self.driver.connect(cnxn,**driver_args)
  3561. self.connector = connector
  3562. # TODO if version is >= 10, set types['id'] to Identity column, see http://community.actian.com/wiki/Using_Ingres_Identity_Columns
  3563. if do_connect: self.reconnect()
  3564. def create_sequence_and_triggers(self, query, table, **args):
  3565. # post create table auto inc code (if needed)
  3566. # modify table to btree for performance....
  3567. # Older Ingres releases could use rule/trigger like Oracle above.
        if hasattr(table, '_primarykey'):
            modify_tbl_sql = 'modify %s to btree unique on %s' % \
                (table._tablename,
                 ', '.join(["'%s'" % x for x in table._primarykey]))
            self.execute(modify_tbl_sql)
  3573. else:
  3574. tmp_seqname='%s_iisq' % table._tablename
  3575. query=query.replace(INGRES_SEQNAME, tmp_seqname)
  3576. self.execute('create sequence %s' % tmp_seqname)
  3577. self.execute(query)
  3578. self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
  3579. def lastrowid(self,table):
  3580. tmp_seqname='%s_iisq' % table
  3581. self.execute('select current value for %s' % tmp_seqname)
  3582. return long(self.cursor.fetchone()[0]) # don't really need int type cast here...
  3583. class IngresUnicodeAdapter(IngresAdapter):
  3584. drivers = ('pyodbc',)
  3585. types = {
  3586. 'boolean': 'CHAR(1)',
  3587. 'string': 'NVARCHAR(%(length)s)',
  3588. 'text': 'NCLOB',
  3589. 'json': 'NCLOB',
  3590. 'password': 'NVARCHAR(%(length)s)', ## Not sure what this contains utf8 or nvarchar. Or even bytes?
  3591. 'blob': 'BLOB',
  3592. 'upload': 'VARCHAR(%(length)s)', ## FIXME utf8 or nvarchar... or blob? what is this type?
  3593. 'integer': 'INTEGER4', # or int8...
  3594. 'bigint': 'BIGINT',
  3595. 'float': 'FLOAT',
  3596. 'double': 'FLOAT8',
  3597. 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
  3598. 'date': 'ANSIDATE',
  3599. 'time': 'TIME WITHOUT TIME ZONE',
  3600. 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
  3601. 'id': 'INTEGER4 not null unique with default next value for %s'% INGRES_SEQNAME,
  3602. 'reference': 'INTEGER4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3603. 'list:integer': 'NCLOB',
  3604. 'list:string': 'NCLOB',
  3605. 'list:reference': 'NCLOB',
  3606. 'big-id': 'BIGINT not null unique with default next value for %s'% INGRES_SEQNAME,
  3607. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3608. 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3609. 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s', ## FIXME TODO
  3610. }
  3611. class SAPDBAdapter(BaseAdapter):
  3612. drivers = ('sapdb',)
  3613. support_distributed_transaction = False
  3614. types = {
  3615. 'boolean': 'CHAR(1)',
  3616. 'string': 'VARCHAR(%(length)s)',
  3617. 'text': 'LONG',
  3618. 'json': 'LONG',
  3619. 'password': 'VARCHAR(%(length)s)',
  3620. 'blob': 'LONG',
  3621. 'upload': 'VARCHAR(%(length)s)',
  3622. 'integer': 'INT',
  3623. 'bigint': 'BIGINT',
  3624. 'float': 'FLOAT',
  3625. 'double': 'DOUBLE PRECISION',
  3626. 'decimal': 'FIXED(%(precision)s,%(scale)s)',
  3627. 'date': 'DATE',
  3628. 'time': 'TIME',
  3629. 'datetime': 'TIMESTAMP',
  3630. 'id': 'INT PRIMARY KEY',
  3631. 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3632. 'list:integer': 'LONG',
  3633. 'list:string': 'LONG',
  3634. 'list:reference': 'LONG',
  3635. 'big-id': 'BIGINT PRIMARY KEY',
  3636. 'big-reference': 'BIGINT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
  3637. }
  3638. def sequence_name(self,table):
  3639. return '%s_id_Seq' % table
  3640. def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
  3641. if limitby:
  3642. (lmin, lmax) = limitby
  3643. if len(sql_w) > 1:
  3644. sql_w_row = sql_w + ' AND w_row > %i' % lmin
  3645. else:
  3646. sql_w_row = 'WHERE w_row > %i' % lmin
  3647. return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
  3648. return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
  3649. def create_sequence_and_triggers(self, query, table, **args):
  3650. # following lines should only be executed if table._sequence_name does not exist
  3651. self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
  3652. self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
  3653. % (table._tablename, table._id.name, table._sequence_name))
  3654. self.execute(query)
  3655. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$')
  3656. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  3657. credential_decoder=IDENTITY, driver_args={},
  3658. adapter_args={}, do_connect=True, after_connection=None):
  3659. self.db = db
  3660. self.dbengine = "sapdb"
  3661. self.uri = uri
  3662. if do_connect: self.find_driver(adapter_args,uri)
  3663. self.pool_size = pool_size
  3664. self.folder = folder
  3665. self.db_codec = db_codec
  3666. self._after_connection = after_connection
  3667. self.find_or_make_work_folder()
  3668. ruri = uri.split('://',1)[1]
  3669. m = self.REGEX_URI.match(ruri)
  3670. if not m:
  3671. raise SyntaxError("Invalid URI string in DAL")
  3672. user = credential_decoder(m.group('user'))
  3673. if not user:
  3674. raise SyntaxError('User required')
  3675. password = credential_decoder(m.group('password'))
  3676. if not password:
  3677. password = ''
  3678. host = m.group('host')
  3679. if not host:
  3680. raise SyntaxError('Host name required')
  3681. db = m.group('db')
  3682. if not db:
  3683. raise SyntaxError('Database name required')
  3684. def connector(user=user, password=password, database=db,
  3685. host=host, driver_args=driver_args):
  3686. return self.driver.Connection(user, password, database,
  3687. host, **driver_args)
  3688. self.connector = connector
  3689. if do_connect: self.reconnect()
  3690. def lastrowid(self,table):
  3691. self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
  3692. return long(self.cursor.fetchone()[0])
  3693. class CubridAdapter(MySQLAdapter):
  3694. drivers = ('cubriddb',)
  3695. REGEX_URI = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$')
  3696. def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
  3697. credential_decoder=IDENTITY, driver_args={},
  3698. adapter_args={}, do_connect=True, after_connection=None):
  3699. self.db = db
  3700. self.dbengine = "cubrid"
  3701. self.uri = uri
  3702. if do_connect: self.find_driver(adapter_args,uri)
  3703. self.pool_size = pool_size
  3704. self.folder = folder
  3705. self.db_codec = db_codec
  3706. self._after_connection = after_connection
  3707. self.find_or_make_work_folder()
  3708. ruri = uri.split('://',1)[1]
  3709. m = self.REGEX_URI.match(ruri)
  3710. if not m:
  3711. raise SyntaxError(
  3712. "Invalid URI string in DAL: %s" % self.uri)
  3713. user = credential_decoder(m.group('user'))
  3714. if not user:
  3715. raise SyntaxError('User required')
  3716. password = credential_decoder(m.group('password'))
  3717. if not password:
  3718. password = ''
  3719. host = m.group('host')
  3720. if not host:
  3721. raise SyntaxError('Host name required')
  3722. db = m.group('db')
  3723. if not db:
  3724. raise SyntaxError('Database name required')
  3725. port = int(m.group('port') or '30000')
  3726. charset = m.group('charset') or 'utf8'
  3727. user = credential_decoder(user)
  3728. passwd = credential_decoder(password)
        def connector(host=host, port=port, db=db,
                      user=user, passwd=passwd, driver_args=driver_args):
            return self.driver.connect(host, port, db, user, passwd, **driver_args)
  3732. self.connector = connector
  3733. if do_connect: self.reconnect()
  3734. def after_connection(self):
  3735. self.execute('SET FOREIGN_KEY_CHECKS=1;')
  3736. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  3737. ######## GAE MySQL ##########
  3738. class DatabaseStoredFile:
  3739. web2py_filesystem = False
  3740. def escape(self,obj):
  3741. return self.db._adapter.escape(obj)
  3742. def __init__(self,db,filename,mode):
  3743. if not db._adapter.dbengine in ('mysql', 'postgres', 'sqlite'):
  3744. raise RuntimeError("only MySQL/Postgres/SQLite can store metadata .table files in database for now")
  3745. self.db = db
  3746. self.filename = filename
  3747. self.mode = mode
  3748. if not self.web2py_filesystem:
  3749. if db._adapter.dbengine == 'mysql':
  3750. sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;"
  3751. elif db._adapter.dbengine in ('postgres', 'sqlite'):
  3752. sql = "CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(255), content TEXT, PRIMARY KEY(path));"
  3753. self.db.executesql(sql)
  3754. DatabaseStoredFile.web2py_filesystem = True
  3755. self.p=0
  3756. self.data = ''
  3757. if mode in ('r','rw','a'):
  3758. query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
  3759. % filename
  3760. rows = self.db.executesql(query)
  3761. if rows:
  3762. self.data = rows[0][0]
  3763. elif exists(filename):
  3764. datafile = open(filename, 'r')
  3765. try:
  3766. self.data = datafile.read()
  3767. finally:
  3768. datafile.close()
  3769. elif mode in ('r','rw'):
  3770. raise RuntimeError("File %s does not exist" % filename)
  3771. def read(self, bytes):
  3772. data = self.data[self.p:self.p+bytes]
  3773. self.p += len(data)
  3774. return data
  3775. def readline(self):
  3776. i = self.data.find('\n',self.p)+1
  3777. if i>0:
  3778. data, self.p = self.data[self.p:i], i
  3779. else:
  3780. data, self.p = self.data[self.p:], len(self.data)
  3781. return data
  3782. def write(self,data):
  3783. self.data += data
  3784. def close_connection(self):
  3785. if self.db is not None:
  3786. self.db.executesql(
  3787. "DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
  3788. query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')"\
  3789. % (self.filename, self.data.replace("'","''"))
  3790. self.db.executesql(query)
  3791. self.db.commit()
  3792. self.db = None
  3793. def close(self):
  3794. self.close_connection()
  3795. @staticmethod
  3796. def exists(db, filename):
  3797. if exists(filename):
  3798. return True
  3799. query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
  3800. try:
  3801. if db.executesql(query):
  3802. return True
  3803. except Exception, e:
  3804. if not db._adapter.isOperationalError(e):
  3805. raise
  3806. # no web2py_filesystem found?
  3807. tb = traceback.format_exc()
  3808. LOGGER.error("Could not retrieve %s\n%s" % (filename, tb))
  3809. return False
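# A minimal usage sketch (hypothetical filename; normally the adapter calls
# these hooks itself). DatabaseStoredFile keeps the .table migration metadata
# in a web2py_filesystem table instead of on disk, which read-only
# deployments such as GAE require:
#
#     f = DatabaseStoredFile(db, 'sample.table', 'w')
#     f.write('...serialized table metadata...')
#     f.close()                                      # persists row, commits
#     DatabaseStoredFile.exists(db, 'sample.table')  # -> True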
  3810. class UseDatabaseStoredFile:
  3811. def file_exists(self, filename):
  3812. return DatabaseStoredFile.exists(self.db,filename)
  3813. def file_open(self, filename, mode='rb', lock=True):
  3814. return DatabaseStoredFile(self.db,filename,mode)
  3815. def file_close(self, fileobj):
  3816. fileobj.close_connection()
  3817. def file_delete(self,filename):
  3818. query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
  3819. self.db.executesql(query)
  3820. self.db.commit()
  3821. class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
  3822. uploads_in_blob = True
  3823. REGEX_URI = re.compile('^(?P<instance>.*)/(?P<db>.*)$')
  3824. def __init__(self, db, uri='google:sql://realm:domain/database',
  3825. pool_size=0, folder=None, db_codec='UTF-8',
  3826. credential_decoder=IDENTITY, driver_args={},
  3827. adapter_args={}, do_connect=True, after_connection=None):
  3828. self.db = db
  3829. self.dbengine = "mysql"
  3830. self.uri = uri
  3831. self.pool_size = pool_size
  3832. self.db_codec = db_codec
  3833. self._after_connection = after_connection
  3834. self.folder = folder or pjoin('$HOME',THREAD_LOCAL.folder.split(
  3835. os.sep+'applications'+os.sep,1)[1])
  3836. ruri = uri.split("://")[1]
  3837. m = self.REGEX_URI.match(ruri)
  3838. if not m:
  3839. raise SyntaxError("Invalid URI string in SQLDB: %s" % self.uri)
  3840. instance = credential_decoder(m.group('instance'))
  3841. self.dbstring = db = credential_decoder(m.group('db'))
  3842. driver_args['instance'] = instance
  3843. if not 'charset' in driver_args:
  3844. driver_args['charset'] = 'utf8'
  3845. self.createdb = createdb = adapter_args.get('createdb',True)
  3846. if not createdb:
  3847. driver_args['database'] = db
  3848. def connector(driver_args=driver_args):
  3849. return rdbms.connect(**driver_args)
  3850. self.connector = connector
  3851. if do_connect: self.reconnect()
  3852. def after_connection(self):
  3853. if self.createdb:
  3854. # self.execute('DROP DATABASE %s' % self.dbstring)
  3855. self.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbstring)
  3856. self.execute('USE %s' % self.dbstring)
  3857. self.execute("SET FOREIGN_KEY_CHECKS=1;")
  3858. self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
  3859. def execute(self, command, *a, **b):
  3860. return self.log_execute(command.decode('utf8'), *a, **b)
  3861. class NoSQLAdapter(BaseAdapter):
  3862. can_select_for_update = False
  3863. @staticmethod
  3864. def to_unicode(obj):
  3865. if isinstance(obj, str):
  3866. return obj.decode('utf8')
  3867. elif not isinstance(obj, unicode):
  3868. return unicode(obj)
  3869. return obj
  3870. def id_query(self, table):
  3871. return table._id > 0
  3872. def represent(self, obj, fieldtype):
  3873. field_is_type = fieldtype.startswith
  3874. if isinstance(obj, CALLABLETYPES):
  3875. obj = obj()
  3876. if isinstance(fieldtype, SQLCustomType):
  3877. return fieldtype.encoder(obj)
  3878. if isinstance(obj, (Expression, Field)):
            raise SyntaxError("not supported on GAE")
  3880. if self.dbengine == 'google:datastore':
  3881. if isinstance(fieldtype, gae.Property):
  3882. return obj
  3883. is_string = isinstance(fieldtype,str)
  3884. is_list = is_string and field_is_type('list:')
  3885. if is_list:
  3886. if not obj:
  3887. obj = []
  3888. if not isinstance(obj, (list, tuple)):
  3889. obj = [obj]
  3890. if obj == '' and not \
  3891. (is_string and fieldtype[:2] in ['st','te', 'pa','up']):
  3892. return None
  3893. if not obj is None:
  3894. if isinstance(obj, list) and not is_list:
  3895. obj = [self.represent(o, fieldtype) for o in obj]
  3896. elif fieldtype in ('integer','bigint','id'):
  3897. obj = long(obj)
  3898. elif fieldtype == 'double':
  3899. obj = float(obj)
  3900. elif is_string and field_is_type('reference'):
  3901. if isinstance(obj, (Row, Reference)):
  3902. obj = obj['id']
  3903. obj = long(obj)
  3904. elif fieldtype == 'boolean':
  3905. if obj and not str(obj)[0].upper() in '0F':
  3906. obj = True
  3907. else:
  3908. obj = False
  3909. elif fieldtype == 'date':
  3910. if not isinstance(obj, datetime.date):
  3911. (y, m, d) = map(int,str(obj).strip().split('-'))
  3912. obj = datetime.date(y, m, d)
  3913. elif isinstance(obj,datetime.datetime):
  3914. (y, m, d) = (obj.year, obj.month, obj.day)
  3915. obj = datetime.date(y, m, d)
  3916. elif fieldtype == 'time':
  3917. if not isinstance(obj, datetime.time):
  3918. time_items = map(int,str(obj).strip().split(':')[:3])
  3919. if len(time_items) == 3:
  3920. (h, mi, s) = time_items
  3921. else:
  3922. (h, mi, s) = time_items + [0]
  3923. obj = datetime.time(h, mi, s)
  3924. elif fieldtype == 'datetime':
  3925. if not isinstance(obj, datetime.datetime):
  3926. (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
  3927. time_items = map(int,str(obj)[11:].strip().split(':')[:3])
  3928. while len(time_items)<3:
  3929. time_items.append(0)
  3930. (h, mi, s) = time_items
  3931. obj = datetime.datetime(y, m, d, h, mi, s)
  3932. elif fieldtype == 'blob':
  3933. pass
  3934. elif fieldtype == 'json':
  3935. if isinstance(obj, basestring):
  3936. obj = self.to_unicode(obj)
  3937. if have_serializers:
  3938. obj = serializers.loads_json(obj)
  3939. elif simplejson:
  3940. obj = simplejson.loads(obj)
  3941. else:
  3942. raise RuntimeError("missing simplejson")
  3943. elif is_string and field_is_type('list:string'):
  3944. return map(self.to_unicode,obj)
  3945. elif is_list:
  3946. return map(int,obj)
  3947. else:
  3948. obj = self.to_unicode(obj)
  3949. return obj
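    # A few illustrative conversions performed by represent() above
    # (Python 2 semantics, as in the rest of this module):
    #
    #     represent('2013-01-31', 'date')    -> datetime.date(2013, 1, 31)
    #     represent('12:30', 'time')         -> datetime.time(12, 30, 0)
    #     represent(3, 'reference thing')    -> 3L
    #     represent('F', 'boolean')          -> False
    #     represent('x', 'list:string')      -> [u'x']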
  3950. def _insert(self,table,fields):
  3951. return 'insert %s in %s' % (fields, table)
  3952. def _count(self,query,distinct=None):
  3953. return 'count %s' % repr(query)
  3954. def _select(self,query,fields,attributes):
  3955. return 'select %s where %s' % (repr(fields), repr(query))
  3956. def _delete(self,tablename, query):
  3957. return 'delete %s where %s' % (repr(tablename),repr(query))
  3958. def _update(self,tablename,query,fields):
  3959. return 'update %s (%s) where %s' % (repr(tablename),
  3960. repr(fields),repr(query))
  3961. def commit(self):
  3962. """
  3963. remember: no transactions on many NoSQL
  3964. """
  3965. pass
  3966. def rollback(self):
  3967. """
  3968. remember: no transactions on many NoSQL
  3969. """
  3970. pass
  3971. def close_connection(self):
  3972. """
  3973. remember: no transactions on many NoSQL
  3974. """
  3975. pass
  3976. # these functions should never be called!
  3977. def OR(self,first,second): raise SyntaxError("Not supported")
  3978. def AND(self,first,second): raise SyntaxError("Not supported")
  3979. def AS(self,first,second): raise SyntaxError("Not supported")
  3980. def ON(self,first,second): raise SyntaxError("Not supported")
  3981. def STARTSWITH(self,first,second=None): raise SyntaxError("Not supported")
  3982. def ENDSWITH(self,first,second=None): raise SyntaxError("Not supported")
  3983. def ADD(self,first,second): raise SyntaxError("Not supported")
  3984. def SUB(self,first,second): raise SyntaxError("Not supported")
  3985. def MUL(self,first,second): raise SyntaxError("Not supported")
  3986. def DIV(self,first,second): raise SyntaxError("Not supported")
  3987. def LOWER(self,first): raise SyntaxError("Not supported")
  3988. def UPPER(self,first): raise SyntaxError("Not supported")
  3989. def EXTRACT(self,first,what): raise SyntaxError("Not supported")
  3990. def LENGTH(self, first): raise SyntaxError("Not supported")
  3991. def AGGREGATE(self,first,what): raise SyntaxError("Not supported")
  3992. def LEFT_JOIN(self): raise SyntaxError("Not supported")
  3993. def RANDOM(self): raise SyntaxError("Not supported")
  3994. def SUBSTRING(self,field,parameters): raise SyntaxError("Not supported")
  3995. def PRIMARY_KEY(self,key): raise SyntaxError("Not supported")
  3996. def ILIKE(self,first,second): raise SyntaxError("Not supported")
  3997. def drop(self,table,mode): raise SyntaxError("Not supported")
  3998. def alias(self,table,alias): raise SyntaxError("Not supported")
  3999. def migrate_table(self,*a,**b): raise SyntaxError("Not supported")
  4000. def distributed_transaction_begin(self,key): raise SyntaxError("Not supported")
  4001. def prepare(self,key): raise SyntaxError("Not supported")
  4002. def commit_prepared(self,key): raise SyntaxError("Not supported")
  4003. def rollback_prepared(self,key): raise SyntaxError("Not supported")
  4004. def concat_add(self,table): raise SyntaxError("Not supported")
  4005. def constraint_name(self, table, fieldname): raise SyntaxError("Not supported")
  4006. def create_sequence_and_triggers(self, query, table, **args): pass
  4007. def log_execute(self,*a,**b): raise SyntaxError("Not supported")
  4008. def execute(self,*a,**b): raise SyntaxError("Not supported")
  4009. def represent_exceptions(self, obj, fieldtype): raise SyntaxError("Not supported")
  4010. def lastrowid(self,table): raise SyntaxError("Not supported")
  4011. def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError("Not supported")
  4012. class GAEF(object):
  4013. def __init__(self,name,op,value,apply):
  4014. self.name=name=='id' and '__key__' or name
  4015. self.op=op
  4016. self.value=value
  4017. self.apply=apply
  4018. def __repr__(self):
  4019. return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
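# Sketch: GAEF is the small filter container the datastore adapter compiles
# queries into. For a hypothetical db.thing.name == 'x', the EQ operator
# defined further below yields
#
#     [GAEF('name', '=', u'x', lambda a, b: a == b)]
#
# whose repr reads (name = u'x':<type 'unicode'>).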
  4020. class GoogleDatastoreAdapter(NoSQLAdapter):
  4021. uploads_in_blob = True
  4022. types = {}
  4023. def file_exists(self, filename): pass
  4024. def file_open(self, filename, mode='rb', lock=True): pass
  4025. def file_close(self, fileobj): pass
  4026. REGEX_NAMESPACE = re.compile('.*://(?P<namespace>.+)')
  4027. def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
  4028. credential_decoder=IDENTITY, driver_args={},
  4029. adapter_args={}, do_connect=True, after_connection=None):
  4030. self.types.update({
  4031. 'boolean': gae.BooleanProperty,
  4032. 'string': (lambda **kwargs: gae.StringProperty(multiline=True, **kwargs)),
  4033. 'text': gae.TextProperty,
  4034. 'json': gae.TextProperty,
  4035. 'password': gae.StringProperty,
  4036. 'blob': gae.BlobProperty,
  4037. 'upload': gae.StringProperty,
  4038. 'integer': gae.IntegerProperty,
  4039. 'bigint': gae.IntegerProperty,
  4040. 'float': gae.FloatProperty,
  4041. 'double': gae.FloatProperty,
  4042. 'decimal': GAEDecimalProperty,
  4043. 'date': gae.DateProperty,
  4044. 'time': gae.TimeProperty,
  4045. 'datetime': gae.DateTimeProperty,
  4046. 'id': None,
  4047. 'reference': gae.IntegerProperty,
  4048. 'list:string': (lambda **kwargs: gae.StringListProperty(default=None, **kwargs)),
  4049. 'list:integer': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
  4050. 'list:reference': (lambda **kwargs: gae.ListProperty(int,default=None, **kwargs)),
  4051. })
  4052. self.db = db
  4053. self.uri = uri
  4054. self.dbengine = 'google:datastore'
  4055. self.folder = folder
  4056. db['_lastsql'] = ''
  4057. self.db_codec = 'UTF-8'
  4058. self._after_connection = after_connection
  4059. self.pool_size = 0
  4060. match = self.REGEX_NAMESPACE.match(uri)
  4061. if match:
  4062. namespace_manager.set_namespace(match.group('namespace'))
  4063. def parse_id(self, value, field_type):
  4064. return value
  4065. def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
  4066. myfields = {}
  4067. for field in table:
  4068. if isinstance(polymodel,Table) and field.name in polymodel.fields():
  4069. continue
  4070. attr = {}
  4071. if isinstance(field.custom_qualifier, dict):
# these are custom properties to add to the GAE field declaration
  4073. attr = field.custom_qualifier
  4074. field_type = field.type
  4075. if isinstance(field_type, SQLCustomType):
  4076. ftype = self.types[field_type.native or field_type.type](**attr)
  4077. elif isinstance(field_type, gae.Property):
  4078. ftype = field_type
  4079. elif field_type.startswith('id'):
  4080. continue
  4081. elif field_type.startswith('decimal'):
  4082. precision, scale = field_type[7:].strip('()').split(',')
  4083. precision = int(precision)
  4084. scale = int(scale)
  4085. ftype = GAEDecimalProperty(precision, scale, **attr)
  4086. elif field_type.startswith('reference'):
  4087. if field.notnull:
attr['required'] = True
  4089. referenced = field_type[10:].strip()
  4090. ftype = self.types[field_type[:9]](referenced, **attr)
  4091. elif field_type.startswith('list:reference'):
  4092. if field.notnull:
  4093. attr['required'] = True
  4094. referenced = field_type[15:].strip()
  4095. ftype = self.types[field_type[:14]](**attr)
  4096. elif field_type.startswith('list:'):
  4097. ftype = self.types[field_type](**attr)
  4098. elif not field_type in self.types\
  4099. or not self.types[field_type]:
  4100. raise SyntaxError('Field: unknown field type: %s' % field_type)
  4101. else:
  4102. ftype = self.types[field_type](**attr)
  4103. myfields[field.name] = ftype
  4104. if not polymodel:
  4105. table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
  4106. elif polymodel==True:
  4107. table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
  4108. elif isinstance(polymodel,Table):
  4109. table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
  4110. else:
  4111. raise SyntaxError("polymodel must be None, True, a table or a tablename")
  4112. return None
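# Hedged usage sketch (table and field names are illustrative, and this
# assumes a GAE deployment): the polymodel argument flows in through
# db.define_table, e.g.
# >>> animal = db.define_table('animal', Field('name'), polymodel=True)  # doctest: +SKIP
# >>> dog = db.define_table('dog', Field('barks', 'boolean'), polymodel=animal)  # doctest: +SKIP
# so 'dog' inherits the 'animal' fields via a GAE PolyModel subclass.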
  4113. def expand(self,expression,field_type=None):
  4114. if isinstance(expression,Field):
  4115. if expression.type in ('text', 'blob', 'json'):
  4116. raise SyntaxError('AppEngine does not index by: %s' % expression.type)
  4117. return expression.name
  4118. elif isinstance(expression, (Expression, Query)):
  4119. if not expression.second is None:
  4120. return expression.op(expression.first, expression.second)
  4121. elif not expression.first is None:
  4122. return expression.op(expression.first)
  4123. else:
  4124. return expression.op()
  4125. elif field_type:
  4126. return self.represent(expression,field_type)
  4127. elif isinstance(expression,(list,tuple)):
  4128. return ','.join([self.represent(item,field_type) for item in expression])
  4129. else:
  4130. return str(expression)
  4131. ### TODO from gql.py Expression
  4132. def AND(self,first,second):
  4133. a = self.expand(first)
  4134. b = self.expand(second)
  4135. if b[0].name=='__key__' and a[0].name!='__key__':
  4136. return b+a
  4137. return a+b
  4138. def EQ(self,first,second=None):
  4139. if isinstance(second, Key):
  4140. return [GAEF(first.name,'=',second,lambda a,b:a==b)]
  4141. return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
  4142. def NE(self,first,second=None):
  4143. if first.type != 'id':
  4144. return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
  4145. else:
  4146. if not second is None:
  4147. second = Key.from_path(first._tablename, long(second))
  4148. return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
  4149. def LT(self,first,second=None):
  4150. if first.type != 'id':
  4151. return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
  4152. else:
  4153. second = Key.from_path(first._tablename, long(second))
  4154. return [GAEF(first.name,'<',second,lambda a,b:a<b)]
  4155. def LE(self,first,second=None):
  4156. if first.type != 'id':
  4157. return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
  4158. else:
  4159. second = Key.from_path(first._tablename, long(second))
  4160. return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
  4161. def GT(self,first,second=None):
  4162. if first.type != 'id' or second==0 or second == '0':
  4163. return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
  4164. else:
  4165. second = Key.from_path(first._tablename, long(second))
  4166. return [GAEF(first.name,'>',second,lambda a,b:a>b)]
  4167. def GE(self,first,second=None):
  4168. if first.type != 'id':
  4169. return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
  4170. else:
  4171. second = Key.from_path(first._tablename, long(second))
  4172. return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
  4173. def INVERT(self,first):
  4174. return '-%s' % first.name
  4175. def COMMA(self,first,second):
  4176. return '%s, %s' % (self.expand(first),self.expand(second))
  4177. def BELONGS(self,first,second=None):
  4178. if not isinstance(second,(list, tuple)):
  4179. raise SyntaxError("Not supported")
  4180. if first.type != 'id':
  4181. return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
  4182. else:
  4183. second = [Key.from_path(first._tablename, int(i)) for i in second]
  4184. return [GAEF(first.name,'in',second,lambda a,b:a in b)]
  4185. def CONTAINS(self,first,second,case_sensitive=False):
  4186. # silently ignoring: GAE can only do case sensitive matches!
  4187. if not first.type.startswith('list:'):
  4188. raise SyntaxError("Not supported")
  4189. return [GAEF(first.name,'=',self.expand(second,first.type[5:]),lambda a,b:b in a)]
  4190. def NOT(self,first):
  4191. nops = { self.EQ: self.NE,
  4192. self.NE: self.EQ,
  4193. self.LT: self.GE,
  4194. self.GT: self.LE,
  4195. self.LE: self.GT,
  4196. self.GE: self.LT}
  4197. if not isinstance(first,Query):
  4198. raise SyntaxError("Not suported")
  4199. nop = nops.get(first.op,None)
  4200. if not nop:
  4201. raise SyntaxError("Not suported %s" % first.op.__name__)
  4202. first.op = nop
  4203. return self.expand(first)
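# Sketch of the rule above (illustrative): NOT is resolved by swapping
# the operator before expansion, so
# >>> adapter.NOT(db.person.name == 'James')  # doctest: +SKIP
# behaves like
# >>> adapter.expand(db.person.name != 'James')  # doctest: +SKIP
# while queries whose operator has no listed inverse (e.g. BELONGS)
# raise SyntaxError.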
  4204. def truncate(self,table,mode):
  4205. self.db(self.db._adapter.id_query(table)).delete()
  4206. def select_raw(self,query,fields=None,attributes=None):
  4207. db = self.db
  4208. fields = fields or []
  4209. attributes = attributes or {}
  4210. args_get = attributes.get
  4211. new_fields = []
  4212. for item in fields:
  4213. if isinstance(item,SQLALL):
  4214. new_fields += item._table
  4215. else:
  4216. new_fields.append(item)
  4217. fields = new_fields
  4218. if query:
  4219. tablename = self.get_table(query)
  4220. elif fields:
  4221. tablename = fields[0].tablename
  4222. query = db._adapter.id_query(fields[0].table)
  4223. else:
  4224. raise SyntaxError("Unable to determine a tablename")
  4225. if query:
  4226. if use_common_filters(query):
  4227. query = self.common_filter(query,[tablename])
  4228. #tableobj is a GAE Model class (or subclass)
  4229. tableobj = db[tablename]._tableobj
  4230. filters = self.expand(query)
  4231. projection = None
  4232. if len(db[tablename].fields) == len(fields):
  4233. #getting all fields, not a projection query
  4234. projection = None
  4235. elif args_get('projection') == True:
  4236. projection = []
  4237. for f in fields:
  4238. if f.type in ['text', 'blob', 'json']:
  4239. raise SyntaxError(
  4240. "text and blob field types not allowed in projection queries")
  4241. else:
  4242. projection.append(f.name)
  4243. elif args_get('filterfields') == True:
  4244. projection = []
  4245. for f in fields:
  4246. projection.append(f.name)
# real projections can't include 'id';
  4248. # it will be added to the result later
  4249. query_projection = [
  4250. p for p in projection if \
  4251. p != db[tablename]._id.name] if projection and \
  4252. args_get('projection') == True\
  4253. else None
  4254. cursor = None
  4255. if isinstance(args_get('reusecursor'), str):
  4256. cursor = args_get('reusecursor')
  4257. items = gae.Query(tableobj, projection=query_projection,
  4258. cursor=cursor)
  4259. for filter in filters:
  4260. if args_get('projection') == True and \
  4261. filter.name in query_projection and \
  4262. filter.op in ['=', '<=', '>=']:
  4263. raise SyntaxError(
  4264. "projection fields cannot have equality filters")
  4265. if filter.name=='__key__' and filter.op=='>' and filter.value==0:
  4266. continue
  4267. elif filter.name=='__key__' and filter.op=='=':
  4268. if filter.value==0:
  4269. items = []
  4270. elif isinstance(filter.value, Key):
# key queries return a class instance,
  4272. # can't use projection
  4273. # extra values will be ignored in post-processing later
  4274. item = tableobj.get(filter.value)
  4275. items = (item and [item]) or []
  4276. else:
# key queries return a class instance,
  4278. # can't use projection
  4279. # extra values will be ignored in post-processing later
  4280. item = tableobj.get_by_id(filter.value)
  4281. items = (item and [item]) or []
  4282. elif isinstance(items,list): # i.e. there is a single record!
items = [i for i in items if filter.apply(
getattr(i,filter.name),filter.value)]
  4285. else:
  4286. if filter.name=='__key__' and filter.op != 'in':
  4287. items.order('__key__')
  4288. items = items.filter('%s %s' % (filter.name,filter.op),
  4289. filter.value)
  4290. if not isinstance(items,list):
  4291. if args_get('left', None):
  4292. raise SyntaxError('Set: no left join in appengine')
  4293. if args_get('groupby', None):
  4294. raise SyntaxError('Set: no groupby in appengine')
  4295. orderby = args_get('orderby', False)
  4296. if orderby:
  4297. ### THIS REALLY NEEDS IMPROVEMENT !!!
  4298. if isinstance(orderby, (list, tuple)):
  4299. orderby = xorify(orderby)
  4300. if isinstance(orderby,Expression):
  4301. orderby = self.expand(orderby)
  4302. orders = orderby.split(', ')
  4303. for order in orders:
  4304. order={'-id':'-__key__','id':'__key__'}.get(order,order)
  4305. items = items.order(order)
  4306. if args_get('limitby', None):
  4307. (lmin, lmax) = attributes['limitby']
  4308. (limit, offset) = (lmax - lmin, lmin)
  4309. rows = items.fetch(limit,offset=offset)
  4310. #cursor is only useful if there was a limit and we didn't return
  4311. # all results
  4312. if args_get('reusecursor'):
  4313. db['_lastcursor'] = items.cursor()
  4314. items = rows
  4315. return (items, tablename, projection or db[tablename].fields)
  4316. def select(self,query,fields,attributes):
  4317. """
This is the GAE version of select. Some notes to consider:
- db['_lastsql'] is not set because there is no SQL statement string
for a GAE query
- 'nativeRef' is a magical fieldname used for self references on GAE
- optional attribute 'projection' when set to True will trigger
the use of GAE projection queries. Note that there are rules for
what is accepted, imposed by GAE: each field must be indexed,
projection queries cannot contain blob or text fields, and you
cannot use == and also select that same field. See https://developers.google.com/appengine/docs/python/datastore/queries#Query_Projection
- optional attribute 'filterfields' when set to True will make web2py
parse only the explicitly listed fields into the Rows object, even
though all fields are returned in the query. This can be used to
reduce memory usage in cases where true projection queries are not
usable.
- optional attribute 'reusecursor' allows the use of a cursor with
queries that have the limitby attribute. Set the attribute to True
for the first query, and set it to the value of db['_lastcursor'] to
continue a previous query. The user must save the cursor value
between requests, and the filters must be identical. It is up to the
user to follow Google's limitations: https://developers.google.com/appengine/docs/python/datastore/queries#Query_Cursors
"""
  4339. (items, tablename, fields) = self.select_raw(query,fields,attributes)
  4340. # self.db['_lastsql'] = self._select(query,fields,attributes)
  4341. rows = [[(t==self.db[tablename]._id.name and item) or \
  4342. (t=='nativeRef' and item) or getattr(item, t) \
  4343. for t in fields] for item in items]
  4344. colnames = ['%s.%s' % (tablename, t) for t in fields]
  4345. processor = attributes.get('processor',self.parse)
  4346. return processor(rows,fields,colnames,False)
  4347. def count(self,query,distinct=None,limit=None):
  4348. if distinct:
  4349. raise RuntimeError("COUNT DISTINCT not supported")
  4350. (items, tablename, fields) = self.select_raw(query)
  4351. # self.db['_lastsql'] = self._count(query)
  4352. try:
  4353. return len(items)
  4354. except TypeError:
  4355. return items.count(limit=limit)
  4356. def delete(self,tablename, query):
  4357. """
  4358. This function was changed on 2010-05-04 because according to
  4359. http://code.google.com/p/googleappengine/issues/detail?id=3119
  4360. GAE no longer supports deleting more than 1000 records.
  4361. """
  4362. # self.db['_lastsql'] = self._delete(tablename,query)
  4363. (items, tablename, fields) = self.select_raw(query)
  4364. # items can be one item or a query
  4365. if not isinstance(items,list):
# use a keys_only query to ensure that this runs as a datastore
# small operation
  4368. leftitems = items.fetch(1000, keys_only=True)
  4369. counter = 0
  4370. while len(leftitems):
  4371. counter += len(leftitems)
  4372. gae.delete(leftitems)
  4373. leftitems = items.fetch(1000, keys_only=True)
  4374. else:
  4375. counter = len(items)
  4376. gae.delete(items)
  4377. return counter
  4378. def update(self,tablename,query,update_fields):
  4379. # self.db['_lastsql'] = self._update(tablename,query,update_fields)
  4380. (items, tablename, fields) = self.select_raw(query)
  4381. counter = 0
  4382. for item in items:
  4383. for field, value in update_fields:
  4384. setattr(item, field.name, self.represent(value,field.type))
  4385. item.put()
  4386. counter += 1
  4387. LOGGER.info(str(counter))
  4388. return counter
  4389. def insert(self,table,fields):
  4390. dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
  4391. # table._db['_lastsql'] = self._insert(table,fields)
  4392. tmp = table._tableobj(**dfields)
  4393. tmp.put()
  4394. rid = Reference(tmp.key().id())
  4395. (rid._table, rid._record, rid._gaekey) = (table, None, tmp.key())
  4396. return rid
  4397. def bulk_insert(self,table,items):
  4398. parsed_items = []
  4399. for item in items:
  4400. dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
  4401. parsed_items.append(table._tableobj(**dfields))
  4402. gae.put(parsed_items)
  4403. return True
  4404. def uuid2int(uuidv):
  4405. return uuid.UUID(uuidv).int
  4406. def int2uuid(n):
  4407. return str(uuid.UUID(int=n))
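# Round-trip example: these helpers let NoSQL adapters store UUIDs as
# integers and recover them later.
# >>> u = '9e95a44c-4b2b-4a53-8766-d6a3a54f5327'
# >>> int2uuid(uuid2int(u)) == u
# True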
  4408. class CouchDBAdapter(NoSQLAdapter):
  4409. drivers = ('couchdb',)
  4410. uploads_in_blob = True
  4411. types = {
  4412. 'boolean': bool,
  4413. 'string': str,
  4414. 'text': str,
  4415. 'json': str,
  4416. 'password': str,
  4417. 'blob': str,
  4418. 'upload': str,
  4419. 'integer': long,
  4420. 'bigint': long,
  4421. 'float': float,
  4422. 'double': float,
  4423. 'date': datetime.date,
  4424. 'time': datetime.time,
  4425. 'datetime': datetime.datetime,
  4426. 'id': long,
  4427. 'reference': long,
  4428. 'list:string': list,
  4429. 'list:integer': list,
  4430. 'list:reference': list,
  4431. }
  4432. def file_exists(self, filename): pass
  4433. def file_open(self, filename, mode='rb', lock=True): pass
  4434. def file_close(self, fileobj): pass
  4435. def expand(self,expression,field_type=None):
  4436. if isinstance(expression,Field):
  4437. if expression.type=='id':
  4438. return "%s._id" % expression.tablename
  4439. return BaseAdapter.expand(self,expression,field_type)
  4440. def AND(self,first,second):
  4441. return '(%s && %s)' % (self.expand(first),self.expand(second))
  4442. def OR(self,first,second):
  4443. return '(%s || %s)' % (self.expand(first),self.expand(second))
  4444. def EQ(self,first,second):
  4445. if second is None:
  4446. return '(%s == null)' % self.expand(first)
  4447. return '(%s == %s)' % (self.expand(first),self.expand(second,first.type))
  4448. def NE(self,first,second):
  4449. if second is None:
  4450. return '(%s != null)' % self.expand(first)
  4451. return '(%s != %s)' % (self.expand(first),self.expand(second,first.type))
  4452. def COMMA(self,first,second):
  4453. return '%s + %s' % (self.expand(first),self.expand(second))
  4454. def represent(self, obj, fieldtype):
  4455. value = NoSQLAdapter.represent(self, obj, fieldtype)
  4456. if fieldtype=='id':
  4457. return repr(str(long(value)))
  4458. elif fieldtype in ('date','time','datetime','boolean'):
  4459. return serializers.json(value)
  4460. return repr(not isinstance(value,unicode) and value \
  4461. or value and value.encode('utf8'))
  4462. def __init__(self,db,uri='couchdb://127.0.0.1:5984',
  4463. pool_size=0,folder=None,db_codec ='UTF-8',
  4464. credential_decoder=IDENTITY, driver_args={},
  4465. adapter_args={}, do_connect=True, after_connection=None):
  4466. self.db = db
  4467. self.uri = uri
  4468. if do_connect: self.find_driver(adapter_args)
  4469. self.dbengine = 'couchdb'
  4470. self.folder = folder
  4471. db['_lastsql'] = ''
  4472. self.db_codec = 'UTF-8'
  4473. self._after_connection = after_connection
  4474. self.pool_size = pool_size
  4475. url='http://'+uri[10:]
  4476. def connector(url=url,driver_args=driver_args):
  4477. return self.driver.Server(url,**driver_args)
  4478. self.reconnect(connector,cursor=False)
  4479. def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
  4480. if migrate:
  4481. try:
  4482. self.connection.create(table._tablename)
  4483. except:
  4484. pass
  4485. def insert(self,table,fields):
  4486. id = uuid2int(web2py_uuid())
  4487. ctable = self.connection[table._tablename]
  4488. values = dict((k.name,self.represent(v,k.type)) for k,v in fields)
  4489. values['_id'] = str(id)
  4490. ctable.save(values)
  4491. return id
  4492. def _select(self,query,fields,attributes):
  4493. if not isinstance(query,Query):
  4494. raise SyntaxError("Not Supported")
  4495. for key in set(attributes.keys())-SELECT_ARGS:
  4496. raise SyntaxError('invalid select attribute: %s' % key)
  4497. new_fields=[]
  4498. for item in fields:
  4499. if isinstance(item,SQLALL):
  4500. new_fields += item._table
  4501. else:
  4502. new_fields.append(item)
  4503. def uid(fd):
  4504. return fd=='id' and '_id' or fd
  4505. def get(row,fd):
  4506. return fd=='id' and long(row['_id']) or row.get(fd,None)
  4507. fields = new_fields
  4508. tablename = self.get_table(query)
  4509. fieldnames = [f.name for f in (fields or self.db[tablename])]
  4510. colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
  4511. fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
  4512. fn="(function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);})" %\
  4513. dict(t=tablename,
  4514. query=self.expand(query),
  4515. order='%s._id' % tablename,
  4516. fields=fields)
  4517. return fn, colnames
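# Illustrative sketch (table and field names are hypothetical): for a
# query like db.person.name == 'James' with the id and name fields
# selected, the generated map function fn is a string similar to
# (function(person){if((person.name == 'James'))
#     emit(person._id,[person._id,person.name]);})
# which select() below hands to ctable.query() to run server side.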
  4518. def select(self,query,fields,attributes):
  4519. if not isinstance(query,Query):
  4520. raise SyntaxError("Not Supported")
  4521. fn, colnames = self._select(query,fields,attributes)
  4522. tablename = colnames[0].split('.')[0]
  4523. ctable = self.connection[tablename]
  4524. rows = [cols['value'] for cols in ctable.query(fn)]
  4525. processor = attributes.get('processor',self.parse)
  4526. return processor(rows,fields,colnames,False)
  4527. def delete(self,tablename,query):
  4528. if not isinstance(query,Query):
  4529. raise SyntaxError("Not Supported")
  4530. if query.first.type=='id' and query.op==self.EQ:
  4531. id = query.second
  4532. tablename = query.first.tablename
  4533. assert(tablename == query.first.tablename)
  4534. ctable = self.connection[tablename]
  4535. try:
  4536. del ctable[str(id)]
  4537. return 1
  4538. except couchdb.http.ResourceNotFound:
  4539. return 0
  4540. else:
  4541. tablename = self.get_table(query)
  4542. rows = self.select(query,[self.db[tablename]._id],{})
  4543. ctable = self.connection[tablename]
  4544. for row in rows:
  4545. del ctable[str(row.id)]
  4546. return len(rows)
  4547. def update(self,tablename,query,fields):
  4548. if not isinstance(query,Query):
  4549. raise SyntaxError("Not Supported")
  4550. if query.first.type=='id' and query.op==self.EQ:
  4551. id = query.second
  4552. tablename = query.first.tablename
  4553. ctable = self.connection[tablename]
  4554. try:
  4555. doc = ctable[str(id)]
  4556. for key,value in fields:
  4557. doc[key.name] = self.represent(value,self.db[tablename][key.name].type)
  4558. ctable.save(doc)
  4559. return 1
  4560. except couchdb.http.ResourceNotFound:
  4561. return 0
  4562. else:
  4563. tablename = self.get_table(query)
  4564. rows = self.select(query,[self.db[tablename]._id],{})
  4565. ctable = self.connection[tablename]
  4566. table = self.db[tablename]
  4567. for row in rows:
  4568. doc = ctable[str(row.id)]
  4569. for key,value in fields:
  4570. doc[key.name] = self.represent(value,table[key.name].type)
  4571. ctable.save(doc)
  4572. return len(rows)
  4573. def count(self,query,distinct=None):
  4574. if distinct:
  4575. raise RuntimeError("COUNT DISTINCT not supported")
  4576. if not isinstance(query,Query):
  4577. raise SyntaxError("Not Supported")
  4578. tablename = self.get_table(query)
  4579. rows = self.select(query,[self.db[tablename]._id],{})
  4580. return len(rows)
  4581. def cleanup(text):
  4582. """
  4583. validates that the given text is clean: only contains [0-9a-zA-Z_]
  4584. """
  4585. if not REGEX_ALPHANUMERIC.match(text):
  4586. raise SyntaxError('invalid table or field name: %s' % text)
  4587. return text
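# Example:
# >>> cleanup('person')
# 'person'
# >>> cleanup('drop table')  # raises SyntaxError (space is not allowed)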
  4588. class MongoDBAdapter(NoSQLAdapter):
  4589. native_json = True
  4590. drivers = ('pymongo',)
  4591. uploads_in_blob = True
  4592. types = {
  4593. 'boolean': bool,
  4594. 'string': str,
  4595. 'text': str,
  4596. 'json': str,
  4597. 'password': str,
  4598. 'blob': str,
  4599. 'upload': str,
  4600. 'integer': long,
  4601. 'bigint': long,
  4602. 'float': float,
  4603. 'double': float,
  4604. 'date': datetime.date,
  4605. 'time': datetime.time,
  4606. 'datetime': datetime.datetime,
  4607. 'id': long,
  4608. 'reference': long,
  4609. 'list:string': list,
  4610. 'list:integer': list,
  4611. 'list:reference': list,
  4612. }
error_messages = {"javascript_needed": "This has yet to be replaced" +
" with javascript in order to work."}
  4615. def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
  4616. pool_size=0, folder=None, db_codec ='UTF-8',
  4617. credential_decoder=IDENTITY, driver_args={},
  4618. adapter_args={}, do_connect=True, after_connection=None):
  4619. self.db = db
  4620. self.uri = uri
  4621. if do_connect: self.find_driver(adapter_args)
  4622. import random
  4623. from bson.objectid import ObjectId
  4624. from bson.son import SON
  4625. import pymongo.uri_parser
  4626. m = pymongo.uri_parser.parse_uri(uri)
  4627. self.SON = SON
  4628. self.ObjectId = ObjectId
  4629. self.random = random
  4630. self.dbengine = 'mongodb'
  4631. self.folder = folder
  4632. db['_lastsql'] = ''
  4633. self.db_codec = 'UTF-8'
  4634. self._after_connection = after_connection
  4635. self.pool_size = pool_size
# this is the minimum number of replicas the adapter should wait
# for on insert/update
  4638. self.minimumreplication = adapter_args.get('minimumreplication',0)
# inserts and selects used to be performed asynchronously by default,
# but the default is now synchronous ('safe'), unless overruled by
# this adapter argument or by a per-call function parameter
  4643. self.safe = adapter_args.get('safe',True)
  4644. if isinstance(m,tuple):
  4645. m = {"database" : m[1]}
  4646. if m.get('database')==None:
  4647. raise SyntaxError("Database is required!")
  4648. def connector(uri=self.uri,m=m):
  4649. # Connection() is deprecated
  4650. if hasattr(self.driver, "MongoClient"):
  4651. Connection = self.driver.MongoClient
  4652. else:
  4653. Connection = self.driver.Connection
  4654. return Connection(uri)[m.get('database')]
  4655. self.reconnect(connector,cursor=False)
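# Hedged connection sketch (host, port and database name are
# illustrative):
# >>> db = DAL('mongodb://127.0.0.1:27017/mydb')  # doctest: +SKIP
# The connector above prefers MongoClient (newer pymongo) and falls
# back to the deprecated Connection class when it is unavailable.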
  4656. def object_id(self, arg=None):
  4657. """ Convert input to a valid Mongodb ObjectId instance
  4658. self.object_id("<random>") -> ObjectId (not unique) instance """
  4659. if not arg:
  4660. arg = 0
  4661. if isinstance(arg, basestring):
  4662. # we assume an integer as default input
  4663. rawhex = len(arg.replace("0x", "").replace("L", "")) == 24
  4664. if arg.isdigit() and (not rawhex):
  4665. arg = int(arg)
  4666. elif arg == "<random>":
  4667. arg = int("0x%sL" % \
  4668. "".join([self.random.choice("0123456789abcdef") \
  4669. for x in range(24)]), 0)
  4670. elif arg.isalnum():
  4671. if not arg.startswith("0x"):
  4672. arg = "0x%s" % arg
  4673. try:
  4674. arg = int(arg, 0)
  4675. except ValueError, e:
  4676. raise ValueError(
  4677. "invalid objectid argument string: %s" % e)
  4678. else:
  4679. raise ValueError("Invalid objectid argument string. " +
  4680. "Requires an integer or base 16 value")
  4681. elif isinstance(arg, self.ObjectId):
  4682. return arg
  4683. if not isinstance(arg, (int, long)):
  4684. raise TypeError("object_id argument must be of type " +
  4685. "ObjectId or an objectid representable integer")
if arg == 0:
hexvalue = "".zfill(24)
else:
# zero-pad to the 24 hex digits that ObjectId requires
hexvalue = hex(arg)[2:].replace("L", "").zfill(24)
  4690. return self.ObjectId(hexvalue)
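# Usage sketch (values are illustrative and assume the zero padding
# above):
# >>> adapter.object_id(1)  # doctest: +SKIP
# ObjectId('000000000000000000000001')
# >>> adapter.object_id('0x1')  # doctest: +SKIP
# ObjectId('000000000000000000000001')
# Passing "<random>" yields a random, not necessarily unique, ObjectId.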
  4691. def parse_reference(self, value, field_type):
  4692. # here we have to check for ObjectID before base parse
  4693. if isinstance(value, self.ObjectId):
  4694. value = long(str(value), 16)
  4695. return super(MongoDBAdapter,
  4696. self).parse_reference(value, field_type)
  4697. def parse_id(self, value, field_type):
  4698. if isinstance(value, self.ObjectId):
  4699. value = long(str(value), 16)
  4700. return super(MongoDBAdapter,
  4701. self).parse_id(value, field_type)
  4702. def represent(self, obj, fieldtype):
# the base adapter does not support the MongoDB ObjectId
  4704. if isinstance(obj, self.ObjectId):
  4705. value = obj
  4706. else:
  4707. value = NoSQLAdapter.represent(self, obj, fieldtype)
# reference types must be converted to ObjectId
  4709. if fieldtype =='date':
  4710. if value == None:
  4711. return value
  4712. # this piece of data can be stripped off based on the fieldtype
  4713. t = datetime.time(0, 0, 0)
# mongodb doesn't have a date type, so it must be stored as a
# datetime, string or integer
  4716. return datetime.datetime.combine(value, t)
  4717. elif fieldtype == 'time':
  4718. if value == None:
  4719. return value
# this piece of data can be stripped off based on the fieldtype
d = datetime.date(2000, 1, 1)
# mongodb doesn't have a time type, so it must be stored as a
# datetime, string or integer
  4724. return datetime.datetime.combine(d, value)
  4725. elif (isinstance(fieldtype, basestring) and
  4726. fieldtype.startswith('list:')):
  4727. if fieldtype.startswith('list:reference'):
  4728. newval = []
  4729. for v in value:
  4730. newval.append(self.object_id(v))
  4731. return newval
  4732. return value
  4733. elif ((isinstance(fieldtype, basestring) and
  4734. fieldtype.startswith("reference")) or
  4735. (isinstance(fieldtype, Table)) or fieldtype=="id"):
  4736. value = self.object_id(value)
  4737. return value
  4738. def create_table(self, table, migrate=True, fake_migrate=False,
  4739. polymodel=None, isCapped=False):
  4740. if isCapped:
  4741. raise RuntimeError("Not implemented")
  4742. def count(self, query, distinct=None, snapshot=True):
  4743. if distinct:
  4744. raise RuntimeError("COUNT DISTINCT not supported")
  4745. if not isinstance(query,Query):
  4746. raise SyntaxError("Not Supported")
  4747. tablename = self.get_table(query)
  4748. return long(self.select(query,[self.db[tablename]._id], {},
  4749. count=True,snapshot=snapshot)['count'])
# It might be faster to call pymongo's .count() directly, i.e.
# connection[table].find(query).count(), since that would avoid
# building the full return set.
  4754. def expand(self, expression, field_type=None):
  4755. if isinstance(expression, Query):
# any query using 'id':
# set name as _id (as per pymongo/mongodb primary key)
# convert second arg to an objectid field
# (if it's not already)
# if second arg is 0 convert to objectid
  4761. if isinstance(expression.first,Field) and \
  4762. ((expression.first.type == 'id') or \
  4763. ("reference" in expression.first.type)):
  4764. if expression.first.type == 'id':
  4765. expression.first.name = '_id'
  4766. # cast to Mongo ObjectId
  4767. if isinstance(expression.second, (tuple, list, set)):
  4768. expression.second = [self.object_id(item) for
  4769. item in expression.second]
  4770. else:
  4771. expression.second = self.object_id(expression.second)
  4772. result = expression.op(expression.first, expression.second)
  4773. if isinstance(expression, Field):
  4774. if expression.type=='id':
  4775. result = "_id"
  4776. else:
  4777. result = expression.name
  4778. elif isinstance(expression, (Expression, Query)):
  4779. if not expression.second is None:
  4780. result = expression.op(expression.first, expression.second)
  4781. elif not expression.first is None:
  4782. result = expression.op(expression.first)
  4783. elif not isinstance(expression.op, str):
  4784. result = expression.op()
  4785. else:
  4786. result = expression.op
  4787. elif field_type:
  4788. result = self.represent(expression,field_type)
  4789. elif isinstance(expression,(list,tuple)):
  4790. result = ','.join(self.represent(item,field_type) for
  4791. item in expression)
  4792. else:
  4793. result = expression
  4794. return result
  4795. def drop(self, table, mode=''):
  4796. ctable = self.connection[table._tablename]
  4797. ctable.drop()
  4798. def truncate(self, table, mode, safe=None):
  4799. if safe == None:
  4800. safe=self.safe
  4801. ctable = self.connection[table._tablename]
  4802. ctable.remove(None, safe=True)
  4803. def _select(self, query, fields, attributes):
  4804. if 'for_update' in attributes:
  4805. logging.warn('mongodb does not support for_update')
  4806. for key in set(attributes.keys())-set(('limitby',
  4807. 'orderby','for_update')):
  4808. if attributes[key]!=None:
  4809. logging.warn('select attribute not implemented: %s' % key)
  4810. new_fields=[]
  4811. mongosort_list = []
  4812. # try an orderby attribute
  4813. orderby = attributes.get('orderby', False)
  4814. limitby = attributes.get('limitby', False)
  4815. # distinct = attributes.get('distinct', False)
  4816. if orderby:
  4817. if isinstance(orderby, (list, tuple)):
  4818. orderby = xorify(orderby)
  4819. # !!!! need to add 'random'
  4820. for f in self.expand(orderby).split(','):
  4821. if f.startswith('-'):
  4822. mongosort_list.append((f[1:], -1))
  4823. else:
  4824. mongosort_list.append((f, 1))
  4825. if limitby:
  4826. limitby_skip, limitby_limit = limitby[0], int(limitby[1])
  4827. else:
  4828. limitby_skip = limitby_limit = 0
  4829. mongofields_dict = self.SON()
  4830. mongoqry_dict = {}
  4831. for item in fields:
  4832. if isinstance(item, SQLALL):
  4833. new_fields += item._table
  4834. else:
  4835. new_fields.append(item)
  4836. fields = new_fields
  4837. if isinstance(query,Query):
  4838. tablename = self.get_table(query)
  4839. elif len(fields) != 0:
  4840. tablename = fields[0].tablename
  4841. else:
  4842. raise SyntaxError("The table name could not be found in " +
  4843. "the query nor from the select statement.")
  4844. mongoqry_dict = self.expand(query)
  4845. fields = fields or self.db[tablename]
  4846. for field in fields:
  4847. mongofields_dict[field.name] = 1
  4848. return tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
  4849. limitby_limit, limitby_skip
  4850. def select(self, query, fields, attributes, count=False,
  4851. snapshot=False):
  4852. # TODO: support joins
  4853. tablename, mongoqry_dict, mongofields_dict, mongosort_list, \
  4854. limitby_limit, limitby_skip = self._select(query, fields, attributes)
  4855. ctable = self.connection[tablename]
  4856. if count:
  4857. return {'count' : ctable.find(
  4858. mongoqry_dict, mongofields_dict,
  4859. skip=limitby_skip, limit=limitby_limit,
  4860. sort=mongosort_list, snapshot=snapshot).count()}
  4861. else:
  4862. # pymongo cursor object
  4863. mongo_list_dicts = ctable.find(mongoqry_dict,
  4864. mongofields_dict, skip=limitby_skip,
  4865. limit=limitby_limit, sort=mongosort_list,
  4866. snapshot=snapshot)
  4867. rows = []
  4868. # populate row in proper order
  4869. # Here we replace ._id with .id to follow the standard naming
  4870. colnames = []
  4871. newnames = []
  4872. for field in fields:
  4873. colname = str(field)
  4874. colnames.append(colname)
  4875. tablename, fieldname = colname.split(".")
  4876. if fieldname == "_id":
  4877. # Mongodb reserved uuid key
  4878. field.name = "id"
  4879. newnames.append(".".join((tablename, field.name)))
  4880. for record in mongo_list_dicts:
  4881. row=[]
  4882. for colname in colnames:
  4883. tablename, fieldname = colname.split(".")
  4884. # switch to Mongo _id uuids for retrieving
# record ids
  4886. if fieldname == "id": fieldname = "_id"
  4887. if fieldname in record:
  4888. value = record[fieldname]
  4889. else:
  4890. value = None
  4891. row.append(value)
  4892. rows.append(row)
  4893. processor = attributes.get('processor', self.parse)
  4894. result = processor(rows, fields, newnames, False)
  4895. return result
  4896. def _insert(self, table, fields):
  4897. values = dict()
  4898. for k, v in fields:
  4899. if not k.name in ["id", "safe"]:
  4900. fieldname = k.name
  4901. fieldtype = table[k.name].type
  4902. values[fieldname] = self.represent(v, fieldtype)
  4903. return values
# 'safe' determines whether an asynchronous request is performed
# or a synchronous action is done.
# For safety, we use synchronous requests by default.
  4907. def insert(self, table, fields, safe=None):
  4908. if safe==None:
  4909. safe = self.safe
  4910. ctable = self.connection[table._tablename]
  4911. values = self._insert(table, fields)
  4912. ctable.insert(values, safe=safe)
  4913. return long(str(values['_id']), 16)
  4914. #this function returns a dict with the where clause and update fields
  4915. def _update(self, tablename, query, fields):
  4916. if not isinstance(query, Query):
  4917. raise SyntaxError("Not Supported")
  4918. filter = None
  4919. if query:
  4920. filter = self.expand(query)
  4921. # do not try to update id fields to avoid backend errors
  4922. modify = {'$set': dict((k.name, self.represent(v, k.type)) for
  4923. k, v in fields if (not k.name in ("_id", "id")))}
  4924. return modify, filter
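# Illustrative result (table and field names are hypothetical): for
# db(db.person.name == 'James').update(name='Jim')
# this returns roughly
# ({'$set': {'name': 'Jim'}}, {'name': 'James'})
# which update() below passes to pymongo as
# connection[tablename].update(filter, modify, multi=True, safe=safe).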
  4925. def update(self, tablename, query, fields, safe=None):
  4926. if safe == None:
  4927. safe = self.safe
# return the number of adjusted rows or zero, but raise no
# exceptions related to not finding the result
  4930. if not isinstance(query, Query):
  4931. raise RuntimeError("Not implemented")
  4932. amount = self.count(query, False)
  4933. modify, filter = self._update(tablename, query, fields)
  4934. try:
  4935. result = self.connection[tablename].update(filter,
  4936. modify, multi=True, safe=safe)
  4937. if safe:
  4938. try:
  4939. # if result count is available fetch it
  4940. return result["n"]
  4941. except (KeyError, AttributeError, TypeError):
  4942. return amount
  4943. else:
  4944. return amount
  4945. except Exception, e:
# TODO reverse the update query to verify that the update succeeded
  4947. raise RuntimeError("uncaught exception when updating rows: %s" % e)
  4948. def _delete(self, tablename, query):
  4949. if not isinstance(query, Query):
  4950. raise RuntimeError("query type %s is not supported" % \
  4951. type(query))
  4952. return self.expand(query)
  4953. def delete(self, tablename, query, safe=None):
  4954. if safe is None:
  4955. safe = self.safe
  4956. amount = 0
  4957. amount = self.count(query, False)
  4958. filter = self._delete(tablename, query)
  4959. self.connection[tablename].remove(filter, safe=safe)
  4960. return amount
  4961. def bulk_insert(self, table, items):
  4962. return [self.insert(table,item) for item in items]
  4963. ## OPERATORS
  4964. def INVERT(self, first):
  4965. #print "in invert first=%s" % first
  4966. return '-%s' % self.expand(first)
  4967. # TODO This will probably not work:(
  4968. def NOT(self, first):
  4969. result = {}
  4970. result["$not"] = self.expand(first)
  4971. return result
  4972. def AND(self,first,second):
  4973. f = self.expand(first)
  4974. s = self.expand(second)
  4975. f.update(s)
  4976. return f
  4977. def OR(self,first,second):
  4978. # pymongo expects: .find({'$or': [{'name':'1'}, {'name':'2'}]})
  4979. result = {}
  4980. f = self.expand(first)
  4981. s = self.expand(second)
  4982. result['$or'] = [f,s]
  4983. return result
  4984. def BELONGS(self, first, second):
  4985. if isinstance(second, str):
  4986. return {self.expand(first) : {"$in" : [ second[:-1]]} }
  4987. elif second==[] or second==() or second==set():
  4988. return {1:0}
  4989. items = [self.expand(item, first.type) for item in second]
  4990. return {self.expand(first) : {"$in" : items} }
  4991. def EQ(self,first,second):
  4992. result = {}
  4993. result[self.expand(first)] = self.expand(second)
  4994. return result
  4995. def NE(self, first, second=None):
  4996. result = {}
  4997. result[self.expand(first)] = {'$ne': self.expand(second)}
  4998. return result
  4999. def LT(self,first,second=None):
  5000. if second is None:
  5001. raise RuntimeError("Cannot compare %s < None" % first)
  5002. result = {}
  5003. result[self.expand(first)] = {'$lt': self.expand(second)}
  5004. return result
  5005. def LE(self,first,second=None):
  5006. if second is None:
  5007. raise RuntimeError("Cannot compare %s <= None" % first)
  5008. result = {}
  5009. result[self.expand(first)] = {'$lte': self.expand(second)}
  5010. return result
  5011. def GT(self,first,second):
  5012. result = {}
  5013. result[self.expand(first)] = {'$gt': self.expand(second)}
  5014. return result
  5015. def GE(self,first,second=None):
  5016. if second is None:
  5017. raise RuntimeError("Cannot compare %s >= None" % first)
  5018. result = {}
  5019. result[self.expand(first)] = {'$gte': self.expand(second)}
  5020. return result
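# Sketch (illustrative names): these operators translate DAL queries
# into pymongo filter documents, e.g.
# (db.person.age > 18) & (db.person.name == 'James')
# expands, via AND() merging the two dicts, to roughly
# {'age': {'$gt': 18}, 'name': 'James'}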
  5021. def ADD(self, first, second):
  5022. raise NotImplementedError(self.error_messages["javascript_needed"])
  5023. return '%s + %s' % (self.expand(first),
  5024. self.expand(second, first.type))
  5025. def SUB(self, first, second):
  5026. raise NotImplementedError(self.error_messages["javascript_needed"])
  5027. return '(%s - %s)' % (self.expand(first),
  5028. self.expand(second, first.type))
  5029. def MUL(self, first, second):
  5030. raise NotImplementedError(self.error_messages["javascript_needed"])
  5031. return '(%s * %s)' % (self.expand(first),
  5032. self.expand(second, first.type))
  5033. def DIV(self, first, second):
  5034. raise NotImplementedError(self.error_messages["javascript_needed"])
  5035. return '(%s / %s)' % (self.expand(first),
  5036. self.expand(second, first.type))
  5037. def MOD(self, first, second):
  5038. raise NotImplementedError(self.error_messages["javascript_needed"])
  5039. return '(%s %% %s)' % (self.expand(first),
  5040. self.expand(second, first.type))
  5041. def AS(self, first, second):
  5042. raise NotImplementedError(self.error_messages["javascript_needed"])
  5043. return '%s AS %s' % (self.expand(first), second)
  5044. # We could implement an option that simulates a full featured SQL
  5045. # database. But I think the option should be set explicit or
  5046. # implemented as another library.
  5047. def ON(self, first, second):
  5048. raise NotImplementedError("This is not possible in NoSQL" +
  5049. " but can be simulated with a wrapper.")
  5050. return '%s ON %s' % (self.expand(first), self.expand(second))
# Two implementations of LIKE/STARTSWITH/ENDSWITH/CONTAINS used to
# follow; Python silently keeps only the later (regex based) set,
# so the earlier dead duplicates have been removed.
def COMMA(self, first, second):
return '%s, %s' % (self.expand(first), self.expand(second))
def LIKE(self, first, second):
# translate SQL LIKE '%' wildcards into a regex
return {self.expand(first): {'$regex': \
re.escape(self.expand(second,
'string')).replace('%','.*')}}
#TODO verify full compatibility with the official SQL LIKE operator
def STARTSWITH(self, first, second):
return {self.expand(first): {'$regex' : '^' +
re.escape(self.expand(second,
'string'))}}
#TODO verify full compatibility with the official SQL LIKE operator
def ENDSWITH(self, first, second):
# the trailing '$' anchors the match, so e.g. endswith('a')
# no longer matches values that merely contain an 'a'
return {self.expand(first): {'$regex': \
re.escape(self.expand(second, 'string')) + '$'}}
#TODO verify full compatibility with the official Oracle CONTAINS operator
def CONTAINS(self, first, second, case_sensitive=False):
# the case_sensitive flag is silently ignored: mongodb regex
# matching here is always case sensitive, but the result will
# usually be the same
val = second if isinstance(second,self.ObjectId) else \
{'$regex': ".*" + re.escape(self.expand(second, 'string')) + ".*"}
return {self.expand(first) : val}
  5103. class IMAPAdapter(NoSQLAdapter):
  5104. drivers = ('imaplib',)
  5105. """ IMAP server adapter
  5106. This class is intended as an interface with
  5107. email IMAP servers to perform simple queries in the
  5108. web2py DAL query syntax, so email read, search and
other related IMAP mail services (such as those implemented
by brands like Google(r) and Yahoo!(r))
can be managed from web2py applications.
  5112. The code uses examples by Yuji Tomita on this post:
  5113. http://yuji.wordpress.com/2011/06/22/python-imaplib-imap-example-with-gmail/#comment-1137
and is based on the docs for Python imaplib, python email
and the email IETF RFCs (i.e. RFC2060 and RFC3501)
This adapter was tested with a small set of operations with Gmail(r).
Requests to other services could raise command syntax and response
data issues.
  5118. It creates its table and field names "statically",
  5119. meaning that the developer should leave the table and field
  5120. definitions to the DAL instance by calling the adapter's
  5121. .define_tables() method. The tables are defined with the
  5122. IMAP server mailbox list information.
  5123. .define_tables() returns a dictionary mapping dal tablenames
  5124. to the server mailbox names with the following structure:
  5125. {<tablename>: str <server mailbox name>}
  5126. Here is a list of supported fields:
  5127. Field Type Description
  5128. ################################################################
  5129. uid string
  5130. answered boolean Flag
  5131. created date
  5132. content list:string A list of text or html parts
  5133. to string
  5134. cc string
  5135. bcc string
size integer the number of octets of the message*
  5137. deleted boolean Flag
  5138. draft boolean Flag
  5139. flagged boolean Flag
  5140. sender string
  5141. recent boolean Flag
  5142. seen boolean Flag
  5143. subject string
  5144. mime string The mime header declaration
  5145. email string The complete RFC822 message**
  5146. attachments <type list> Each non text part as dict
  5147. encoding string The main detected encoding
  5148. *At the application side it is measured as the length of the RFC822
  5149. message string
WARNING: As row ids are mapped to email sequence numbers,
make sure your imap client web2py app does not delete messages
during select or update actions, to prevent
updating or deleting different messages.
Sequence numbers change whenever the mailbox is updated.
To avoid these sequence number issues, the use of uid fields
in query references is recommended (although the update and
delete in separate actions rule still applies).
  5158. # This is the code recommended to start imap support
  5159. # at the app's model:
  5160. imapdb = DAL("imap://user:password@server:port", pool_size=1) # port 993 for ssl
  5161. imapdb.define_tables()
  5162. Here is an (incomplete) list of possible imap commands:
  5163. # Count today's unseen messages
  5164. # smaller than 6000 octets from the
  5165. # inbox mailbox
  5166. q = imapdb.INBOX.seen == False
  5167. q &= imapdb.INBOX.created == datetime.date.today()
  5168. q &= imapdb.INBOX.size < 6000
  5169. unread = imapdb(q).count()
  5170. # Fetch last query messages
  5171. rows = imapdb(q).select()
  5172. # it is also possible to filter query select results with limitby and
  5173. # sequences of mailbox fields
  5174. set.select(<fields sequence>, limitby=(<int>, <int>))
  5175. # Mark last query messages as seen
  5176. messages = [row.uid for row in rows]
  5177. seen = imapdb(imapdb.INBOX.uid.belongs(messages)).update(seen=True)
  5178. # Delete messages in the imap database that have mails from mr. Gumby
  5179. deleted = 0
for mailbox in imapdb.tables:
deleted += imapdb(imapdb[mailbox].sender.contains("gumby")).delete()
# It is also possible to mark messages for deletion instead of
# erasing them directly, with set.update(deleted=True)
# This object gives access
# to the adapter's auto mailbox
# mapped names (which native
# mailbox has which table name)
  5188. imapdb.mailboxes <dict> # tablename, server native name pairs
  5189. # To retrieve a table native mailbox name use:
  5190. imapdb.<table>.mailbox
  5191. ### New features v2.4.1:
  5192. # Declare mailboxes statically with tablename, name pairs
  5193. # This avoids the extra server names retrieval
  5194. imapdb.define_tables({"inbox": "INBOX"})
  5195. # Selects without content/attachments/email columns will only
  5196. # fetch header and flags
  5197. imapdb(q).select(imapdb.INBOX.sender, imapdb.INBOX.subject)
  5198. """
  5199. types = {
  5200. 'string': str,
  5201. 'text': str,
  5202. 'date': datetime.date,
  5203. 'datetime': datetime.datetime,
  5204. 'id': long,
  5205. 'boolean': bool,
  5206. 'integer': int,
  5207. 'bigint': long,
  5208. 'blob': str,
  5209. 'list:string': str,
  5210. }
  5211. dbengine = 'imap'
  5212. REGEX_URI = re.compile('^(?P<user>[^:]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@]+)(\:(?P<port>[0-9]+))?$')
  5213. def __init__(self,
  5214. db,
  5215. uri,
  5216. pool_size=0,
  5217. folder=None,
  5218. db_codec ='UTF-8',
  5219. credential_decoder=IDENTITY,
  5220. driver_args={},
  5221. adapter_args={},
  5222. do_connect=True,
  5223. after_connection=None):
  5224. # db uri: user@example.com:password@imap.server.com:123
  5225. # TODO: max size adapter argument for preventing large mail transfers
  5226. self.db = db
  5227. self.uri = uri
  5228. if do_connect: self.find_driver(adapter_args)
  5229. self.pool_size=pool_size
  5230. self.folder = folder
  5231. self.db_codec = db_codec
  5232. self._after_connection = after_connection
  5233. self.credential_decoder = credential_decoder
  5234. self.driver_args = driver_args
  5235. self.adapter_args = adapter_args
  5236. self.mailbox_size = None
  5237. self.static_names = None
  5238. self.charset = sys.getfilesystemencoding()
  5239. # imap class
  5240. self.imap4 = None
  5241. uri = uri.split("://")[1]
  5242. """ MESSAGE is an identifier for sequence number"""
  5243. self.flags = ['\\Deleted', '\\Draft', '\\Flagged',
  5244. '\\Recent', '\\Seen', '\\Answered']
  5245. self.search_fields = {
  5246. 'id': 'MESSAGE', 'created': 'DATE',
  5247. 'uid': 'UID', 'sender': 'FROM',
  5248. 'to': 'TO', 'cc': 'CC',
  5249. 'bcc': 'BCC', 'content': 'TEXT',
  5250. 'size': 'SIZE', 'deleted': '\\Deleted',
  5251. 'draft': '\\Draft', 'flagged': '\\Flagged',
  5252. 'recent': '\\Recent', 'seen': '\\Seen',
  5253. 'subject': 'SUBJECT', 'answered': '\\Answered',
  5254. 'mime': None, 'email': None,
  5255. 'attachments': None
  5256. }
  5257. db['_lastsql'] = ''
  5258. m = self.REGEX_URI.match(uri)
  5259. user = m.group('user')
  5260. password = m.group('password')
  5261. host = m.group('host')
  5262. port = int(m.group('port'))
  5263. over_ssl = False
  5264. if port==993:
  5265. over_ssl = True
  5266. driver_args.update(host=host,port=port, password=password, user=user)
  5267. def connector(driver_args=driver_args):
# it is assumed that authentication always succeeds
  5269. # TODO: support direct connection and login tests
  5270. if over_ssl:
  5271. self.imap4 = self.driver.IMAP4_SSL
  5272. else:
  5273. self.imap4 = self.driver.IMAP4
  5274. connection = self.imap4(driver_args["host"], driver_args["port"])
  5275. data = connection.login(driver_args["user"], driver_args["password"])
  5276. # static mailbox list
  5277. connection.mailbox_names = None
  5278. # dummy cursor function
  5279. connection.cursor = lambda : True
  5280. return connection
  5281. self.db.define_tables = self.define_tables
  5282. self.connector = connector
  5283. if do_connect: self.reconnect()
  5284. def reconnect(self, f=None, cursor=True):
  5285. """
  5286. IMAP4 Pool connection method
the imap connection lacks a cursor command.
A custom command should be provided as a replacement
for connection pooling, to prevent uncaught remote session
closing
  5291. """
  5292. if getattr(self,'connection',None) != None:
  5293. return
  5294. if f is None:
  5295. f = self.connector
  5296. if not self.pool_size:
  5297. self.connection = f()
  5298. self.cursor = cursor and self.connection.cursor()
  5299. else:
  5300. POOLS = ConnectionPool.POOLS
  5301. uri = self.uri
  5302. while True:
  5303. GLOBAL_LOCKER.acquire()
  5304. if not uri in POOLS:
  5305. POOLS[uri] = []
  5306. if POOLS[uri]:
  5307. self.connection = POOLS[uri].pop()
  5308. GLOBAL_LOCKER.release()
  5309. self.cursor = cursor and self.connection.cursor()
  5310. if self.cursor and self.check_active_connection:
  5311. try:
  5312. # check if connection is alive or close it
  5313. result, data = self.connection.list()
  5314. except:
  5315. # Possible connection reset error
  5316. # TODO: read exception class
  5317. self.connection = f()
  5318. break
  5319. else:
  5320. GLOBAL_LOCKER.release()
  5321. self.connection = f()
  5322. self.cursor = cursor and self.connection.cursor()
  5323. break
  5324. self.after_connection_hook()
  5325. def get_last_message(self, tablename):
  5326. last_message = None
# request the mailbox list from the server
  5328. # if needed
  5329. if not isinstance(self.connection.mailbox_names, dict):
  5330. self.get_mailboxes()
  5331. try:
  5332. result = self.connection.select(self.connection.mailbox_names[tablename])
  5333. last_message = int(result[1][0])
  5334. except (IndexError, ValueError, TypeError, KeyError):
  5335. e = sys.exc_info()[1]
  5336. LOGGER.debug("Error retrieving the last mailbox sequence number. %s" % str(e))
  5337. return last_message
  5338. def get_uid_bounds(self, tablename):
  5339. if not isinstance(self.connection.mailbox_names, dict):
  5340. self.get_mailboxes()
  5341. # fetch first and last messages
  5342. # return (first, last) messages uid's
  5343. last_message = self.get_last_message(tablename)
  5344. result, data = self.connection.uid("search", None, "(ALL)")
  5345. uid_list = data[0].strip().split()
  5346. if len(uid_list) <= 0:
  5347. return None
  5348. else:
  5349. return (uid_list[0], uid_list[-1])
def convert_date(self, date, add=None):
""" Convert a date object to a string
with d-Mon-Y style for IMAP or the inverse
case
add <timedelta> adds to the date object
"""
if add is None:
add = datetime.timedelta()
  5358. months = [None, "JAN","FEB","MAR","APR","MAY","JUN",
  5359. "JUL", "AUG","SEP","OCT","NOV","DEC"]
  5360. if isinstance(date, basestring):
  5361. # Prevent unexpected date response format
  5362. try:
  5363. dayname, datestring = date.split(",")
  5364. date_list = datestring.strip().split()
  5365. year = int(date_list[2])
  5366. month = months.index(date_list[1].upper())
  5367. day = int(date_list[0])
  5368. hms = map(int, date_list[3].split(":"))
  5369. return datetime.datetime(year, month, day,
  5370. hms[0], hms[1], hms[2]) + add
  5371. except (ValueError, AttributeError, IndexError), e:
  5372. LOGGER.error("Could not parse date text: %s. %s" %
  5373. (date, e))
  5374. return None
  5375. elif isinstance(date, (datetime.datetime, datetime.date)):
  5376. return (date + add).strftime("%d-%b-%Y")
  5377. else:
  5378. return None
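# Example conversions (illustrative values):
# >>> adapter.convert_date(datetime.date(2012, 3, 1))  # doctest: +SKIP
# '01-Mar-2012'
# >>> adapter.convert_date('Tue, 01 Mar 2012 10:30:00 +0000')  # doctest: +SKIP
# datetime.datetime(2012, 3, 1, 10, 30)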
  5379. @staticmethod
  5380. def header_represent(f, r):
  5381. from email.header import decode_header
  5382. text, encoding = decode_header(f)[0]
  5383. if encoding:
  5384. text = text.decode(encoding).encode('utf-8')
  5385. return text
  5386. def encode_text(self, text, charset, errors="replace"):
  5387. """ convert text for mail to unicode"""
  5388. if text is None:
  5389. text = ""
  5390. else:
  5391. if isinstance(text, str):
  5392. if charset is None:
  5393. text = unicode(text, "utf-8", errors)
  5394. else:
  5395. text = unicode(text, charset, errors)
  5396. else:
  5397. raise Exception("Unsupported mail text type %s" % type(text))
  5398. return text.encode("utf-8")
  5399. def get_charset(self, message):
  5400. charset = message.get_content_charset()
  5401. return charset
  5402. def get_mailboxes(self):
  5403. """ Query the mail database for mailbox names """
  5404. if self.static_names:
  5405. # statically defined mailbox names
  5406. self.connection.mailbox_names = self.static_names
  5407. return self.static_names.keys()
  5408. mailboxes_list = self.connection.list()
  5409. self.connection.mailbox_names = dict()
  5410. mailboxes = list()
for item in mailboxes_list[1]:
item = item.strip()
  5415. if not "NOSELECT" in item.upper():
  5416. sub_items = item.split("\"")
  5417. sub_items = [sub_item for sub_item in sub_items \
  5418. if len(sub_item.strip()) > 0]
  5419. # mailbox = sub_items[len(sub_items) -1]
  5420. mailbox = sub_items[-1]
  5421. # remove unwanted characters and store original names
  5422. # Don't allow leading non alphabetic characters
  5423. mailbox_name = re.sub('^[_0-9]*', '', re.sub('[^_\w]','',re.sub('[/ ]','_',mailbox)))
  5424. mailboxes.append(mailbox_name)
  5425. self.connection.mailbox_names[mailbox_name] = mailbox
  5426. return mailboxes
  5427. def get_query_mailbox(self, query):
  5428. nofield = True
  5429. tablename = None
  5430. attr = query
  5431. while nofield:
  5432. if hasattr(attr, "first"):
  5433. attr = attr.first
  5434. if isinstance(attr, Field):
  5435. return attr.tablename
  5436. elif isinstance(attr, Query):
  5437. pass
  5438. else:
  5439. return None
  5440. else:
  5441. return None
  5442. return tablename
  5443. def is_flag(self, flag):
  5444. if self.search_fields.get(flag, None) in self.flags:
  5445. return True
  5446. else:
  5447. return False
  5448. def define_tables(self, mailbox_names=None):
  5449. """
  5450. Auto create common IMAP fileds
  5451. This function creates fields definitions "statically"
  5452. meaning that custom fields as in other adapters should
  5453. not be supported and definitions handled on a service/mode
  5454. basis (local syntax for Gmail(r), Ymail(r)
  5455. Returns a dictionary with tablename, server native mailbox name
  5456. pairs.
  5457. """
  5458. if mailbox_names:
  5459. # optional statically declared mailboxes
  5460. self.static_names = mailbox_names
  5461. else:
  5462. self.static_names = None
  5463. if not isinstance(self.connection.mailbox_names, dict):
  5464. self.get_mailboxes()
  5465. names = self.connection.mailbox_names.keys()
  5466. for name in names:
  5467. self.db.define_table("%s" % name,
  5468. Field("uid", "string", writable=False),
  5469. Field("answered", "boolean"),
  5470. Field("created", "datetime", writable=False),
  5471. Field("content", "list:string", writable=False),
  5472. Field("to", "string", writable=False),
  5473. Field("cc", "string", writable=False),
  5474. Field("bcc", "string", writable=False),
  5475. Field("size", "integer", writable=False),
  5476. Field("deleted", "boolean"),
  5477. Field("draft", "boolean"),
  5478. Field("flagged", "boolean"),
  5479. Field("sender", "string", writable=False),
  5480. Field("recent", "boolean", writable=False),
  5481. Field("seen", "boolean"),
  5482. Field("subject", "string", writable=False),
  5483. Field("mime", "string", writable=False),
  5484. Field("email", "string", writable=False, readable=False),
  5485. Field("attachments", list, writable=False, readable=False),
  5486. Field("encoding", writable=False)
  5487. )
5488. # Set a special mailbox attribute for storing
5489. # native mailbox names
  5490. self.db[name].mailbox = \
  5491. self.connection.mailbox_names[name]
  5492. # decode quoted printable
  5493. self.db[name].to.represent = self.db[name].cc.represent = \
  5494. self.db[name].bcc.represent = self.db[name].sender.represent = \
  5495. self.db[name].subject.represent = self.header_represent
  5496. # Set the db instance mailbox collections
  5497. self.db.mailboxes = self.connection.mailbox_names
  5498. return self.db.mailboxes
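# A minimal usage sketch with hypothetical credentials (requires the
# imaplib driver and a reachable IMAP server):
#
#   imapdb = DAL('imap://user:password@mail.example.com:993', pool_size=1)
#   imapdb.define_tables()
#   # each selectable mailbox is now a table, e.g.:
#   rows = imapdb(imapdb.INBOX.seen == False).select(limitby=(0, 10))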
  5499. def create_table(self, *args, **kwargs):
  5500. # not implemented
  5501. # but required by DAL
  5502. pass
  5503. def _select(self, query, fields, attributes):
  5504. if use_common_filters(query):
  5505. query = self.common_filter(query, [self.get_query_mailbox(query),])
  5506. return str(query)
  5507. def select(self, query, fields, attributes):
  5508. """ Search and Fetch records and return web2py rows
  5509. """
  5510. # move this statement elsewhere (upper-level)
  5511. if use_common_filters(query):
  5512. query = self.common_filter(query, [self.get_query_mailbox(query),])
  5513. import email
  5514. # get records from imap server with search + fetch
  5515. # convert results to a dictionary
  5516. tablename = None
  5517. fetch_results = list()
  5518. if isinstance(query, Query):
  5519. tablename = self.get_table(query)
  5520. mailbox = self.connection.mailbox_names.get(tablename, None)
  5521. if mailbox is None:
5522. raise ValueError("Mailbox name not found: %s" % tablename)
  5523. else:
  5524. # select with readonly
  5525. result, selected = self.connection.select(mailbox, True)
  5526. if result != "OK":
  5527. raise Exception("IMAP error: %s" % selected)
  5528. self.mailbox_size = int(selected[0])
  5529. search_query = "(%s)" % str(query).strip()
  5530. search_result = self.connection.uid("search", None, search_query)
  5531. # Normal IMAP response OK is assumed (change this)
  5532. if search_result[0] == "OK":
  5533. # For "light" remote server responses just get the first
  5534. # ten records (change for non-experimental implementation)
  5535. # However, light responses are not guaranteed with this
  5536. # approach, just fewer messages.
  5537. limitby = attributes.get('limitby', None)
  5538. messages_set = search_result[1][0].split()
  5539. # descending order
  5540. messages_set.reverse()
  5541. if limitby is not None:
  5542. # TODO: orderby, asc/desc, limitby from complete message set
  5543. messages_set = messages_set[int(limitby[0]):int(limitby[1])]
  5544. # keep the requests small for header/flags
  5545. if any([(field.name in ["content", "size",
  5546. "attachments", "email"]) for
  5547. field in fields]):
  5548. imap_fields = "(RFC822 FLAGS)"
  5549. else:
  5550. imap_fields = "(RFC822.HEADER FLAGS)"
  5551. if len(messages_set) > 0:
  5552. # create fetch results object list
5553. # fetch each remote message and store it in memory
  5554. # (change to multi-fetch command syntax for faster
  5555. # transactions)
  5556. for uid in messages_set:
  5557. # fetch the RFC822 message body
  5558. typ, data = self.connection.uid("fetch", uid, imap_fields)
  5559. if typ == "OK":
  5560. fr = {"message": int(data[0][0].split()[0]),
  5561. "uid": long(uid),
  5562. "email": email.message_from_string(data[0][1]),
  5563. "raw_message": data[0][1]}
  5564. fr["multipart"] = fr["email"].is_multipart()
  5565. # fetch flags for the message
  5566. fr["flags"] = self.driver.ParseFlags(data[1])
  5567. fetch_results.append(fr)
  5568. else:
  5569. # error retrieving the message body
  5570. raise Exception("IMAP error retrieving the body: %s" % data)
  5571. else:
  5572. raise Exception("IMAP search error: %s" % search_result[1])
  5573. elif isinstance(query, (Expression, basestring)):
  5574. raise NotImplementedError()
  5575. else:
  5576. raise TypeError("Unexpected query type")
  5577. imapqry_dict = {}
  5578. imapfields_dict = {}
  5579. if len(fields) == 1 and isinstance(fields[0], SQLALL):
  5580. allfields = True
  5581. elif len(fields) == 0:
  5582. allfields = True
  5583. else:
  5584. allfields = False
  5585. if allfields:
  5586. colnames = ["%s.%s" % (tablename, field) for field in self.search_fields.keys()]
  5587. else:
  5588. colnames = ["%s.%s" % (tablename, field.name) for field in fields]
  5589. for k in colnames:
  5590. imapfields_dict[k] = k
  5591. imapqry_list = list()
  5592. imapqry_array = list()
  5593. for fr in fetch_results:
  5594. attachments = []
  5595. content = []
  5596. size = 0
  5597. n = int(fr["message"])
  5598. item_dict = dict()
  5599. message = fr["email"]
  5600. uid = fr["uid"]
  5601. charset = self.get_charset(message)
  5602. flags = fr["flags"]
  5603. raw_message = fr["raw_message"]
  5604. # Return messages data mapping static fields
  5605. # and fetched results. Mapping should be made
  5606. # outside the select function (with auxiliary
  5607. # instance methods)
5608. # pending: search flag states through the email message
  5609. # instances for correct output
  5610. # preserve subject encoding (ASCII/quoted printable)
  5611. if "%s.id" % tablename in colnames:
  5612. item_dict["%s.id" % tablename] = n
  5613. if "%s.created" % tablename in colnames:
  5614. item_dict["%s.created" % tablename] = self.convert_date(message["Date"])
  5615. if "%s.uid" % tablename in colnames:
  5616. item_dict["%s.uid" % tablename] = uid
  5617. if "%s.sender" % tablename in colnames:
  5618. # If there is no encoding found in the message header
  5619. # force utf-8 replacing characters (change this to
  5620. # module's defaults). Applies to .sender, .to, .cc and .bcc fields
  5621. item_dict["%s.sender" % tablename] = message["From"]
  5622. if "%s.to" % tablename in colnames:
  5623. item_dict["%s.to" % tablename] = message["To"]
  5624. if "%s.cc" % tablename in colnames:
  5625. if "Cc" in message.keys():
  5626. item_dict["%s.cc" % tablename] = message["Cc"]
  5627. else:
  5628. item_dict["%s.cc" % tablename] = ""
  5629. if "%s.bcc" % tablename in colnames:
  5630. if "Bcc" in message.keys():
  5631. item_dict["%s.bcc" % tablename] = message["Bcc"]
  5632. else:
  5633. item_dict["%s.bcc" % tablename] = ""
  5634. if "%s.deleted" % tablename in colnames:
  5635. item_dict["%s.deleted" % tablename] = "\\Deleted" in flags
  5636. if "%s.draft" % tablename in colnames:
  5637. item_dict["%s.draft" % tablename] = "\\Draft" in flags
  5638. if "%s.flagged" % tablename in colnames:
  5639. item_dict["%s.flagged" % tablename] = "\\Flagged" in flags
  5640. if "%s.recent" % tablename in colnames:
  5641. item_dict["%s.recent" % tablename] = "\\Recent" in flags
  5642. if "%s.seen" % tablename in colnames:
  5643. item_dict["%s.seen" % tablename] = "\\Seen" in flags
  5644. if "%s.subject" % tablename in colnames:
  5645. item_dict["%s.subject" % tablename] = message["Subject"]
  5646. if "%s.answered" % tablename in colnames:
  5647. item_dict["%s.answered" % tablename] = "\\Answered" in flags
  5648. if "%s.mime" % tablename in colnames:
  5649. item_dict["%s.mime" % tablename] = message.get_content_type()
  5650. if "%s.encoding" % tablename in colnames:
  5651. item_dict["%s.encoding" % tablename] = charset
  5652. # Here goes the whole RFC822 body as an email instance
  5653. # for controller side custom processing
  5654. # The message is stored as a raw string
  5655. # >> email.message_from_string(raw string)
  5656. # returns a Message object for enhanced object processing
  5657. if "%s.email" % tablename in colnames:
  5658. # WARNING: no encoding performed (raw message)
  5659. item_dict["%s.email" % tablename] = raw_message
  5660. # Size measure as suggested in a Velocity Reviews post
  5661. # by Tim Williams: "how to get size of email attachment"
5662. # Note: len() and the server RFC822.SIZE reports don't match
  5663. # To retrieve the server size for representation would add a new
  5664. # fetch transaction to the process
  5665. for part in message.walk():
  5666. maintype = part.get_content_maintype()
  5667. if ("%s.attachments" % tablename in colnames) or \
  5668. ("%s.content" % tablename in colnames):
  5669. if "%s.attachments" % tablename in colnames:
  5670. if not ("text" in maintype):
  5671. payload = part.get_payload(decode=True)
  5672. if payload:
  5673. attachment = {
  5674. "payload": payload,
  5675. "filename": part.get_filename(),
  5676. "encoding": part.get_content_charset(),
  5677. "mime": part.get_content_type(),
  5678. "disposition": part["Content-Disposition"]}
  5679. attachments.append(attachment)
  5680. if "%s.content" % tablename in colnames:
  5681. payload = part.get_payload(decode=True)
  5682. part_charset = self.get_charset(part)
  5683. if "text" in maintype:
  5684. if payload:
  5685. content.append(self.encode_text(payload, part_charset))
  5686. if "%s.size" % tablename in colnames:
  5687. if part is not None:
  5688. size += len(str(part))
  5689. item_dict["%s.content" % tablename] = content
  5690. item_dict["%s.attachments" % tablename] = attachments
  5691. item_dict["%s.size" % tablename] = size
  5692. imapqry_list.append(item_dict)
  5693. # extra object mapping for the sake of rows object
  5694. # creation (sends an array or lists)
  5695. for item_dict in imapqry_list:
  5696. imapqry_array_item = list()
  5697. for fieldname in colnames:
  5698. imapqry_array_item.append(item_dict[fieldname])
  5699. imapqry_array.append(imapqry_array_item)
5700. # parse result and return a rows object
  5702. processor = attributes.get('processor',self.parse)
  5703. return processor(imapqry_array, fields, colnames)
  5704. def _update(self, tablename, query, fields, commit=False):
  5705. # TODO: the adapter should implement an .expand method
  5706. commands = list()
  5707. if use_common_filters(query):
  5708. query = self.common_filter(query, [tablename,])
  5709. mark = []
  5710. unmark = []
  5711. if query:
  5712. for item in fields:
  5713. field = item[0]
  5714. name = field.name
  5715. value = item[1]
  5716. if self.is_flag(name):
  5717. flag = self.search_fields[name]
  5718. if (value is not None) and (flag != "\\Recent"):
  5719. if value:
  5720. mark.append(flag)
  5721. else:
  5722. unmark.append(flag)
  5723. result, data = self.connection.select(
  5724. self.connection.mailbox_names[tablename])
  5725. string_query = "(%s)" % query
  5726. result, data = self.connection.search(None, string_query)
  5727. store_list = [item.strip() for item in data[0].split()
  5728. if item.strip().isdigit()]
  5729. # build commands for marked flags
  5730. for number in store_list:
  5732. if len(mark) > 0:
  5733. commands.append((number, "+FLAGS", "(%s)" % " ".join(mark)))
  5734. if len(unmark) > 0:
  5735. commands.append((number, "-FLAGS", "(%s)" % " ".join(unmark)))
  5736. return commands
  5737. def update(self, tablename, query, fields):
  5738. rowcount = 0
  5739. commands = self._update(tablename, query, fields)
  5740. for command in commands:
  5741. result, data = self.connection.store(*command)
  5742. if result == "OK":
  5743. rowcount += 1
  5744. else:
  5745. raise Exception("IMAP storing error: %s" % data)
  5746. return rowcount
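# Flag updates are translated by _update into IMAP STORE commands; a
# sketch (assuming the tables defined above and an existing uid):
#
#   imapdb(imapdb.INBOX.uid == 4200).update(seen=True, flagged=False)
#   # issues STORE <msg> +FLAGS (\Seen) and STORE <msg> -FLAGS (\Flagged)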
  5747. def _count(self, query, distinct=None):
  5748. raise NotImplementedError()
  5749. def count(self,query,distinct=None):
  5750. counter = 0
  5751. tablename = self.get_query_mailbox(query)
  5752. if query and tablename is not None:
  5753. if use_common_filters(query):
  5754. query = self.common_filter(query, [tablename,])
  5755. result, data = self.connection.select(self.connection.mailbox_names[tablename])
  5756. string_query = "(%s)" % query
  5757. result, data = self.connection.search(None, string_query)
  5758. store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
  5759. counter = len(store_list)
  5760. return counter
  5761. def delete(self, tablename, query):
  5762. counter = 0
  5763. if query:
  5764. if use_common_filters(query):
  5765. query = self.common_filter(query, [tablename,])
  5766. result, data = self.connection.select(self.connection.mailbox_names[tablename])
  5767. string_query = "(%s)" % query
  5768. result, data = self.connection.search(None, string_query)
  5769. store_list = [item.strip() for item in data[0].split() if item.strip().isdigit()]
  5770. for number in store_list:
  5771. result, data = self.connection.store(number, "+FLAGS", "(\\Deleted)")
  5772. if result == "OK":
  5773. counter += 1
  5774. else:
  5775. raise Exception("IMAP store error: %s" % data)
  5776. if counter > 0:
  5777. result, data = self.connection.expunge()
  5778. return counter
  5779. def BELONGS(self, first, second):
  5780. result = None
  5781. name = self.search_fields[first.name]
  5782. if name == "MESSAGE":
  5783. values = [str(val) for val in second if str(val).isdigit()]
  5784. result = "%s" % ",".join(values).strip()
  5785. elif name == "UID":
  5786. values = [str(val) for val in second if str(val).isdigit()]
  5787. result = "UID %s" % ",".join(values).strip()
  5788. else:
  5789. raise Exception("Operation not supported")
  5790. # result = "(%s %s)" % (self.expand(first), self.expand(second))
  5791. return result
  5792. def CONTAINS(self, first, second, case_sensitive=False):
5793. # the case_sensitive flag is silently ignored (IMAP search is not case sensitive)
  5794. result = None
  5795. name = self.search_fields[first.name]
  5796. if name in ("FROM", "TO", "SUBJECT", "TEXT"):
  5797. result = "%s \"%s\"" % (name, self.expand(second))
  5798. else:
  5799. if first.name in ("cc", "bcc"):
  5800. result = "%s \"%s\"" % (first.name.upper(), self.expand(second))
  5801. elif first.name == "mime":
  5802. result = "HEADER Content-Type \"%s\"" % self.expand(second)
  5803. else:
  5804. raise Exception("Operation not supported")
  5805. return result
  5806. def GT(self, first, second):
  5807. result = None
  5808. name = self.search_fields[first.name]
  5809. if name == "MESSAGE":
  5810. last_message = self.get_last_message(first.tablename)
  5811. result = "%d:%d" % (int(self.expand(second)) + 1, last_message)
  5812. elif name == "UID":
  5813. # GT and LT may not return
  5814. # expected sets depending on
  5815. # the uid format implemented
  5816. try:
  5817. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5818. except TypeError:
  5819. e = sys.exc_info()[1]
  5820. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5821. return ""
  5822. try:
  5823. lower_limit = int(self.expand(second)) + 1
  5824. except (ValueError, TypeError):
  5825. e = sys.exc_info()[1]
  5826. raise Exception("Operation not supported (non integer UID)")
  5827. result = "UID %s:%s" % (lower_limit, threshold)
  5828. elif name == "DATE":
  5829. result = "SINCE %s" % self.convert_date(second, add=datetime.timedelta(1))
  5830. elif name == "SIZE":
  5831. result = "LARGER %s" % self.expand(second)
  5832. else:
  5833. raise Exception("Operation not supported")
  5834. return result
  5835. def GE(self, first, second):
  5836. result = None
  5837. name = self.search_fields[first.name]
  5838. if name == "MESSAGE":
  5839. last_message = self.get_last_message(first.tablename)
  5840. result = "%s:%s" % (self.expand(second), last_message)
  5841. elif name == "UID":
  5842. # GT and LT may not return
  5843. # expected sets depending on
  5844. # the uid format implemented
  5845. try:
  5846. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5847. except TypeError:
  5848. e = sys.exc_info()[1]
  5849. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5850. return ""
  5851. lower_limit = self.expand(second)
  5852. result = "UID %s:%s" % (lower_limit, threshold)
  5853. elif name == "DATE":
  5854. result = "SINCE %s" % self.convert_date(second)
  5855. else:
  5856. raise Exception("Operation not supported")
  5857. return result
  5858. def LT(self, first, second):
  5859. result = None
  5860. name = self.search_fields[first.name]
  5861. if name == "MESSAGE":
  5862. result = "%s:%s" % (1, int(self.expand(second)) - 1)
  5863. elif name == "UID":
  5864. try:
  5865. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5866. except TypeError:
  5867. e = sys.exc_info()[1]
  5868. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5869. return ""
  5870. try:
  5871. upper_limit = int(self.expand(second)) - 1
  5872. except (ValueError, TypeError):
  5873. e = sys.exc_info()[1]
  5874. raise Exception("Operation not supported (non integer UID)")
  5875. result = "UID %s:%s" % (pedestal, upper_limit)
  5876. elif name == "DATE":
  5877. result = "BEFORE %s" % self.convert_date(second)
  5878. elif name == "SIZE":
  5879. result = "SMALLER %s" % self.expand(second)
  5880. else:
  5881. raise Exception("Operation not supported")
  5882. return result
  5883. def LE(self, first, second):
  5884. result = None
  5885. name = self.search_fields[first.name]
  5886. if name == "MESSAGE":
  5887. result = "%s:%s" % (1, self.expand(second))
  5888. elif name == "UID":
  5889. try:
  5890. pedestal, threshold = self.get_uid_bounds(first.tablename)
  5891. except TypeError:
  5892. e = sys.exc_info()[1]
  5893. LOGGER.debug("Error requesting uid bounds: %s", str(e))
  5894. return ""
  5895. upper_limit = int(self.expand(second))
  5896. result = "UID %s:%s" % (pedestal, upper_limit)
  5897. elif name == "DATE":
  5898. result = "BEFORE %s" % self.convert_date(second, add=datetime.timedelta(1))
  5899. else:
  5900. raise Exception("Operation not supported")
  5901. return result
  5902. def NE(self, first, second=None):
  5903. if (second is None) and isinstance(first, Field):
  5904. # All records special table query
  5905. if first.type == "id":
  5906. return self.GE(first, 1)
  5907. result = self.NOT(self.EQ(first, second))
  5908. result = result.replace("NOT NOT", "").strip()
  5909. return result
  5910. def EQ(self,first,second):
  5911. name = self.search_fields[first.name]
  5912. result = None
  5913. if name is not None:
  5914. if name == "MESSAGE":
  5915. # query by message sequence number
  5916. result = "%s" % self.expand(second)
  5917. elif name == "UID":
  5918. result = "UID %s" % self.expand(second)
  5919. elif name == "DATE":
  5920. result = "ON %s" % self.convert_date(second)
  5921. elif name in self.flags:
  5922. if second:
  5923. result = "%s" % (name.upper()[1:])
  5924. else:
  5925. result = "NOT %s" % (name.upper()[1:])
  5926. else:
  5927. raise Exception("Operation not supported")
  5928. else:
  5929. raise Exception("Operation not supported")
  5930. return result
  5931. def AND(self, first, second):
  5932. result = "%s %s" % (self.expand(first), self.expand(second))
  5933. return result
  5934. def OR(self, first, second):
  5935. result = "OR %s %s" % (self.expand(first), self.expand(second))
  5936. return "%s" % result.replace("OR OR", "OR")
  5937. def NOT(self, first):
  5938. result = "NOT %s" % self.expand(first)
  5939. return result
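# Sketch of how DAL queries expand into IMAP SEARCH syntax (illustrative,
# assuming the INBOX table defined above; the upper uid bound comes from
# get_uid_bounds):
#
#   str(imapdb.INBOX.seen == True)   # -> 'SEEN'
#   str(imapdb.INBOX.uid > 100)      # -> 'UID 101:<last uid>'
#   str((imapdb.INBOX.seen == False) | (imapdb.INBOX.draft == True))
#   # -> 'OR NOT SEEN DRAFT'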
  5940. ########################################################################
  5941. # end of adapters
  5942. ########################################################################
  5943. ADAPTERS = {
  5944. 'sqlite': SQLiteAdapter,
  5945. 'spatialite': SpatiaLiteAdapter,
  5946. 'sqlite:memory': SQLiteAdapter,
  5947. 'spatialite:memory': SpatiaLiteAdapter,
  5948. 'mysql': MySQLAdapter,
  5949. 'postgres': PostgreSQLAdapter,
  5950. 'postgres:psycopg2': PostgreSQLAdapter,
  5951. 'postgres:pg8000': PostgreSQLAdapter,
  5952. 'postgres2:psycopg2': NewPostgreSQLAdapter,
  5953. 'postgres2:pg8000': NewPostgreSQLAdapter,
  5954. 'oracle': OracleAdapter,
  5955. 'mssql': MSSQLAdapter,
  5956. 'mssql2': MSSQL2Adapter,
  5957. 'mssql3': MSSQL3Adapter,
  5958. 'vertica': VerticaAdapter,
  5959. 'sybase': SybaseAdapter,
  5960. 'db2': DB2Adapter,
  5961. 'teradata': TeradataAdapter,
  5962. 'informix': InformixAdapter,
  5963. 'informix-se': InformixSEAdapter,
  5964. 'firebird': FireBirdAdapter,
  5965. 'firebird_embedded': FireBirdAdapter,
  5966. 'ingres': IngresAdapter,
  5967. 'ingresu': IngresUnicodeAdapter,
  5968. 'sapdb': SAPDBAdapter,
  5969. 'cubrid': CubridAdapter,
  5970. 'jdbc:sqlite': JDBCSQLiteAdapter,
  5971. 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
  5972. 'jdbc:postgres': JDBCPostgreSQLAdapter,
  5973. 'gae': GoogleDatastoreAdapter, # discouraged, for backward compatibility
  5974. 'google:datastore': GoogleDatastoreAdapter,
  5975. 'google:sql': GoogleSQLAdapter,
  5976. 'couchdb': CouchDBAdapter,
  5977. 'mongodb': MongoDBAdapter,
  5978. 'imap': IMAPAdapter
  5979. }
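# The uri scheme (the part before '://', including any ':driver' suffix)
# selects the adapter class from the mapping above, e.g.:
#
#   DAL('mysql://user:pw@localhost/test')         # -> MySQLAdapter
#   DAL('postgres:psycopg2://u:p@localhost/db')   # -> PostgreSQLAdapter
#   DAL('jdbc:sqlite://test.db')                  # -> JDBCSQLiteAdapter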
  5980. def sqlhtml_validators(field):
  5981. """
  5982. Field type validation, using web2py's validators mechanism.
  5983. makes sure the content of a field is in line with the declared
  5984. fieldtype
  5985. """
  5986. db = field.db
  5987. if not have_validators:
  5988. return []
  5989. field_type, field_length = field.type, field.length
  5990. if isinstance(field_type, SQLCustomType):
  5991. if hasattr(field_type, 'validator'):
  5992. return field_type.validator
  5993. else:
  5994. field_type = field_type.type
  5995. elif not isinstance(field_type,str):
  5996. return []
  5997. requires=[]
  5998. def ff(r,id):
  5999. row=r(id)
  6000. if not row:
  6001. return id
  6002. elif hasattr(r, '_format') and isinstance(r._format,str):
  6003. return r._format % row
  6004. elif hasattr(r, '_format') and callable(r._format):
  6005. return r._format(row)
  6006. else:
  6007. return id
6008. if field_type in ('string', 'text', 'password'):
  6009. requires.append(validators.IS_LENGTH(field_length))
  6010. elif field_type == 'json':
  6011. requires.append(validators.IS_EMPTY_OR(validators.IS_JSON(native_json=field.db._adapter.native_json)))
  6012. elif field_type == 'double' or field_type == 'float':
  6013. requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
  6014. elif field_type in ('integer','bigint'):
  6015. requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
  6016. elif field_type.startswith('decimal'):
  6017. requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
  6018. elif field_type == 'date':
  6019. requires.append(validators.IS_DATE())
  6020. elif field_type == 'time':
  6021. requires.append(validators.IS_TIME())
  6022. elif field_type == 'datetime':
  6023. requires.append(validators.IS_DATETIME())
  6024. elif db and field_type.startswith('reference') and \
  6025. field_type.find('.') < 0 and \
  6026. field_type[10:] in db.tables:
  6027. referenced = db[field_type[10:]]
  6028. def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
  6029. field.represent = field.represent or repr_ref
  6030. if hasattr(referenced, '_format') and referenced._format:
  6031. requires = validators.IS_IN_DB(db,referenced._id,
  6032. referenced._format)
  6033. if field.unique:
  6034. requires._and = validators.IS_NOT_IN_DB(db,field)
  6035. if field.tablename == field_type[10:]:
  6036. return validators.IS_EMPTY_OR(requires)
  6037. return requires
  6038. elif db and field_type.startswith('list:reference') and \
  6039. field_type.find('.') < 0 and \
  6040. field_type[15:] in db.tables:
  6041. referenced = db[field_type[15:]]
  6042. def list_ref_repr(ids, row=None, r=referenced, f=ff):
  6043. if not ids:
  6044. return None
  6045. refs = None
  6046. db, id = r._db, r._id
  6047. if isinstance(db._adapter, GoogleDatastoreAdapter):
  6048. def count(values): return db(id.belongs(values)).select(id)
  6049. rx = range(0, len(ids), 30)
  6050. refs = reduce(lambda a,b:a&b, [count(ids[i:i+30]) for i in rx])
  6051. else:
  6052. refs = db(id.belongs(ids)).select(id)
  6053. return (refs and ', '.join(str(f(r,x.id)) for x in refs) or '')
  6054. field.represent = field.represent or list_ref_repr
  6055. if hasattr(referenced, '_format') and referenced._format:
  6056. requires = validators.IS_IN_DB(db,referenced._id,
  6057. referenced._format,multiple=True)
  6058. else:
  6059. requires = validators.IS_IN_DB(db,referenced._id,
  6060. multiple=True)
  6061. if field.unique:
  6062. requires._and = validators.IS_NOT_IN_DB(db,field)
  6063. return requires
  6064. elif field_type.startswith('list:'):
6065. def repr_list(values,row=None): return ', '.join(str(v) for v in (values or []))
  6066. field.represent = field.represent or repr_list
  6067. if field.unique:
  6068. requires.insert(0,validators.IS_NOT_IN_DB(db,field))
  6069. sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
  6070. if field.notnull and not field_type[:2] in sff:
  6071. requires.insert(0, validators.IS_NOT_EMPTY())
  6072. elif not field.notnull and field_type[:2] in sff and requires:
  6073. requires[-1] = validators.IS_EMPTY_OR(requires[-1])
  6074. return requires
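# Sketch of the default requires derived above, when a Field declares
# no explicit requires:
#
#   Field('name', 'string', length=32, notnull=True)
#   # -> [IS_NOT_EMPTY(), IS_LENGTH(32)]
#   Field('born', 'date')
#   # -> [IS_EMPTY_OR(IS_DATE())]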
  6075. def bar_escape(item):
  6076. return str(item).replace('|', '||')
  6077. def bar_encode(items):
  6078. return '|%s|' % '|'.join(bar_escape(item) for item in items if str(item).strip())
  6079. def bar_decode_integer(value):
  6080. if not hasattr(value,'split') and hasattr(value,'read'):
  6081. value = value.read()
  6082. return [long(x) for x in value.split('|') if x.strip()]
  6083. def bar_decode_string(value):
  6084. return [x.replace('||', '|') for x in
  6085. REGEX_UNPACK.split(value[1:-1]) if x.strip()]
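# Round-trip sketch for the bar-encoding helpers used by list: fields
# (a literal '|' inside an item is escaped as '||'):
#
#   bar_encode(['a|b', 'c'])        # -> '|a||b|c|'
#   bar_decode_string('|a||b|c|')   # -> ['a|b', 'c']
#   bar_decode_integer('|1|2|3|')   # -> [1L, 2L, 3L]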
  6086. class Row(object):
  6087. """
  6088. a dictionary that lets you do d['a'] as well as d.a
  6089. this is only used to store a Row
  6090. """
  6091. __init__ = lambda self,*args,**kwargs: self.__dict__.update(*args,**kwargs)
  6092. def __getitem__(self, k):
  6093. key=str(k)
  6094. _extra = self.__dict__.get('_extra', None)
  6095. if _extra is not None:
  6096. v = _extra.get(key, DEFAULT)
  6097. if v != DEFAULT:
  6098. return v
  6099. m = REGEX_TABLE_DOT_FIELD.match(key)
  6100. if m:
  6101. try:
  6102. return ogetattr(self, m.group(1))[m.group(2)]
  6103. except (KeyError,AttributeError,TypeError):
  6104. key = m.group(2)
  6105. return ogetattr(self, key)
  6106. __setitem__ = lambda self, key, value: setattr(self, str(key), value)
  6107. __delitem__ = object.__delattr__
  6108. __copy__ = lambda self: Row(self)
  6109. __call__ = __getitem__
  6110. get = lambda self, key, default=None: self.__dict__.get(key,default)
  6111. has_key = __contains__ = lambda self, key: key in self.__dict__
  6112. __nonzero__ = lambda self: len(self.__dict__)>0
  6113. update = lambda self, *args, **kwargs: self.__dict__.update(*args, **kwargs)
  6114. keys = lambda self: self.__dict__.keys()
  6115. items = lambda self: self.__dict__.items()
  6116. values = lambda self: self.__dict__.values()
  6117. __iter__ = lambda self: self.__dict__.__iter__()
  6118. iteritems = lambda self: self.__dict__.iteritems()
  6119. __str__ = __repr__ = lambda self: '<Row %s>' % self.as_dict()
  6120. __int__ = lambda self: object.__getattribute__(self,'id')
  6121. __long__ = lambda self: long(object.__getattribute__(self,'id'))
  6122. def __eq__(self,other):
  6123. try:
  6124. return self.as_dict() == other.as_dict()
  6125. except AttributeError:
  6126. return False
  6127. def __ne__(self,other):
  6128. return not (self == other)
  6129. def __copy__(self):
  6130. return Row(dict(self))
  6131. def as_dict(self, datetime_to_str=False, custom_types=None):
  6132. SERIALIZABLE_TYPES = [str, unicode, int, long, float, bool, list, dict]
  6133. if isinstance(custom_types,(list,tuple,set)):
  6134. SERIALIZABLE_TYPES += custom_types
  6135. elif custom_types:
  6136. SERIALIZABLE_TYPES.append(custom_types)
  6137. d = dict(self)
  6138. for k in copy.copy(d.keys()):
  6139. v=d[k]
  6140. if d[k] is None:
  6141. continue
  6142. elif isinstance(v,Row):
  6143. d[k]=v.as_dict()
  6144. elif isinstance(v,Reference):
  6145. d[k]=long(v)
  6146. elif isinstance(v,decimal.Decimal):
  6147. d[k]=float(v)
  6148. elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
  6149. if datetime_to_str:
  6150. d[k] = v.isoformat().replace('T',' ')[:19]
  6151. elif not isinstance(v,tuple(SERIALIZABLE_TYPES)):
  6152. del d[k]
  6153. return d
  6154. def as_xml(self, row_name="row", colnames=None, indent=' '):
  6155. def f(row,field,indent=' '):
  6156. if isinstance(row,Row):
  6157. spc = indent+' \n'
  6158. items = [f(row[x],x,indent+' ') for x in row]
  6159. return '%s<%s>\n%s\n%s</%s>' % (
  6160. indent,
  6161. field,
  6162. spc.join(item for item in items if item),
  6163. indent,
  6164. field)
  6165. elif not callable(row):
  6166. if REGEX_ALPHANUMERIC.match(field):
  6167. return '%s<%s>%s</%s>' % (indent,field,row,field)
  6168. else:
  6169. return '%s<extra name="%s">%s</extra>' % \
  6170. (indent,field,row)
  6171. else:
  6172. return None
  6173. return f(self, row_name, indent=indent)
  6174. def as_json(self, mode="object", default=None, colnames=None,
  6175. serialize=True, **kwargs):
  6176. """
  6177. serializes the row to a JSON object
  6178. kwargs are passed to .as_dict method
  6179. only "object" mode supported
  6180. serialize = False used by Rows.as_json
6181. TODO: return array mode with query column order;
6182. mode and colnames are not implemented yet
  6183. """
  6184. item = self.as_dict(**kwargs)
  6185. if serialize:
  6186. if have_serializers:
  6187. return serializers.json(item,
  6188. default=default or
  6189. serializers.custom_json)
  6190. elif simplejson:
  6191. return simplejson.dumps(item)
  6192. else:
  6193. raise RuntimeError("missing simplejson")
  6194. else:
  6195. return item
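# Row access sketch: item, attribute and call styles are equivalent:
#
#   r = Row(id=1, name='James')
#   r['name'] == r.name == r('name')   # -> True
#   r.as_dict()                        # -> {'id': 1, 'name': 'James'}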
  6196. ################################################################################
  6197. # Everything below should be independent of the specifics of the database
  6198. # and should work for RDBMs and some NoSQL databases
  6199. ################################################################################
  6200. class SQLCallableList(list):
  6201. def __call__(self):
  6202. return copy.copy(self)
  6203. def smart_query(fields,text):
  6204. if not isinstance(fields,(list,tuple)):
  6205. fields = [fields]
  6206. new_fields = []
  6207. for field in fields:
  6208. if isinstance(field,Field):
  6209. new_fields.append(field)
  6210. elif isinstance(field,Table):
  6211. for ofield in field:
  6212. new_fields.append(ofield)
  6213. else:
  6214. raise RuntimeError("fields must be a list of fields")
  6215. fields = new_fields
  6216. field_map = {}
  6217. for field in fields:
  6218. n = field.name.lower()
  6219. if not n in field_map:
  6220. field_map[n] = field
  6221. n = str(field).lower()
  6222. if not n in field_map:
  6223. field_map[n] = field
  6224. constants = {}
  6225. i = 0
  6226. while True:
  6227. m = REGEX_CONST_STRING.search(text)
  6228. if not m: break
  6229. text = text[:m.start()]+('#%i' % i)+text[m.end():]
  6230. constants[str(i)] = m.group()[1:-1]
  6231. i+=1
  6232. text = re.sub('\s+',' ',text).lower()
  6233. for a,b in [('&','and'),
  6234. ('|','or'),
  6235. ('~','not'),
  6236. ('==','='),
  6237. ('<','<'),
  6238. ('>','>'),
  6239. ('<=','<='),
  6240. ('>=','>='),
  6241. ('<>','!='),
  6242. ('=<','<='),
  6243. ('=>','>='),
  6244. ('=','='),
  6245. (' less or equal than ','<='),
  6246. (' greater or equal than ','>='),
  6247. (' equal or less than ','<='),
  6248. (' equal or greater than ','>='),
  6249. (' less or equal ','<='),
  6250. (' greater or equal ','>='),
  6251. (' equal or less ','<='),
  6252. (' equal or greater ','>='),
  6253. (' not equal to ','!='),
  6254. (' not equal ','!='),
  6255. (' equal to ','='),
  6256. (' equal ','='),
  6257. (' equals ','='),
  6258. (' less than ','<'),
  6259. (' greater than ','>'),
  6260. (' starts with ','startswith'),
  6261. (' ends with ','endswith'),
  6262. (' not in ' , 'notbelongs'),
  6263. (' in ' , 'belongs'),
  6264. (' is ','=')]:
  6265. if a[0]==' ':
  6266. text = text.replace(' is'+a,' %s ' % b)
  6267. text = text.replace(a,' %s ' % b)
  6268. text = re.sub('\s+',' ',text).lower()
  6269. text = re.sub('(?P<a>[\<\>\!\=])\s+(?P<b>[\<\>\!\=])','\g<a>\g<b>',text)
  6270. query = field = neg = op = logic = None
  6271. for item in text.split():
  6272. if field is None:
  6273. if item == 'not':
  6274. neg = True
  6275. elif not neg and not logic and item in ('and','or'):
  6276. logic = item
  6277. elif item in field_map:
  6278. field = field_map[item]
  6279. else:
  6280. raise RuntimeError("Invalid syntax")
  6281. elif not field is None and op is None:
  6282. op = item
  6283. elif not op is None:
  6284. if item.startswith('#'):
  6285. if not item[1:] in constants:
  6286. raise RuntimeError("Invalid syntax")
  6287. value = constants[item[1:]]
  6288. else:
  6289. value = item
  6290. if field.type in ('text', 'string', 'json'):
  6291. if op == '=': op = 'like'
  6292. if op == '=': new_query = field==value
  6293. elif op == '<': new_query = field<value
  6294. elif op == '>': new_query = field>value
  6295. elif op == '<=': new_query = field<=value
  6296. elif op == '>=': new_query = field>=value
  6297. elif op == '!=': new_query = field!=value
  6298. elif op == 'belongs': new_query = field.belongs(value.split(','))
  6299. elif op == 'notbelongs': new_query = ~field.belongs(value.split(','))
  6300. elif field.type in ('text', 'string', 'json'):
  6301. if op == 'contains': new_query = field.contains(value)
  6302. elif op == 'like': new_query = field.like(value)
  6303. elif op == 'startswith': new_query = field.startswith(value)
  6304. elif op == 'endswith': new_query = field.endswith(value)
  6305. else: raise RuntimeError("Invalid operation")
  6306. elif field._db._adapter.dbengine=='google:datastore' and \
  6307. field.type in ('list:integer', 'list:string', 'list:reference'):
  6308. if op == 'contains': new_query = field.contains(value)
  6309. else: raise RuntimeError("Invalid operation")
  6310. else: raise RuntimeError("Invalid operation")
  6311. if neg: new_query = ~new_query
  6312. if query is None:
  6313. query = new_query
  6314. elif logic == 'and':
  6315. query &= new_query
  6316. elif logic == 'or':
  6317. query |= new_query
  6318. field = op = neg = logic = None
  6319. return query
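# smart_query usage sketch (assuming a defined db.person table):
#
#   q = smart_query([db.person], 'name starts with "J" and id > 3')
#   rows = db(q).select()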
  6320. class DAL(object):
  6321. """
  6322. an instance of this class represents a database connection
  6323. Example::
  6324. db = DAL('sqlite://test.db')
  6325. or
  6326. db = DAL({"uri": ..., "items": ...}) # experimental
  6327. db.define_table('tablename', Field('fieldname1'),
  6328. Field('fieldname2'))
  6329. """
  6330. def __new__(cls, uri='sqlite://dummy.db', *args, **kwargs):
  6331. if not hasattr(THREAD_LOCAL,'db_instances'):
  6332. THREAD_LOCAL.db_instances = {}
  6333. if not hasattr(THREAD_LOCAL,'db_instances_zombie'):
  6334. THREAD_LOCAL.db_instances_zombie = {}
  6335. if uri == '<zombie>':
  6336. db_uid = kwargs['db_uid'] # a zombie must have a db_uid!
  6337. if db_uid in THREAD_LOCAL.db_instances:
  6338. db_group = THREAD_LOCAL.db_instances[db_uid]
  6339. db = db_group[-1]
  6340. elif db_uid in THREAD_LOCAL.db_instances_zombie:
  6341. db = THREAD_LOCAL.db_instances_zombie[db_uid]
  6342. else:
  6343. db = super(DAL, cls).__new__(cls)
  6344. THREAD_LOCAL.db_instances_zombie[db_uid] = db
  6345. else:
  6346. db_uid = kwargs.get('db_uid',hashlib_md5(repr(uri)).hexdigest())
  6347. if db_uid in THREAD_LOCAL.db_instances_zombie:
  6348. db = THREAD_LOCAL.db_instances_zombie[db_uid]
  6349. del THREAD_LOCAL.db_instances_zombie[db_uid]
  6350. else:
  6351. db = super(DAL, cls).__new__(cls)
  6352. db_group = THREAD_LOCAL.db_instances.get(db_uid,[])
  6353. db_group.append(db)
  6354. THREAD_LOCAL.db_instances[db_uid] = db_group
  6355. db._db_uid = db_uid
  6356. return db
  6357. @staticmethod
  6358. def set_folder(folder):
  6359. """
  6360. # ## this allows gluon to set a folder for this thread
  6361. # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
  6362. """
  6363. BaseAdapter.set_folder(folder)
  6364. @staticmethod
  6365. def get_instances():
  6366. """
6367. Returns a dictionary keyed by connection uri, with query timings and defined tables:
  6368. {'sqlite://storage.sqlite': {
  6369. 'dbstats': [(select auth_user.email from auth_user, 0.02009)],
  6370. 'dbtables': {
  6371. 'defined': ['auth_cas', 'auth_event', 'auth_group',
  6372. 'auth_membership', 'auth_permission', 'auth_user'],
  6373. 'lazy': '[]'
  6374. }
  6375. }
  6376. }
  6377. """
  6378. dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
  6379. infos = {}
  6380. for db_uid, db_group in dbs:
  6381. for db in db_group:
  6382. if not db._uri:
  6383. continue
  6384. k = hide_password(db._uri)
  6385. infos[k] = dict(dbstats = [(row[0], row[1]) for row in db._timings],
  6386. dbtables = {'defined':
  6387. sorted(list(set(db.tables) -
  6388. set(db._LAZY_TABLES.keys()))),
  6389. 'lazy': sorted(db._LAZY_TABLES.keys())}
  6390. )
  6391. return infos
  6392. @staticmethod
  6393. def distributed_transaction_begin(*instances):
  6394. if not instances:
  6395. return
6396. thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
6397. instances = list(enumerate(instances))
6398. keys = ['%s.%i' % (thread_key, i) for (i,db) in instances]
  6399. for (i, db) in instances:
  6400. if not db._adapter.support_distributed_transaction():
6401. raise SyntaxError(
6402. 'distributed transaction not supported by %s' % db._dbname)
  6403. for (i, db) in instances:
  6404. db._adapter.distributed_transaction_begin(keys[i])
  6405. @staticmethod
  6406. def distributed_transaction_commit(*instances):
  6407. if not instances:
  6408. return
6409. instances = list(enumerate(instances))
  6410. thread_key = '%s.%s' % (socket.gethostname(), threading.currentThread())
  6411. keys = ['%s.%i' % (thread_key, i) for (i,db) in instances]
  6412. for (i, db) in instances:
  6413. if not db._adapter.support_distributed_transaction():
6414. raise SyntaxError(
6415. 'distributed transaction not supported by %s' % db._dbname)
  6416. try:
  6417. for (i, db) in instances:
  6418. db._adapter.prepare(keys[i])
  6419. except:
  6420. for (i, db) in instances:
  6421. db._adapter.rollback_prepared(keys[i])
  6422. raise RuntimeError('failure to commit distributed transaction')
  6423. else:
  6424. for (i, db) in instances:
  6425. db._adapter.commit_prepared(keys[i])
  6426. return
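# Two-phase commit sketch (hypothetical uris; both backends must support
# distributed transactions, e.g. PostgreSQL):
#
#   db_a = DAL('postgres://u:p@host_a/db')
#   db_b = DAL('postgres://u:p@host_b/db')
#   DAL.distributed_transaction_begin(db_a, db_b)
#   # ... writes on both connections ...
#   DAL.distributed_transaction_commit(db_a, db_b)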
  6427. def __init__(self, uri=DEFAULT_URI,
  6428. pool_size=0, folder=None,
  6429. db_codec='UTF-8', check_reserved=None,
  6430. migrate=True, fake_migrate=False,
  6431. migrate_enabled=True, fake_migrate_all=False,
  6432. decode_credentials=False, driver_args=None,
  6433. adapter_args=None, attempts=5, auto_import=False,
  6434. bigint_id=False,debug=False,lazy_tables=False,
  6435. db_uid=None, do_connect=True, after_connection=None):
  6436. """
  6437. Creates a new Database Abstraction Layer instance.
  6438. Keyword arguments:
  6439. :uri: string that contains information for connecting to a database.
  6440. (default: 'sqlite://dummy.db')
  6441. experimental: you can specify a dictionary as uri
  6442. parameter i.e. with
  6443. db = DAL({"uri": "sqlite://storage.sqlite",
  6444. "items": {...}, ...})
  6445. for an example of dict input you can check the output
  6446. of the scaffolding db model with
  6447. db.as_dict()
  6448. Note that for compatibility with Python older than
  6449. version 2.6.5 you should cast your dict input keys
  6450. to str due to a syntax limitation on kwarg names.
  6451. for proper DAL dictionary input you can use one of:
  6452. obj = serializers.cast_keys(dict, [encoding="utf-8"])
  6453. or else (for parsing json input)
  6454. obj = serializers.loads_json(data, unicode_keys=False)
  6455. :pool_size: How many open connections to make to the database object.
  6456. :folder: where .table files will be created.
  6457. automatically set within web2py
  6458. use an explicit path when using DAL outside web2py
  6459. :db_codec: string encoding of the database (default: 'UTF-8')
  6460. :check_reserved: list of adapters to check tablenames and column names
  6461. against sql/nosql reserved keywords. (Default None)
6462. * 'common' List of sql keywords that are common to all database types
6463. such as "SELECT, INSERT". (recommended)
6464. * 'all' Checks against all known SQL keywords. (not recommended)
6465. * '<adaptername>' Checks against the specific adapter's list of keywords
6466. (recommended)
6467. * '<adaptername>_nonreserved' Checks against the specific adapter's
6468. list of nonreserved keywords. (if available)
  6469. :migrate (defaults to True) sets default migrate behavior for all tables
  6470. :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
  6471. :migrate_enabled (defaults to True). If set to False disables ALL migrations
6472. :fake_migrate_all (defaults to False). If set to True, fake-migrates ALL tables
  6473. :attempts (defaults to 5). Number of times to attempt connecting
  6474. :auto_import (defaults to False). If set, import automatically table definitions from the
  6475. databases folder
  6476. :bigint_id (defaults to False): If set, turn on bigint instead of int for id fields
  6477. :lazy_tables (defaults to False): delay table definition until table access
6478. :after_connection (defaults to None): a callable executed right after establishing the connection
  6479. """
  6480. items = None
  6481. if isinstance(uri, dict):
  6482. if "items" in uri:
  6483. items = uri.pop("items")
  6484. try:
  6485. newuri = uri.pop("uri")
  6486. except KeyError:
  6487. newuri = DEFAULT_URI
  6488. locals().update(uri)
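# note: updating locals() does not reliably rebind the enclosing
# keyword arguments in CPython; the dict-style uri remains experimental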
  6489. uri = newuri
  6490. if uri == '<zombie>' and db_uid is not None: return
  6491. if not decode_credentials:
  6492. credential_decoder = lambda cred: cred
  6493. else:
  6494. credential_decoder = lambda cred: urllib.unquote(cred)
  6495. self._folder = folder
  6496. if folder:
  6497. self.set_folder(folder)
  6498. self._uri = uri
  6499. self._pool_size = pool_size
  6500. self._db_codec = db_codec
  6501. self._lastsql = ''
  6502. self._timings = []
  6503. self._pending_references = {}
  6504. self._request_tenant = 'request_tenant'
  6505. self._common_fields = []
  6506. self._referee_name = '%(table)s'
  6507. self._bigint_id = bigint_id
  6508. self._debug = debug
  6509. self._migrated = []
  6510. self._LAZY_TABLES = {}
  6511. self._lazy_tables = lazy_tables
  6512. self._tables = SQLCallableList()
  6513. self._driver_args = driver_args
  6514. self._adapter_args = adapter_args
  6515. self._check_reserved = check_reserved
  6516. self._decode_credentials = decode_credentials
  6517. self._attempts = attempts
  6518. self._do_connect = do_connect
  6519. if not str(attempts).isdigit() or attempts < 0:
  6520. attempts = 5
  6521. if uri:
  6522. uris = isinstance(uri,(list,tuple)) and uri or [uri]
  6523. error = ''
  6524. connected = False
  6525. for k in range(attempts):
  6526. for uri in uris:
  6527. try:
  6528. if is_jdbc and not uri.startswith('jdbc:'):
  6529. uri = 'jdbc:'+uri
  6530. self._dbname = REGEX_DBNAME.match(uri).group()
  6531. if not self._dbname in ADAPTERS:
  6532. raise SyntaxError("Error in URI '%s' or database not supported" % self._dbname)
6533. # note: pass driver_args or {} (and adapter_args or {}) so each
6534. # adapter gets its own fresh dict rather than a shared default
  6535. kwargs = dict(db=self,uri=uri,
  6536. pool_size=pool_size,
  6537. folder=folder,
  6538. db_codec=db_codec,
  6539. credential_decoder=credential_decoder,
  6540. driver_args=driver_args or {},
  6541. adapter_args=adapter_args or {},
  6542. do_connect=do_connect,
  6543. after_connection=after_connection)
  6544. self._adapter = ADAPTERS[self._dbname](**kwargs)
  6545. types = ADAPTERS[self._dbname].types
  6546. # copy so multiple DAL() possible
  6547. self._adapter.types = copy.copy(types)
  6548. self._adapter.build_parsemap()
  6549. if bigint_id:
  6550. if 'big-id' in types and 'reference' in types:
  6551. self._adapter.types['id'] = types['big-id']
  6552. self._adapter.types['reference'] = types['big-reference']
  6553. connected = True
  6554. break
  6555. except SyntaxError:
  6556. raise
  6557. except Exception:
  6558. tb = traceback.format_exc()
  6559. sys.stderr.write('DEBUG: connect attempt %i, connection error:\n%s' % (k, tb))
  6560. if connected:
  6561. break
  6562. else:
  6563. time.sleep(1)
  6564. if not connected:
  6565. raise RuntimeError("Failure to connect, tried %d times:\n%s" % (attempts, tb))
  6566. else:
  6567. self._adapter = BaseAdapter(db=self,pool_size=0,
  6568. uri='None',folder=folder,
  6569. db_codec=db_codec, after_connection=after_connection)
  6570. migrate = fake_migrate = False
  6571. adapter = self._adapter
  6572. self._uri_hash = hashlib_md5(adapter.uri).hexdigest()
  6573. self.check_reserved = check_reserved
  6574. if self.check_reserved:
  6575. from reserved_sql_keywords import ADAPTERS as RSK
  6576. self.RSK = RSK
  6577. self._migrate = migrate
  6578. self._fake_migrate = fake_migrate
  6579. self._migrate_enabled = migrate_enabled
  6580. self._fake_migrate_all = fake_migrate_all
  6581. if auto_import or items:
  6582. self.import_table_definitions(adapter.folder,
  6583. items=items)
  6584. @property
  6585. def tables(self):
  6586. return self._tables
  6587. def import_table_definitions(self, path, migrate=False,
  6588. fake_migrate=False, items=None):
  6589. pattern = pjoin(path,self._uri_hash+'_*.table')
  6590. if items:
  6591. for tablename, table in items.iteritems():
  6592. # TODO: read all field/table options
  6593. fields = []
  6594. # remove unsupported/illegal Table arguments
  6595. [table.pop(name) for name in ("name", "fields") if
  6596. name in table]
  6597. if "items" in table:
  6598. for fieldname, field in table.pop("items").iteritems():
  6599. # remove unsupported/illegal Field arguments
  6600. [field.pop(key) for key in ("requires", "name",
  6601. "compute", "colname") if key in field]
  6602. fields.append(Field(str(fieldname), **field))
  6603. self.define_table(str(tablename), *fields, **table)
  6604. else:
  6605. for filename in glob.glob(pattern):
  6606. tfile = self._adapter.file_open(filename, 'r')
  6607. try:
  6608. sql_fields = pickle.load(tfile)
  6609. name = filename[len(pattern)-7:-6]
  6610. mf = [(value['sortable'],
  6611. Field(key,
  6612. type=value['type'],
  6613. length=value.get('length',None),
  6614. notnull=value.get('notnull',False),
  6615. unique=value.get('unique',False))) \
  6616. for key, value in sql_fields.iteritems()]
  6617. mf.sort(lambda a,b: cmp(a[0],b[0]))
  6618. self.define_table(name,*[item[1] for item in mf],
  6619. **dict(migrate=migrate,
  6620. fake_migrate=fake_migrate))
  6621. finally:
  6622. self._adapter.file_close(tfile)
  6623. def check_reserved_keyword(self, name):
  6624. """
  6625. Validates ``name`` against SQL keywords
  6626. Uses self.check_reserve which is a list of
  6627. operators to use.
  6628. self.check_reserved
  6629. ['common', 'postgres', 'mysql']
  6630. self.check_reserved
  6631. ['all']
  6632. """
  6633. for backend in self.check_reserved:
  6634. if name.upper() in self.RSK[backend]:
  6635. raise SyntaxError(
  6636. 'invalid table/column name "%s" is a "%s" reserved SQL/NOSQL keyword' % (name, backend.upper()))
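# Usage sketch: with check_reserved enabled, reserved names fail early:
#
#   db = DAL('sqlite://storage.sqlite', check_reserved=['common'])
#   db.define_table('select', Field('name'))   # raises SyntaxError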
  6637. def parse_as_rest(self,patterns,args,vars,queries=None,nested_select=True):
  6638. """
  6639. EXAMPLE:
  6640. db.define_table('person',Field('name'),Field('info'))
  6641. db.define_table('pet',Field('ownedby',db.person),Field('name'),Field('info'))
  6642. @request.restful()
  6643. def index():
  6644. def GET(*args,**vars):
  6645. patterns = [
  6646. "/friends[person]",
  6647. "/{person.name}/:field",
  6648. "/{person.name}/pets[pet.ownedby]",
  6649. "/{person.name}/pets[pet.ownedby]/{pet.name}",
  6650. "/{person.name}/pets[pet.ownedby]/{pet.name}/:field",
  6651. ("/dogs[pet]", db.pet.info=='dog'),
  6652. ("/dogs[pet]/{pet.name.startswith}", db.pet.info=='dog'),
  6653. ]
  6654. parser = db.parse_as_rest(patterns,args,vars)
  6655. if parser.status == 200:
  6656. return dict(content=parser.response)
  6657. else:
  6658. raise HTTP(parser.status,parser.error)
  6659. def POST(table_name,**vars):
  6660. if table_name == 'person':
  6661. return db.person.validate_and_insert(**vars)
  6662. elif table_name == 'pet':
  6663. return db.pet.validate_and_insert(**vars)
  6664. else:
  6665. raise HTTP(400)
  6666. return locals()
  6667. """
  6668. db = self
  6669. re1 = REGEX_SEARCH_PATTERN
  6670. re2 = REGEX_SQUARE_BRACKETS
  6671. def auto_table(table,base='',depth=0):
  6672. patterns = []
  6673. for field in db[table].fields:
  6674. if base:
  6675. tag = '%s/%s' % (base,field.replace('_','-'))
  6676. else:
  6677. tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
  6678. f = db[table][field]
  6679. if not f.readable: continue
  6680. if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
  6681. tag += '/{%s.%s}' % (table,field)
  6682. patterns.append(tag)
  6683. patterns.append(tag+'/:field')
  6684. elif f.type.startswith('boolean'):
  6685. tag += '/{%s.%s}' % (table,field)
  6686. patterns.append(tag)
  6687. patterns.append(tag+'/:field')
  6688. elif f.type in ('float','double','integer','bigint'):
  6689. tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
  6690. patterns.append(tag)
  6691. patterns.append(tag+'/:field')
  6692. elif f.type.startswith('list:'):
  6693. tag += '/{%s.%s.contains}' % (table,field)
  6694. patterns.append(tag)
  6695. patterns.append(tag+'/:field')
  6696. elif f.type in ('date','datetime'):
  6697. tag+= '/{%s.%s.year}' % (table,field)
  6698. patterns.append(tag)
  6699. patterns.append(tag+'/:field')
  6700. tag+='/{%s.%s.month}' % (table,field)
  6701. patterns.append(tag)
  6702. patterns.append(tag+'/:field')
  6703. tag+='/{%s.%s.day}' % (table,field)
  6704. patterns.append(tag)
  6705. patterns.append(tag+'/:field')
  6706. if f.type in ('datetime','time'):
  6707. tag+= '/{%s.%s.hour}' % (table,field)
  6708. patterns.append(tag)
  6709. patterns.append(tag+'/:field')
  6710. tag+='/{%s.%s.minute}' % (table,field)
  6711. patterns.append(tag)
  6712. patterns.append(tag+'/:field')
  6713. tag+='/{%s.%s.second}' % (table,field)
  6714. patterns.append(tag)
  6715. patterns.append(tag+'/:field')
  6716. if depth>0:
  6717. for f in db[table]._referenced_by:
  6718. tag+='/%s[%s.%s]' % (table,f.tablename,f.name)
  6719. patterns.append(tag)
  6720. patterns += auto_table(table,base=tag,depth=depth-1)
  6721. return patterns
  6722. if patterns == 'auto':
  6723. patterns=[]
  6724. for table in db.tables:
  6725. if not table.startswith('auth_'):
  6726. patterns.append('/%s[%s]' % (table,table))
  6727. patterns += auto_table(table,base='',depth=1)
  6728. else:
  6729. i = 0
  6730. while i<len(patterns):
  6731. pattern = patterns[i]
  6732. if not isinstance(pattern,str):
  6733. pattern = pattern[0]
  6734. tokens = pattern.split('/')
  6735. if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
  6736. new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],
  6737. '/'.join(tokens[:-1]))
  6738. patterns = patterns[:i]+new_patterns+patterns[i+1:]
  6739. i += len(new_patterns)
  6740. else:
  6741. i += 1
  6742. if '/'.join(args) == 'patterns':
  6743. return Row({'status':200,'pattern':'list',
  6744. 'error':None,'response':patterns})
  6745. for pattern in patterns:
  6746. basequery, exposedfields = None, []
  6747. if isinstance(pattern,tuple):
  6748. if len(pattern)==2:
  6749. pattern, basequery = pattern
  6750. elif len(pattern)>2:
  6751. pattern, basequery, exposedfields = pattern[0:3]
  6752. otable=table=None
  6753. if not isinstance(queries,dict):
  6754. dbset=db(queries)
  6755. if basequery is not None:
  6756. dbset = dbset(basequery)
  6757. i=0
  6758. tags = pattern[1:].split('/')
  6759. if len(tags)!=len(args):
  6760. continue
  6761. for tag in tags:
  6762. if re1.match(tag):
  6763. # print 're1:'+tag
  6764. tokens = tag[1:-1].split('.')
  6765. table, field = tokens[0], tokens[1]
  6766. if not otable or table == otable:
  6767. if len(tokens)==2 or tokens[2]=='eq':
  6768. query = db[table][field]==args[i]
  6769. elif tokens[2]=='ne':
  6770. query = db[table][field]!=args[i]
  6771. elif tokens[2]=='lt':
  6772. query = db[table][field]<args[i]
  6773. elif tokens[2]=='gt':
  6774. query = db[table][field]>args[i]
  6775. elif tokens[2]=='ge':
  6776. query = db[table][field]>=args[i]
  6777. elif tokens[2]=='le':
  6778. query = db[table][field]<=args[i]
  6779. elif tokens[2]=='year':
  6780. query = db[table][field].year()==args[i]
  6781. elif tokens[2]=='month':
  6782. query = db[table][field].month()==args[i]
  6783. elif tokens[2]=='day':
  6784. query = db[table][field].day()==args[i]
  6785. elif tokens[2]=='hour':
  6786. query = db[table][field].hour()==args[i]
  6787. elif tokens[2]=='minute':
  6788. query = db[table][field].minutes()==args[i]
  6789. elif tokens[2]=='second':
  6790. query = db[table][field].seconds()==args[i]
  6791. elif tokens[2]=='startswith':
  6792. query = db[table][field].startswith(args[i])
  6793. elif tokens[2]=='contains':
  6794. query = db[table][field].contains(args[i])
  6795. else:
  6796. raise RuntimeError("invalid pattern: %s" % pattern)
  6797. if len(tokens)==4 and tokens[3]=='not':
  6798. query = ~query
  6799. elif len(tokens)>=4:
  6800. raise RuntimeError("invalid pattern: %s" % pattern)
  6801. if not otable and isinstance(queries,dict):
  6802. dbset = db(queries[table])
  6803. if basequery is not None:
  6804. dbset = dbset(basequery)
  6805. dbset=dbset(query)
  6806. else:
  6807. raise RuntimeError("missing relation in pattern: %s" % pattern)
  6808. elif re2.match(tag) and args[i]==tag[:tag.find('[')]:
  6809. ref = tag[tag.find('[')+1:-1]
  6810. if '.' in ref and otable:
  6811. table,field = ref.split('.')
  6812. selfld = '_id'
  6813. if db[table][field].type.startswith('reference '):
  6814. refs = [ x.name for x in db[otable] if x.type == db[table][field].type ]
  6815. else:
  6816. refs = [ x.name for x in db[table]._referenced_by if x.tablename==otable ]
  6817. if refs:
  6818. selfld = refs[0]
  6819. if nested_select:
  6820. try:
  6821. dbset=db(db[table][field].belongs(dbset._select(db[otable][selfld])))
  6822. except ValueError:
  6823. return Row({'status':400,'pattern':pattern,
  6824. 'error':'invalid path','response':None})
  6825. else:
  6826. items = [item.id for item in dbset.select(db[otable][selfld])]
  6827. dbset=db(db[table][field].belongs(items))
  6828. else:
  6829. table = ref
  6830. if not otable and isinstance(queries,dict):
  6831. dbset = db(queries[table])
  6832. dbset=dbset(db[table])
  6833. elif tag==':field' and table:
  6834. # print 're3:'+tag
  6835. field = args[i]
  6836. if not field in db[table]: break
  6837. # hand-built patterns should respect .readable=False as well
  6838. if not db[table][field].readable:
  6839. return Row({'status':418,'pattern':pattern,
  6840. 'error':'I\'m a teapot','response':None})
  6841. try:
  6842. distinct = vars.get('distinct', False) == 'True'
  6843. offset = long(vars.get('offset',None) or 0)
  6844. limits = (offset,long(vars.get('limit',None) or 1000)+offset)
  6845. except ValueError:
  6846. return Row({'status':400,'error':'invalid limits','response':None})
  6847. items = dbset.select(db[table][field], distinct=distinct, limitby=limits)
  6848. if items:
  6849. return Row({'status':200,'response':items,
  6850. 'pattern':pattern})
  6851. else:
  6852. return Row({'status':404,'pattern':pattern,
  6853. 'error':'no record found','response':None})
  6854. elif tag != args[i]:
  6855. break
  6856. otable = table
  6857. i += 1
  6858. if i==len(tags) and table:
  6859. ofields = vars.get('order',db[table]._id.name).split('|')
  6860. try:
  6861. orderby = [db[table][f] if not f.startswith('~') else ~db[table][f[1:]] for f in ofields]
  6862. except (KeyError, AttributeError):
  6863. return Row({'status':400,'error':'invalid orderby','response':None})
  6864. if exposedfields:
  6865. fields = [field for field in db[table] if str(field).split('.')[-1] in exposedfields and field.readable]
  6866. else:
  6867. fields = [field for field in db[table] if field.readable]
  6868. count = dbset.count()
  6869. try:
  6870. offset = long(vars.get('offset',None) or 0)
  6871. limits = (offset,long(vars.get('limit',None) or 1000)+offset)
  6872. except ValueError:
  6873. return Row({'status':400,'error':'invalid limits','response':None})
  6874. if count > limits[1]-limits[0]:
  6875. return Row({'status':400,'error':'too many records','response':None})
  6876. try:
  6877. response = dbset.select(limitby=limits,orderby=orderby,*fields)
  6878. except ValueError:
  6879. return Row({'status':400,'pattern':pattern,
  6880. 'error':'invalid path','response':None})
  6881. return Row({'status':200,'response':response,
  6882. 'pattern':pattern,'count':count})
  6883. return Row({'status':400,'error':'no matching pattern','response':None})
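# A minimal usage sketch for the pattern parser above (in web2py this
# method is exposed as db.parse_as_rest; the 'person' table and its
# 'name' field below are illustrative assumptions):
#
#     patterns = ['/friends[person]', '/{person.name}/:field']
#     result = db.parse_as_rest(patterns, args, vars)
#     if result.status == 200:
#         rows = result.response   # a Rows object
#     else:
#         handle(result.error)     # hypothetical error handler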
  6884. def define_table(
  6885. self,
  6886. tablename,
  6887. *fields,
  6888. **args
  6889. ):
  6890. if not isinstance(tablename,str):
  6891. raise SyntaxError("missing table name")
  6892. elif hasattr(self,tablename) or tablename in self.tables:
  6893. if not args.get('redefine',False):
  6894. raise SyntaxError('table already defined: %s' % tablename)
  6895. elif tablename.startswith('_') or hasattr(self,tablename) or \
  6896. REGEX_PYTHON_KEYWORDS.match(tablename):
  6897. raise SyntaxError('invalid table name: %s' % tablename)
  6898. elif self.check_reserved:
  6899. self.check_reserved_keyword(tablename)
  6900. else:
  6901. invalid_args = set(args)-TABLE_ARGS
  6902. if invalid_args:
  6903. raise SyntaxError('invalid table "%s" attributes: %s' \
  6904. % (tablename,invalid_args))
  6905. if self._lazy_tables and not tablename in self._LAZY_TABLES:
  6906. self._LAZY_TABLES[tablename] = (tablename,fields,args)
  6907. table = None
  6908. else:
  6909. table = self.lazy_define_table(tablename,*fields,**args)
  6910. if not tablename in self.tables:
  6911. self.tables.append(tablename)
  6912. return table
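# Sketch of define_table usage, including the 'redefine' escape hatch
# checked above (table and field names are illustrative):
#
#     db.define_table('thing', Field('name'), format='%(name)s')
#     db.define_table('thing', Field('name'), redefine=True)  # ok
#     db.define_table('thing', Field('name'))  # raises SyntaxError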
  6913. def lazy_define_table(
  6914. self,
  6915. tablename,
  6916. *fields,
  6917. **args
  6918. ):
  6919. args_get = args.get
  6920. common_fields = self._common_fields
  6921. if common_fields:
  6922. fields = list(fields) + list(common_fields)
  6923. table_class = args_get('table_class',Table)
  6924. table = table_class(self, tablename, *fields, **args)
  6925. table._actual = True
  6926. self[tablename] = table
  6927. # must follow above line to handle self references
  6928. table._create_references()
  6929. for field in table:
  6930. if field.requires == DEFAULT:
  6931. field.requires = sqlhtml_validators(field)
  6932. migrate = self._migrate_enabled and args_get('migrate',self._migrate)
  6933. if migrate and not self._uri in (None,'None') \
  6934. or self._adapter.dbengine=='google:datastore':
  6935. fake_migrate = self._fake_migrate_all or \
  6936. args_get('fake_migrate',self._fake_migrate)
  6937. polymodel = args_get('polymodel',None)
  6938. try:
  6939. GLOBAL_LOCKER.acquire()
  6940. self._lastsql = self._adapter.create_table(
  6941. table,migrate=migrate,
  6942. fake_migrate=fake_migrate,
  6943. polymodel=polymodel)
  6944. finally:
  6945. GLOBAL_LOCKER.release()
  6946. else:
  6947. table._dbt = None
  6948. on_define = args_get('on_define',None)
  6949. if on_define: on_define(table)
  6950. return table
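# With DAL(..., lazy_tables=True) define_table only records the
# definition and returns None; the Table is actually built by
# lazy_define_table on first attribute access (see __getattr__ below).
# Sketch:
#
#     db = DAL('sqlite:memory', lazy_tables=True)
#     db.define_table('point', Field('x', 'integer'))  # returns None
#     db.point.insert(x=1)  # first access triggers the real definition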
  6951. def as_dict(self, flat=False, sanitize=True, field_options=True):
  6952. dbname = db_uid = uri = None
  6953. if not sanitize:
  6954. uri, dbname, db_uid = (self._uri, self._dbname, self._db_uid)
  6955. db_as_dict = dict(items={}, tables=[], uri=uri, dbname=dbname,
  6956. db_uid=db_uid,
  6957. **dict([(k, getattr(self, "_" + k)) for
  6958. k in 'pool_size','folder','db_codec',
  6959. 'check_reserved','migrate','fake_migrate',
  6960. 'migrate_enabled','fake_migrate_all',
  6961. 'decode_credentials','driver_args',
  6962. 'adapter_args', 'attempts',
  6963. 'bigint_id','debug','lazy_tables',
  6964. 'do_connect']))
  6965. for table in self:
  6966. tablename = str(table)
  6967. db_as_dict["tables"].append(tablename)
  6968. db_as_dict["items"][tablename] = table.as_dict(flat=flat,
  6969. sanitize=sanitize,
  6970. field_options=field_options)
  6971. return db_as_dict
  6972. def as_xml(self, sanitize=True, field_options=True):
  6973. if not have_serializers:
  6974. raise ImportError("No xml serializers available")
  6975. d = self.as_dict(flat=True, sanitize=sanitize,
  6976. field_options=field_options)
  6977. return serializers.xml(d)
  6978. def as_json(self, sanitize=True, field_options=True):
  6979. if not have_serializers:
  6980. raise ImportError("No json serializers available")
  6981. d = self.as_dict(flat=True, sanitize=sanitize,
  6982. field_options=field_options)
  6983. return serializers.json(d)
  6984. def as_yaml(self, sanitize=True, field_options=True):
  6985. if not have_serializers:
  6986. raise ImportError("No YAML serializers available")
  6987. d = self.as_dict(flat=True, sanitize=sanitize,
  6988. field_options=field_options)
  6989. return serializers.yaml(d)
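# The serializers above all go through as_dict. Sketch (assumes a
# 'person' table is defined; as_xml needs the optional serializers):
#
#     meta = db.as_dict(flat=True, sanitize=True)
#     assert 'person' in meta['tables']
#     xml_meta = db.as_xml()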
  6990. def __contains__(self, tablename):
  6991. try:
  6992. return tablename in self.tables
  6993. except AttributeError:
  6994. # The instance has no .tables attribute yet
  6995. return False
  6996. has_key = __contains__
  6997. def get(self,key,default=None):
  6998. return self.__dict__.get(key,default)
  6999. def __iter__(self):
  7000. for tablename in self.tables:
  7001. yield self[tablename]
  7002. def __getitem__(self, key):
  7003. return self.__getattr__(str(key))
  7004. def __getattr__(self, key):
  7005. if ogetattr(self,'_lazy_tables') and \
  7006. key in ogetattr(self,'_LAZY_TABLES'):
  7007. tablename, fields, args = self._LAZY_TABLES.pop(key)
  7008. return self.lazy_define_table(tablename,*fields,**args)
  7009. return ogetattr(self, key)
  7010. def __setitem__(self, key, value):
  7011. osetattr(self, str(key), value)
  7012. def __setattr__(self, key, value):
  7013. if key[:1]!='_' and key in self:
  7014. raise SyntaxError(
  7015. 'Object %s exists and cannot be redefined' % key)
  7016. osetattr(self,key,value)
  7017. __delitem__ = object.__delattr__
  7018. def __repr__(self):
  7019. if hasattr(self,'_uri'):
  7020. return '<DAL uri="%s">' % hide_password(str(self._uri))
  7021. else:
  7022. return '<DAL db_uid="%s">' % self._db_uid
  7023. def smart_query(self,fields,text):
  7024. return Set(self, smart_query(fields,text))
  7025. def __call__(self, query=None, ignore_common_filters=None):
  7026. if isinstance(query,Table):
  7027. query = self._adapter.id_query(query)
  7028. elif isinstance(query,Field):
  7029. query = query!=None
  7030. elif isinstance(query, dict):
  7031. icf = query.get("ignore_common_filters")
  7032. if icf: ignore_common_filters = icf
  7033. return Set(self, query, ignore_common_filters=ignore_common_filters)
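# __call__ builds a Set; the accepted shortcuts shown above, as a
# sketch (assumes a 'person' table with a 'name' field):
#
#     db(db.person)                    # all records with a valid id
#     db(db.person.name)               # records where name is not NULL
#     db(db.person.name == 'James')    # an explicit Query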
  7034. def commit(self):
  7035. self._adapter.commit()
  7036. def rollback(self):
  7037. self._adapter.rollback()
  7038. def close(self):
  7039. self._adapter.close()
  7040. if self._db_uid in THREAD_LOCAL.db_instances:
  7041. db_group = THREAD_LOCAL.db_instances[self._db_uid]
  7042. db_group.remove(self)
  7043. if not db_group:
  7044. del THREAD_LOCAL.db_instances[self._db_uid]
  7045. def executesql(self, query, placeholders=None, as_dict=False,
  7046. fields=None, colnames=None):
  7047. """
7048. placeholders is optional and defaults to None.
7049. If using raw SQL with placeholders, placeholders may be
7050. a sequence of values to be substituted in,
7051. or (if supported by the DB driver) a dictionary with keys
7052. matching named placeholders in your SQL.
7053. Added 2009-12-05 "as_dict" optional argument. It defaults to
7054. False. If set to True, the results cursor returned by the DB
7055. driver will be converted to a sequence of dictionaries keyed
7056. with the db field names. Tested with SQLite but should work
7057. with any database, since the cursor.description used to get
7058. the field names is part of the Python DB-API 2.0 specs. Results
7059. returned with as_dict=True are the same as those returned when
7060. applying .to_list() to a DAL query.
  7061. [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
  7062. Added 2012-08-24 "fields" and "colnames" optional arguments. If either
  7063. is provided, the results cursor returned by the DB driver will be
  7064. converted to a DAL Rows object using the db._adapter.parse() method.
  7065. The "fields" argument is a list of DAL Field objects that match the
  7066. fields returned from the DB. The Field objects should be part of one or
  7067. more Table objects defined on the DAL object. The "fields" list can
  7068. include one or more DAL Table objects in addition to or instead of
  7069. including Field objects, or it can be just a single table (not in a
  7070. list). In that case, the Field objects will be extracted from the
  7071. table(s).
  7072. Instead of specifying the "fields" argument, the "colnames" argument
  7073. can be specified as a list of field names in tablename.fieldname format.
  7074. Again, these should represent tables and fields defined on the DAL
  7075. object.
  7076. It is also possible to specify both "fields" and the associated
  7077. "colnames". In that case, "fields" can also include DAL Expression
  7078. objects in addition to Field objects. For Field objects in "fields",
  7079. the associated "colnames" must still be in tablename.fieldname format.
  7080. For Expression objects in "fields", the associated "colnames" can
  7081. be any arbitrary labels.
  7082. Note, the DAL Table objects referred to by "fields" or "colnames" can
  7083. be dummy tables and do not have to represent any real tables in the
  7084. database. Also, note that the "fields" and "colnames" must be in the
  7085. same order as the fields in the results cursor returned from the DB.
  7086. """
  7087. adapter = self._adapter
  7088. if placeholders:
  7089. adapter.execute(query, placeholders)
  7090. else:
  7091. adapter.execute(query)
  7092. if as_dict:
  7093. if not hasattr(adapter.cursor,'description'):
  7094. raise RuntimeError("database does not support executesql(...,as_dict=True)")
  7095. # Non-DAL legacy db query, converts cursor results to dict.
  7096. # sequence of 7-item sequences. each sequence tells about a column.
  7097. # first item is always the field name according to Python Database API specs
  7098. columns = adapter.cursor.description
  7099. # reduce the column info down to just the field names
  7100. fields = [f[0] for f in columns]
  7101. # will hold our finished resultset in a list
  7102. data = adapter._fetchall()
  7103. # convert the list for each row into a dictionary so it's
  7104. # easier to work with. row['field_name'] rather than row[0]
  7105. return [dict(zip(fields,row)) for row in data]
  7106. try:
  7107. data = adapter._fetchall()
  7108. except:
  7109. return None
  7110. if fields or colnames:
  7111. fields = [] if fields is None else fields
  7112. if not isinstance(fields, list):
  7113. fields = [fields]
  7114. extracted_fields = []
  7115. for field in fields:
  7116. if isinstance(field, Table):
  7117. extracted_fields.extend([f for f in field])
  7118. else:
  7119. extracted_fields.append(field)
  7120. if not colnames:
  7121. colnames = ['%s.%s' % (f.tablename, f.name)
  7122. for f in extracted_fields]
  7123. data = adapter.parse(
  7124. data, fields=extracted_fields, colnames=colnames)
  7125. return data
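# executesql sketches (a 'person' table is assumed; the placeholder
# style depends on the DB driver, e.g. qmark for sqlite3):
#
#     db.executesql('SELECT * FROM person WHERE name=?;', ('James',))
#     dicts = db.executesql('SELECT * FROM person;', as_dict=True)
#     rows = db.executesql('SELECT * FROM person;', fields=db.person)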
  7126. def _remove_references_to(self, thistable):
  7127. for table in self:
  7128. table._referenced_by = [field for field in table._referenced_by
  7129. if not field.table==thistable]
  7130. def export_to_csv_file(self, ofile, *args, **kwargs):
7131. step = long(kwargs.get('max_fetch_rows',500))
  7132. write_colnames = kwargs['write_colnames'] = \
  7133. kwargs.get("write_colnames", True)
  7134. for table in self.tables:
  7135. ofile.write('TABLE %s\r\n' % table)
  7136. query = self._adapter.id_query(self[table])
  7137. nrows = self(query).count()
  7138. kwargs['write_colnames'] = write_colnames
  7139. for k in range(0,nrows,step):
  7140. self(query).select(limitby=(k,k+step)).export_to_csv_file(
  7141. ofile, *args, **kwargs)
  7142. kwargs['write_colnames'] = False
  7143. ofile.write('\r\n\r\n')
  7144. ofile.write('END')
  7145. def import_from_csv_file(self, ifile, id_map=None, null='<NULL>',
  7146. unique='uuid', map_tablenames=None,
  7147. ignore_missing_tables=False,
  7148. *args, **kwargs):
  7149. #if id_map is None: id_map={}
  7150. id_offset = {} # only used if id_map is None
  7151. map_tablenames = map_tablenames or {}
  7152. for line in ifile:
  7153. line = line.strip()
  7154. if not line:
  7155. continue
  7156. elif line == 'END':
  7157. return
  7158. elif not line.startswith('TABLE ') or \
  7159. not line[6:] in self.tables:
  7160. raise SyntaxError('invalid file format')
  7161. else:
  7162. tablename = line[6:]
  7163. tablename = map_tablenames.get(tablename,tablename)
  7164. if tablename is not None and tablename in self.tables:
  7165. self[tablename].import_from_csv_file(
  7166. ifile, id_map, null, unique, id_offset,
  7167. *args, **kwargs)
  7168. elif tablename is None or ignore_missing_tables:
  7169. # skip all non-empty lines
  7170. for line in ifile:
  7171. if not line.strip():
7172. break
  7173. else:
  7174. raise RuntimeError("Unable to import table that does not exist.\nTry db.import_from_csv_file(..., map_tablenames={'table':'othertable'},ignore_missing_tables=True)")
  7175. def DAL_unpickler(db_uid):
  7176. return DAL('<zombie>',db_uid=db_uid)
  7177. def DAL_pickler(db):
  7178. return DAL_unpickler, (db._db_uid,)
  7179. copyreg.pickle(DAL, DAL_pickler, DAL_unpickler)
  7180. class SQLALL(object):
  7181. """
  7182. Helper class providing a comma-separated string having all the field names
  7183. (prefixed by table name and '.')
  7184. normally only called from within gluon.sql
  7185. """
  7186. def __init__(self, table):
  7187. self._table = table
  7188. def __str__(self):
  7189. return ', '.join([str(field) for field in self._table])
  7190. # class Reference(int):
  7191. class Reference(long):
  7192. def __allocate(self):
  7193. if not self._record:
  7194. self._record = self._table[long(self)]
  7195. if not self._record:
  7196. raise RuntimeError(
  7197. "Using a recursive select but encountered a broken reference: %s %d"%(self._table, long(self)))
7198. def __getattr__(self, key, default=None):
7199. if key == 'id':
7200. return long(self)
7201. self.__allocate()
7202. return self._record.get(key, default)
  7203. def get(self, key, default=None):
  7204. return self.__getattr__(key, default)
  7205. def __setattr__(self, key, value):
  7206. if key.startswith('_'):
  7207. long.__setattr__(self, key, value)
  7208. return
  7209. self.__allocate()
  7210. self._record[key] = value
  7211. def __getitem__(self, key):
  7212. if key == 'id':
  7213. return long(self)
  7214. self.__allocate()
  7215. return self._record.get(key, None)
  7216. def __setitem__(self,key,value):
  7217. self.__allocate()
  7218. self._record[key] = value
  7219. def Reference_unpickler(data):
  7220. return marshal.loads(data)
  7221. def Reference_pickler(data):
  7222. try:
  7223. marshal_dump = marshal.dumps(long(data))
  7224. except AttributeError:
  7225. marshal_dump = 'i%s' % struct.pack('<i', long(data))
  7226. return (Reference_unpickler, (marshal_dump,))
  7227. copyreg.pickle(Reference, Reference_pickler, Reference_unpickler)
  7228. class MethodAdder(object):
  7229. def __init__(self,table):
  7230. self.table = table
  7231. def __call__(self):
  7232. return self.register()
  7233. def __getattr__(self,method_name):
  7234. return self.register(method_name)
  7235. def register(self,method_name=None):
  7236. def _decorated(f):
  7237. instance = self.table
  7238. import types
  7239. method = types.MethodType(f, instance, instance.__class__)
  7240. name = method_name or f.func_name
  7241. setattr(instance, name, method)
  7242. return f
  7243. return _decorated
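# MethodAdder lets user code attach methods to a table; a sketch
# (the 'greet' name is illustrative):
#
#     @db.person.add_method.greet
#     def greet(table):
#         return 'table %s' % table._tablename
#     db.person.greet()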
  7244. class Table(object):
  7245. """
  7246. an instance of this class represents a database table
  7247. Example::
  7248. db = DAL(...)
  7249. db.define_table('users', Field('name'))
  7250. db.users.insert(name='me') # print db.users._insert(...) to see SQL
  7251. db.users.drop()
  7252. """
  7253. def __init__(
  7254. self,
  7255. db,
  7256. tablename,
  7257. *fields,
  7258. **args
  7259. ):
  7260. """
  7261. Initializes the table and performs checking on the provided fields.
7262. Each table will automatically have an 'id' field.
  7263. If a field is of type Table, the fields (excluding 'id') from that table
  7264. will be used instead.
  7265. :raises SyntaxError: when a supplied field is of incorrect type.
  7266. """
  7267. self._actual = False # set to True by define_table()
  7268. self._tablename = tablename
  7269. self._ot = args.get('actual_name')
  7270. self._sequence_name = args.get('sequence_name') or \
  7271. db and db._adapter.sequence_name(tablename)
  7272. self._trigger_name = args.get('trigger_name') or \
  7273. db and db._adapter.trigger_name(tablename)
  7274. self._common_filter = args.get('common_filter')
  7275. self._format = args.get('format')
  7276. self._singular = args.get(
  7277. 'singular',tablename.replace('_',' ').capitalize())
  7278. self._plural = args.get(
  7279. 'plural',pluralize(self._singular.lower()).capitalize())
7280. # horrible but needed for backward compatibility of appadmin:
  7281. if 'primarykey' in args and args['primarykey'] is not None:
  7282. self._primarykey = args.get('primarykey')
  7283. self._before_insert = []
  7284. self._before_update = [Set.delete_uploaded_files]
  7285. self._before_delete = [Set.delete_uploaded_files]
  7286. self._after_insert = []
  7287. self._after_update = []
  7288. self._after_delete = []
  7289. self.add_method = MethodAdder(self)
  7290. fieldnames,newfields=set(),[]
  7291. if hasattr(self,'_primarykey'):
  7292. if not isinstance(self._primarykey,list):
  7293. raise SyntaxError(
  7294. "primarykey must be a list of fields from table '%s'" \
  7295. % tablename)
  7296. if len(self._primarykey)==1:
  7297. self._id = [f for f in fields if isinstance(f,Field) \
  7298. and f.name==self._primarykey[0]][0]
  7299. elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
  7300. field = Field('id', 'id')
  7301. newfields.append(field)
  7302. fieldnames.add('id')
  7303. self._id = field
  7304. virtual_fields = []
  7305. for field in fields:
  7306. if isinstance(field, (FieldMethod, FieldVirtual)):
  7307. virtual_fields.append(field)
  7308. elif isinstance(field, Field) and not field.name in fieldnames:
  7309. if field.db is not None:
  7310. field = copy.copy(field)
  7311. newfields.append(field)
  7312. fieldnames.add(field.name)
  7313. if field.type=='id':
  7314. self._id = field
  7315. elif isinstance(field, Table):
  7316. table = field
  7317. for field in table:
  7318. if not field.name in fieldnames and not field.type=='id':
  7319. t2 = not table._actual and self._tablename
  7320. field = field.clone(point_self_references_to=t2)
  7321. newfields.append(field)
  7322. fieldnames.add(field.name)
  7323. elif not isinstance(field, (Field, Table)):
  7324. raise SyntaxError(
  7325. 'define_table argument is not a Field or Table: %s' % field)
  7326. fields = newfields
  7327. self._db = db
  7328. tablename = tablename
  7329. self._fields = SQLCallableList()
  7330. self.virtualfields = []
  7331. fields = list(fields)
  7332. if db and db._adapter.uploads_in_blob==True:
  7333. uploadfields = [f.name for f in fields if f.type=='blob']
  7334. for field in fields:
  7335. fn = field.uploadfield
  7336. if isinstance(field, Field) and field.type == 'upload'\
  7337. and fn is True:
  7338. fn = field.uploadfield = '%s_blob' % field.name
  7339. if isinstance(fn,str) and not fn in uploadfields:
  7340. fields.append(Field(fn,'blob',default='',
  7341. writable=False,readable=False))
  7342. lower_fieldnames = set()
  7343. reserved = dir(Table) + ['fields']
  7344. for field in fields:
  7345. field_name = field.name
  7346. if db and db.check_reserved:
  7347. db.check_reserved_keyword(field_name)
  7348. elif field_name in reserved:
  7349. raise SyntaxError("field name %s not allowed" % field_name)
  7350. if field_name.lower() in lower_fieldnames:
  7351. raise SyntaxError("duplicate field %s in table %s" \
  7352. % (field_name, tablename))
  7353. else:
  7354. lower_fieldnames.add(field_name.lower())
  7355. self.fields.append(field_name)
  7356. self[field_name] = field
  7357. if field.type == 'id':
  7358. self['id'] = field
  7359. field.tablename = field._tablename = tablename
  7360. field.table = field._table = self
  7361. field.db = field._db = db
  7362. self.ALL = SQLALL(self)
  7363. if hasattr(self,'_primarykey'):
  7364. for k in self._primarykey:
  7365. if k not in self.fields:
  7366. raise SyntaxError(
  7367. "primarykey must be a list of fields from table '%s " % tablename)
  7368. else:
  7369. self[k].notnull = True
  7370. for field in virtual_fields:
  7371. self[field.name] = field
  7372. @property
  7373. def fields(self):
  7374. return self._fields
  7375. def update(self,*args,**kwargs):
  7376. raise RuntimeError("Syntax Not Supported")
  7377. def _enable_record_versioning(self,
  7378. archive_db=None,
  7379. archive_name = '%(tablename)s_archive',
  7380. current_record = 'current_record',
  7381. is_active = 'is_active'):
  7382. db = self._db
  7383. archive_db = archive_db or db
  7384. archive_name = archive_name % dict(tablename=self._tablename)
  7385. if archive_name in archive_db.tables():
7386. return # do not try to define the archive if it already exists
  7387. fieldnames = self.fields()
  7388. same_db = archive_db is db
  7389. field_type = self if same_db else 'bigint'
  7390. clones = []
  7391. for field in self:
  7392. nfk = same_db or not field.type.startswith('reference')
  7393. clones.append(field.clone(
  7394. unique=False, type=field.type if nfk else 'bigint'))
  7395. archive_db.define_table(
  7396. archive_name, Field(current_record,field_type), *clones)
  7397. self._before_update.append(
  7398. lambda qset,fs,db=archive_db,an=archive_name,cn=current_record:
  7399. archive_record(qset,fs,db[an],cn))
  7400. if is_active and is_active in fieldnames:
  7401. self._before_delete.append(
  7402. lambda qset: qset.update(is_active=False))
  7403. newquery = lambda query, t=self, name=self._tablename: \
  7404. reduce(AND,[db[tn].is_active == True
  7405. for tn in db._adapter.tables(query)
  7406. if tn==name or getattr(db[tn],'_ot',None)==name])
  7407. query = self._common_filter
  7408. if query:
  7409. newquery = query & newquery
  7410. self._common_filter = newquery
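# Record-versioning sketch (table and field names are illustrative):
#
#     db.define_table('doc', Field('body'),
#                     Field('is_active', 'boolean', default=True))
#     db.doc._enable_record_versioning()
#     # defines 'doc_archive' with a 'current_record' field; updates
#     # archive the old row, and deletes become is_active=False updates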
  7411. def _validate(self,**vars):
  7412. errors = Row()
  7413. for key,value in vars.iteritems():
  7414. value,error = self[key].validate(value)
  7415. if error:
  7416. errors[key] = error
  7417. return errors
  7418. def _create_references(self):
  7419. db = self._db
  7420. pr = db._pending_references
  7421. self._referenced_by = []
  7422. self._references = []
  7423. for field in self:
  7424. fieldname = field.name
  7425. field_type = field.type
  7426. if isinstance(field_type,str) and field_type[:10] == 'reference ':
  7427. ref = field_type[10:].strip()
  7428. if not ref.strip():
  7429. raise SyntaxError('Table: reference to nothing: %s' %ref)
  7430. if '.' in ref:
  7431. rtablename, rfieldname = ref.split('.',1)
  7432. else:
  7433. rtablename, rfieldname = ref, None
  7434. if not rtablename in db:
  7435. pr[rtablename] = pr.get(rtablename,[]) + [field]
  7436. continue
  7437. rtable = db[rtablename]
  7438. if rfieldname:
  7439. if not hasattr(rtable,'_primarykey'):
  7440. raise SyntaxError(
  7441. 'keyed tables can only reference other keyed tables (for now)')
  7442. if rfieldname not in rtable.fields:
  7443. raise SyntaxError(
  7444. "invalid field '%s' for referenced table '%s' in table '%s'" \
  7445. % (rfieldname, rtablename, self._tablename))
  7446. rfield = rtable[rfieldname]
  7447. else:
  7448. rfield = rtable._id
  7449. rtable._referenced_by.append(field)
  7450. field.referent = rfield
  7451. self._references.append(field)
  7452. else:
  7453. field.referent = None
  7454. for referee in pr.get(self._tablename,[]):
  7455. self._referenced_by.append(referee)
  7456. def _filter_fields(self, record, id=False):
  7457. return dict([(k, v) for (k, v) in record.iteritems() if k
  7458. in self.fields and (self[k].type!='id' or id)])
  7459. def _build_query(self,key):
  7460. """ for keyed table only """
  7461. query = None
  7462. for k,v in key.iteritems():
  7463. if k in self._primarykey:
  7464. if query:
  7465. query = query & (self[k] == v)
  7466. else:
  7467. query = (self[k] == v)
  7468. else:
  7469. raise SyntaxError(
  7470. 'Field %s is not part of the primary key of %s' % \
  7471. (k,self._tablename))
  7472. return query
  7473. def __getitem__(self, key):
  7474. if not key:
  7475. return None
  7476. elif isinstance(key, dict):
  7477. """ for keyed table """
  7478. query = self._build_query(key)
  7479. return self._db(query).select(limitby=(0,1), orderby_on_limitby=False).first()
7480. elif str(key).isdigit() or ('google' in DRIVERS and isinstance(key, Key)):
  7481. return self._db(self._id == key).select(limitby=(0,1), orderby_on_limitby=False).first()
  7482. elif key:
  7483. return ogetattr(self, str(key))
  7484. def __call__(self, key=DEFAULT, **kwargs):
  7485. for_update = kwargs.get('_for_update',False)
  7486. if '_for_update' in kwargs: del kwargs['_for_update']
  7487. orderby = kwargs.get('_orderby',None)
  7488. if '_orderby' in kwargs: del kwargs['_orderby']
  7489. if not key is DEFAULT:
  7490. if isinstance(key, Query):
  7491. record = self._db(key).select(
  7492. limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
  7493. elif not str(key).isdigit():
  7494. record = None
  7495. else:
  7496. record = self._db(self._id == key).select(
  7497. limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
  7498. if record:
  7499. for k,v in kwargs.iteritems():
  7500. if record[k]!=v: return None
  7501. return record
  7502. elif kwargs:
  7503. query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.iteritems()])
  7504. return self._db(query).select(limitby=(0,1),for_update=for_update, orderby=orderby, orderby_on_limitby=False).first()
  7505. else:
  7506. return None
  7507. def __setitem__(self, key, value):
  7508. if isinstance(key, dict) and isinstance(value, dict):
  7509. """ option for keyed table """
  7510. if set(key.keys()) == set(self._primarykey):
  7511. value = self._filter_fields(value)
  7512. kv = {}
  7513. kv.update(value)
  7514. kv.update(key)
  7515. if not self.insert(**kv):
  7516. query = self._build_query(key)
  7517. self._db(query).update(**self._filter_fields(value))
  7518. else:
  7519. raise SyntaxError(
  7520. 'key must have all fields from primary key: %s'%\
  7521. (self._primarykey))
  7522. elif str(key).isdigit():
  7523. if key == 0:
  7524. self.insert(**self._filter_fields(value))
  7525. elif self._db(self._id == key)\
  7526. .update(**self._filter_fields(value)) is None:
  7527. raise SyntaxError('No such record: %s' % key)
  7528. else:
  7529. if isinstance(key, dict):
  7530. raise SyntaxError(
  7531. 'value must be a dictionary: %s' % value)
  7532. osetattr(self, str(key), value)
  7533. __getattr__ = __getitem__
  7534. def __setattr__(self, key, value):
  7535. if key[:1]!='_' and key in self:
  7536. raise SyntaxError('Object exists and cannot be redefined: %s' % key)
  7537. osetattr(self,key,value)
  7538. def __delitem__(self, key):
  7539. if isinstance(key, dict):
  7540. query = self._build_query(key)
  7541. if not self._db(query).delete():
  7542. raise SyntaxError('No such record: %s' % key)
  7543. elif not str(key).isdigit() or \
  7544. not self._db(self._id == key).delete():
  7545. raise SyntaxError('No such record: %s' % key)
  7546. def __contains__(self,key):
  7547. return hasattr(self,key)
  7548. has_key = __contains__
  7549. def items(self):
  7550. return self.__dict__.items()
  7551. def __iter__(self):
  7552. for fieldname in self.fields:
  7553. yield self[fieldname]
  7554. def iteritems(self):
  7555. return self.__dict__.iteritems()
  7556. def __repr__(self):
  7557. return '<Table %s (%s)>' % (self._tablename,','.join(self.fields()))
  7558. def __str__(self):
  7559. if self._ot is not None:
  7560. ot = self._db._adapter.QUOTE_TEMPLATE % self._ot
  7561. if 'Oracle' in str(type(self._db._adapter)):
  7562. return '%s %s' % (ot, self._tablename)
  7563. return '%s AS %s' % (ot, self._tablename)
  7564. return self._tablename
  7565. def _drop(self, mode = ''):
  7566. return self._db._adapter._drop(self, mode)
  7567. def drop(self, mode = ''):
  7568. return self._db._adapter.drop(self,mode)
  7569. def _listify(self,fields,update=False):
  7570. new_fields = {} # format: new_fields[name] = (field,value)
  7571. # store all fields passed as input in new_fields
  7572. for name in fields:
  7573. if not name in self.fields:
  7574. if name != 'id':
  7575. raise SyntaxError(
  7576. 'Field %s does not belong to the table' % name)
  7577. else:
  7578. field = self[name]
  7579. value = fields[name]
  7580. if field.filter_in:
  7581. value = field.filter_in(value)
  7582. new_fields[name] = (field,value)
  7583. # check all fields that should be in the table but are not passed
  7584. to_compute = []
  7585. for ofield in self:
  7586. name = ofield.name
  7587. if not name in new_fields:
  7588. # if field is supposed to be computed, compute it!
  7589. if ofield.compute: # save those to compute for later
  7590. to_compute.append((name,ofield))
7591. # if not an update, apply the field's default value if any
  7592. elif not update and not ofield.default is None:
  7593. value = ofield.default
  7594. fields[name] = value
  7595. new_fields[name] = (ofield,value)
7596. # if this is an update, use the update value instead
  7597. elif update and not ofield.update is None:
  7598. value = ofield.update
  7599. fields[name] = value
  7600. new_fields[name] = (ofield,value)
7601. # if the field is still missing but required, error
  7602. elif not update and ofield.required:
  7603. raise RuntimeError(
  7604. 'Table: missing required field: %s' % name)
  7605. # now deal with fields that are supposed to be computed
  7606. if to_compute:
  7607. row = Row(fields)
  7608. for name,ofield in to_compute:
  7609. # try compute it
  7610. try:
  7611. row[name] = new_value = ofield.compute(row)
  7612. new_fields[name] = (ofield, new_value)
  7613. except (KeyError, AttributeError):
  7614. # error silently unless field is required!
  7615. if ofield.required:
  7616. raise SyntaxError('unable to compute field: %s' % name)
  7617. return new_fields.values()
  7618. def _attempt_upload(self, fields):
  7619. for field in self:
  7620. if field.type=='upload' and field.name in fields:
  7621. value = fields[field.name]
  7622. if value is not None and not isinstance(value,str):
  7623. if hasattr(value,'file') and hasattr(value,'filename'):
  7624. new_name = field.store(value.file,filename=value.filename)
  7625. elif hasattr(value,'read') and hasattr(value,'name'):
  7626. new_name = field.store(value,filename=value.name)
  7627. else:
  7628. raise RuntimeError("Unable to handle upload")
  7629. fields[field.name] = new_name
  7630. def _defaults(self, fields):
  7631. "If there are no fields/values specified, return table defaults"
  7632. if not fields:
  7633. fields = {}
  7634. for field in self:
  7635. if field.type != "id":
  7636. fields[field.name] = field.default
  7637. return fields
  7638. def _insert(self, **fields):
  7639. fields = self._defaults(fields)
  7640. return self._db._adapter._insert(self, self._listify(fields))
  7641. def insert(self, **fields):
  7642. fields = self._defaults(fields)
  7643. self._attempt_upload(fields)
  7644. if any(f(fields) for f in self._before_insert): return 0
  7645. ret = self._db._adapter.insert(self, self._listify(fields))
  7646. if ret and self._after_insert:
  7647. fields = Row(fields)
  7648. [f(fields,ret) for f in self._after_insert]
  7649. return ret
  7650. def validate_and_insert(self,**fields):
  7651. response = Row()
  7652. response.errors = Row()
  7653. new_fields = copy.copy(fields)
  7654. for key,value in fields.iteritems():
  7655. value,error = self[key].validate(value)
  7656. if error:
  7657. response.errors[key] = "%s" % error
  7658. else:
  7659. new_fields[key] = value
  7660. if not response.errors:
  7661. response.id = self.insert(**new_fields)
  7662. else:
  7663. response.id = None
  7664. return response
  7665. def update_or_insert(self, _key=DEFAULT, **values):
  7666. if _key is DEFAULT:
  7667. record = self(**values)
  7668. elif isinstance(_key,dict):
  7669. record = self(**_key)
  7670. else:
  7671. record = self(_key)
  7672. if record:
  7673. record.update_record(**values)
  7674. newid = None
  7675. else:
  7676. newid = self.insert(**values)
  7677. return newid
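# Sketches for the two insert helpers above (a 'person' table with a
# 'name' field is assumed):
#
#     ret = db.person.validate_and_insert(name='James')
#     if ret.errors: ...            # ret.id is None on failure
#     db.person.update_or_insert(db.person.name == 'James',
#                                name='James')  # new id, or None on update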
  7678. def bulk_insert(self, items):
  7679. """
  7680. here items is a list of dictionaries
  7681. """
  7682. items = [self._listify(item) for item in items]
  7683. if any(f(item) for item in items for f in self._before_insert):return 0
  7684. ret = self._db._adapter.bulk_insert(self,items)
  7685. ret and [[f(item,ret[k]) for k,item in enumerate(items)] for f in self._after_insert]
  7686. return ret
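# bulk_insert sketch; each dict becomes one record and the adapter
# typically returns the new ids:
#
#     ids = db.person.bulk_insert([{'name': 'Alex'}, {'name': 'Bob'}])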
  7687. def _truncate(self, mode = None):
  7688. return self._db._adapter._truncate(self, mode)
  7689. def truncate(self, mode = None):
  7690. return self._db._adapter.truncate(self, mode)
  7691. def import_from_csv_file(
  7692. self,
  7693. csvfile,
  7694. id_map=None,
  7695. null='<NULL>',
  7696. unique='uuid',
  7697. id_offset=None, # id_offset used only when id_map is None
  7698. *args, **kwargs
  7699. ):
  7700. """
  7701. Import records from csv file.
7702. Column headers must have the same names as the table fields.
7703. Field 'id' is ignored.
7704. If column names read 'table.field' the 'table.' prefix is ignored.
7705. 'unique' argument is a field which must be unique
7706. (typically a uuid field).
7707. 'restore' argument defaults to False;
7708. if set to True it will remove old values in the table first.
7709. 'id_map' if set to None will not map ids:
7710. the import will keep the id numbers of the restored table.
7711. This assumes that there is a field of type id that
7712. is integer and in incrementing order.
  7714. """
  7715. delimiter = kwargs.get('delimiter', ',')
  7716. quotechar = kwargs.get('quotechar', '"')
  7717. quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
  7718. restore = kwargs.get('restore', False)
  7719. if restore:
  7720. self._db[self].truncate()
  7721. reader = csv.reader(csvfile, delimiter=delimiter,
  7722. quotechar=quotechar, quoting=quoting)
  7723. colnames = None
  7724. if isinstance(id_map, dict):
  7725. if not self._tablename in id_map:
  7726. id_map[self._tablename] = {}
  7727. id_map_self = id_map[self._tablename]
  7728. def fix(field, value, id_map, id_offset):
  7729. list_reference_s='list:reference'
  7730. if value == null:
  7731. value = None
  7732. elif field.type=='blob':
  7733. value = base64.b64decode(value)
  7734. elif field.type=='double' or field.type=='float':
  7735. if not value.strip():
  7736. value = None
  7737. else:
  7738. value = float(value)
  7739. elif field.type in ('integer','bigint'):
  7740. if not value.strip():
  7741. value = None
  7742. else:
  7743. value = long(value)
  7744. elif field.type.startswith('list:string'):
  7745. value = bar_decode_string(value)
  7746. elif field.type.startswith(list_reference_s):
  7747. ref_table = field.type[len(list_reference_s):].strip()
  7748. if id_map is not None:
  7749. value = [id_map[ref_table][long(v)] \
  7750. for v in bar_decode_string(value)]
  7751. else:
  7752. value = [v for v in bar_decode_string(value)]
  7753. elif field.type.startswith('list:'):
  7754. value = bar_decode_integer(value)
  7755. elif id_map and field.type.startswith('reference'):
  7756. try:
  7757. value = id_map[field.type[9:].strip()][long(value)]
  7758. except KeyError:
  7759. pass
  7760. elif id_offset and field.type.startswith('reference'):
  7761. try:
  7762. value = id_offset[field.type[9:].strip()]+long(value)
  7763. except KeyError:
  7764. pass
  7765. return (field.name, value)
  7766. def is_id(colname):
  7767. if colname in self:
  7768. return self[colname].type == 'id'
  7769. else:
  7770. return False
  7771. first = True
  7772. unique_idx = None
  7773. for line in reader:
  7774. if not line:
  7775. break
  7776. if not colnames:
  7777. colnames = [x.split('.',1)[-1] for x in line][:len(line)]
  7778. cols, cid = [], None
  7779. for i,colname in enumerate(colnames):
  7780. if is_id(colname):
  7781. cid = i
  7782. else:
  7783. cols.append(i)
  7784. if colname == unique:
  7785. unique_idx = i
  7786. else:
  7787. items = [fix(self[colnames[i]], line[i], id_map, id_offset) \
  7788. for i in cols if colnames[i] in self.fields]
  7789. if not id_map and cid is not None and id_offset is not None and not unique_idx:
  7790. csv_id = long(line[cid])
  7791. curr_id = self.insert(**dict(items))
  7792. if first:
  7793. first = False
7794. # If the first curr_id is bigger than csv_id,
7795. # then we are not restoring but
7796. # extending the db table with the csv table
  7797. if curr_id>csv_id:
  7798. id_offset[self._tablename] = curr_id-csv_id
  7799. else:
  7800. id_offset[self._tablename] = 0
  7801. # create new id until we get the same as old_id+offset
  7802. while curr_id<csv_id+id_offset[self._tablename]:
  7803. self._db(self._db[self][colnames[cid]] == curr_id).delete()
  7804. curr_id = self.insert(**dict(items))
7805. # Check for a duplicate of the 'unique' field and,
7806. # if present, update instead of insert.
  7807. elif not unique_idx:
  7808. new_id = self.insert(**dict(items))
  7809. else:
  7810. unique_value = line[unique_idx]
  7811. query = self._db[self][unique] == unique_value
  7812. record = self._db(query).select().first()
  7813. if record:
  7814. record.update_record(**dict(items))
  7815. new_id = record[self._id.name]
  7816. else:
  7817. new_id = self.insert(**dict(items))
  7818. if id_map and cid is not None:
  7819. id_map_self[long(line[cid])] = new_id
  7820. def as_dict(self, flat=False, sanitize=True, field_options=True):
  7821. tablename = str(self)
  7822. table_as_dict = dict(name=tablename, items={}, fields=[],
  7823. sequence_name=self._sequence_name,
  7824. trigger_name=self._trigger_name,
  7825. common_filter=self._common_filter, format=self._format,
  7826. singular=self._singular, plural=self._plural)
  7827. for field in self:
  7828. if (field.readable or field.writable) or (not sanitize):
  7829. table_as_dict["fields"].append(field.name)
  7830. table_as_dict["items"][field.name] = \
  7831. field.as_dict(flat=flat, sanitize=sanitize,
  7832. options=field_options)
  7833. return table_as_dict
  7834. def as_xml(self, sanitize=True, field_options=True):
  7835. if not have_serializers:
  7836. raise ImportError("No xml serializers available")
  7837. d = self.as_dict(flat=True, sanitize=sanitize,
  7838. field_options=field_options)
  7839. return serializers.xml(d)
  7840. def as_json(self, sanitize=True, field_options=True):
  7841. if not have_serializers:
  7842. raise ImportError("No json serializers available")
  7843. d = self.as_dict(flat=True, sanitize=sanitize,
  7844. field_options=field_options)
  7845. return serializers.json(d)
  7846. def as_yaml(self, sanitize=True, field_options=True):
  7847. if not have_serializers:
  7848. raise ImportError("No YAML serializers available")
  7849. d = self.as_dict(flat=True, sanitize=sanitize,
  7850. field_options=field_options)
  7851. return serializers.yaml(d)
  7852. def with_alias(self, alias):
  7853. return self._db._adapter.alias(self,alias)
  7854. def on(self, query):
  7855. return Expression(self._db,self._db._adapter.ON,self,query)
  7856. def archive_record(qset,fs,archive_table,current_record):
  7857. tablenames = qset.db._adapter.tables(qset.query)
  7858. if len(tablenames)!=1: raise RuntimeError("cannot update join")
  7859. table = qset.db[tablenames[0]]
  7860. for row in qset.select():
  7861. fields = archive_table._filter_fields(row)
  7862. fields[current_record] = row.id
  7863. archive_table.insert(**fields)
  7864. return False
  7865. class Expression(object):
  7866. def __init__(
  7867. self,
  7868. db,
  7869. op,
  7870. first=None,
  7871. second=None,
  7872. type=None,
  7873. **optional_args
  7874. ):
  7875. self.db = db
  7876. self.op = op
  7877. self.first = first
  7878. self.second = second
  7879. self._table = getattr(first,'_table',None)
  7880. ### self._tablename = first._tablename ## CHECK
  7881. if not type and first and hasattr(first,'type'):
  7882. self.type = first.type
  7883. else:
  7884. self.type = type
  7885. self.optional_args = optional_args
  7886. def sum(self):
  7887. db = self.db
  7888. return Expression(db, db._adapter.AGGREGATE, self, 'SUM', self.type)
  7889. def max(self):
  7890. db = self.db
  7891. return Expression(db, db._adapter.AGGREGATE, self, 'MAX', self.type)
  7892. def min(self):
  7893. db = self.db
  7894. return Expression(db, db._adapter.AGGREGATE, self, 'MIN', self.type)
  7895. def len(self):
  7896. db = self.db
  7897. return Expression(db, db._adapter.LENGTH, self, None, 'integer')
  7898. def avg(self):
  7899. db = self.db
  7900. return Expression(db, db._adapter.AGGREGATE, self, 'AVG', self.type)
  7901. def abs(self):
  7902. db = self.db
  7903. return Expression(db, db._adapter.AGGREGATE, self, 'ABS', self.type)
  7904. def lower(self):
  7905. db = self.db
  7906. return Expression(db, db._adapter.LOWER, self, None, self.type)
  7907. def upper(self):
  7908. db = self.db
  7909. return Expression(db, db._adapter.UPPER, self, None, self.type)
  7910. def replace(self,a,b):
  7911. db = self.db
  7912. return Expression(db, db._adapter.REPLACE, self, (a,b), self.type)
  7913. def year(self):
  7914. db = self.db
  7915. return Expression(db, db._adapter.EXTRACT, self, 'year', 'integer')
  7916. def month(self):
  7917. db = self.db
  7918. return Expression(db, db._adapter.EXTRACT, self, 'month', 'integer')
  7919. def day(self):
  7920. db = self.db
  7921. return Expression(db, db._adapter.EXTRACT, self, 'day', 'integer')
  7922. def hour(self):
  7923. db = self.db
  7924. return Expression(db, db._adapter.EXTRACT, self, 'hour', 'integer')
  7925. def minutes(self):
  7926. db = self.db
  7927. return Expression(db, db._adapter.EXTRACT, self, 'minute', 'integer')
  7928. def coalesce(self,*others):
  7929. db = self.db
  7930. return Expression(db, db._adapter.COALESCE, self, others, self.type)
  7931. def coalesce_zero(self):
  7932. db = self.db
  7933. return Expression(db, db._adapter.COALESCE_ZERO, self, None, self.type)
  7934. def seconds(self):
  7935. db = self.db
  7936. return Expression(db, db._adapter.EXTRACT, self, 'second', 'integer')
  7937. def epoch(self):
  7938. db = self.db
  7939. return Expression(db, db._adapter.EPOCH, self, None, 'integer')
  7940. def __getslice__(self, start, stop):
  7941. db = self.db
  7942. if start < 0:
  7943. pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
  7944. else:
  7945. pos0 = start + 1
  7946. if stop < 0:
  7947. length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
  7948. elif stop == sys.maxint:
  7949. length = self.len()
  7950. else:
  7951. length = '(%s - %s)' % (stop + 1, pos0)
  7952. return Expression(db,db._adapter.SUBSTRING,
  7953. self, (pos0, length), self.type)
  7954. def __getitem__(self, i):
  7955. return self[i:i + 1]
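# Slicing an expression compiles to SQL SUBSTRING; sketch:
#
#     db(db.person.name[:2] == 'Ja').select()   # first two characters
#     db.person.name[0]                         # a single character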
  7956. def __str__(self):
  7957. return self.db._adapter.expand(self,self.type)
  7958. def __or__(self, other): # for use in sortby
  7959. db = self.db
  7960. return Expression(db,db._adapter.COMMA,self,other,self.type)
  7961. def __invert__(self):
  7962. db = self.db
7963. if hasattr(self,'op') and self.op == db._adapter.INVERT:
  7964. return self.first
  7965. return Expression(db,db._adapter.INVERT,self,type=self.type)
  7966. def __add__(self, other):
  7967. db = self.db
  7968. return Expression(db,db._adapter.ADD,self,other,self.type)
  7969. def __sub__(self, other):
  7970. db = self.db
  7971. if self.type in ('integer','bigint'):
  7972. result_type = 'integer'
  7973. elif self.type in ['date','time','datetime','double','float']:
  7974. result_type = 'double'
  7975. elif self.type.startswith('decimal('):
  7976. result_type = self.type
  7977. else:
  7978. raise SyntaxError("subtraction operation not supported for type")
  7979. return Expression(db,db._adapter.SUB,self,other,result_type)
  7980. def __mul__(self, other):
  7981. db = self.db
  7982. return Expression(db,db._adapter.MUL,self,other,self.type)
  7983. def __div__(self, other):
  7984. db = self.db
  7985. return Expression(db,db._adapter.DIV,self,other,self.type)
  7986. def __mod__(self, other):
  7987. db = self.db
  7988. return Expression(db,db._adapter.MOD,self,other,self.type)
  7989. def __eq__(self, value):
  7990. db = self.db
  7991. return Query(db, db._adapter.EQ, self, value)
  7992. def __ne__(self, value):
  7993. db = self.db
  7994. return Query(db, db._adapter.NE, self, value)
  7995. def __lt__(self, value):
  7996. db = self.db
  7997. return Query(db, db._adapter.LT, self, value)
  7998. def __le__(self, value):
  7999. db = self.db
  8000. return Query(db, db._adapter.LE, self, value)
  8001. def __gt__(self, value):
  8002. db = self.db
  8003. return Query(db, db._adapter.GT, self, value)
  8004. def __ge__(self, value):
  8005. db = self.db
  8006. return Query(db, db._adapter.GE, self, value)
  8007. def like(self, value, case_sensitive=False):
  8008. db = self.db
  8009. op = case_sensitive and db._adapter.LIKE or db._adapter.ILIKE
  8010. return Query(db, op, self, value)
  8011. def regexp(self, value):
  8012. db = self.db
  8013. return Query(db, db._adapter.REGEXP, self, value)
  8014. def belongs(self, *value):
  8015. """
  8016. Accepts the following inputs:
  8017. field.belongs(1,2)
  8018. field.belongs((1,2))
  8019. field.belongs(query)
  8020. Does NOT accept:
  8021. field.belongs(1)
  8022. """
  8023. db = self.db
  8024. if len(value) == 1:
  8025. value = value[0]
  8026. if isinstance(value,Query):
  8027. value = db(value)._select(value.first._table._id)
  8028. return Query(db, db._adapter.BELONGS, self, value)
  8029. def startswith(self, value):
  8030. db = self.db
  8031. if not self.type in ('string', 'text', 'json'):
  8032. raise SyntaxError("startswith used with incompatible field type")
  8033. return Query(db, db._adapter.STARTSWITH, self, value)
  8034. def endswith(self, value):
  8035. db = self.db
  8036. if not self.type in ('string', 'text', 'json'):
  8037. raise SyntaxError("endswith used with incompatible field type")
  8038. return Query(db, db._adapter.ENDSWITH, self, value)
  8039. def contains(self, value, all=False, case_sensitive=False):
  8040. """
8041. The case_sensitive parameter is only useful for PostgreSQL.
8042. For other RDBMSs it is ignored and contains is always case-insensitive.
8043. For MongoDB and GAE contains is always case-sensitive.
  8044. """
  8045. db = self.db
  8046. if isinstance(value,(list, tuple)):
  8047. subqueries = [self.contains(str(v).strip(),case_sensitive=case_sensitive)
  8048. for v in value if str(v).strip()]
  8049. if not subqueries:
  8050. return self.contains('')
  8051. else:
  8052. return reduce(all and AND or OR,subqueries)
  8053. if not self.type in ('string', 'text', 'json') and not self.type.startswith('list:'):
  8054. raise SyntaxError("contains used with incompatible field type")
  8055. return Query(db, db._adapter.CONTAINS, self, value, case_sensitive=case_sensitive)
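# contains sketches; a list of values builds several subqueries
# combined with OR by default, or AND when all=True:
#
#     db(db.person.name.contains('am')).select()
#     db(db.person.name.contains(['a', 'b'], all=True)).select()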
  8056. def with_alias(self, alias):
  8057. db = self.db
  8058. return Expression(db, db._adapter.AS, self, alias, self.type)
  8059. # GIS expressions
  8060. def st_asgeojson(self, precision=15, options=0, version=1):
  8061. return Expression(self.db, self.db._adapter.ST_ASGEOJSON, self,
  8062. dict(precision=precision, options=options,
  8063. version=version), 'string')
  8064. def st_astext(self):
  8065. db = self.db
  8066. return Expression(db, db._adapter.ST_ASTEXT, self, type='string')
  8067. def st_x(self):
  8068. db = self.db
  8069. return Expression(db, db._adapter.ST_X, self, type='string')
  8070. def st_y(self):
  8071. db = self.db
  8072. return Expression(db, db._adapter.ST_Y, self, type='string')
  8073. def st_distance(self, other):
  8074. db = self.db
  8075. return Expression(db,db._adapter.ST_DISTANCE,self,other, 'double')
  8076. def st_simplify(self, value):
  8077. db = self.db
  8078. return Expression(db, db._adapter.ST_SIMPLIFY, self, value, self.type)
  8079. # GIS queries
  8080. def st_contains(self, value):
  8081. db = self.db
  8082. return Query(db, db._adapter.ST_CONTAINS, self, value)
  8083. def st_equals(self, value):
  8084. db = self.db
  8085. return Query(db, db._adapter.ST_EQUALS, self, value)
  8086. def st_intersects(self, value):
  8087. db = self.db
  8088. return Query(db, db._adapter.ST_INTERSECTS, self, value)
  8089. def st_overlaps(self, value):
  8090. db = self.db
  8091. return Query(db, db._adapter.ST_OVERLAPS, self, value)
  8092. def st_touches(self, value):
  8093. db = self.db
  8094. return Query(db, db._adapter.ST_TOUCHES, self, value)
  8095. def st_within(self, value):
  8096. db = self.db
  8097. return Query(db, db._adapter.ST_WITHIN, self, value)
  8098. # for use in both Query and sortby
  8099. class SQLCustomType(object):
  8100. """
  8101. allows defining of custom SQL types
  8102. Example::
  8103. decimal = SQLCustomType(
  8104. type ='double',
  8105. native ='integer',
  8106. encoder =(lambda x: int(float(x) * 100)),
  8107. decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
  8108. )
  8109. db.define_table(
  8110. 'example',
  8111. Field('value', type=decimal)
  8112. )
  8113. :param type: the web2py type (default = 'string')
  8114. :param native: the backend type
  8115. :param encoder: how to encode the value to store it in the backend
  8116. :param decoder: how to decode the value retrieved from the backend
  8117. :param validator: what validators to use ( default = None, will use the
  8118. default validator for type)
  8119. """
  8120. def __init__(
  8121. self,
  8122. type='string',
  8123. native=None,
  8124. encoder=None,
  8125. decoder=None,
  8126. validator=None,
  8127. _class=None,
  8128. ):
  8129. self.type = type
  8130. self.native = native
  8131. self.encoder = encoder or (lambda x: x)
  8132. self.decoder = decoder or (lambda x: x)
  8133. self.validator = validator
  8134. self._class = _class or type
  8135. def startswith(self, text=None):
  8136. try:
8137. return self.type.startswith(text)
  8138. except TypeError:
  8139. return False
  8140. def __getslice__(self, a=0, b=100):
  8141. return None
  8142. def __getitem__(self, i):
  8143. return None
  8144. def __str__(self):
  8145. return self._class
  8146. class FieldVirtual(object):
  8147. def __init__(self, name, f=None, ftype='string',label=None,table_name=None):
  8148. # for backward compatibility
  8149. (self.name, self.f) = (name, f) if f else ('unknown', name)
  8150. self.type = ftype
  8151. self.label = label or self.name.capitalize().replace('_',' ')
  8152. self.represent = lambda v,r:v
  8153. self.formatter = IDENTITY
  8154. self.comment = None
  8155. self.readable = True
  8156. self.writable = False
  8157. self.requires = None
  8158. self.widget = None
  8159. self.tablename = table_name
  8160. self.filter_out = None
  8161. def __str__(self):
  8162. return '%s.%s' % (self.tablename, self.name)
  8163. class FieldMethod(object):
  8164. def __init__(self, name, f=None, handler=None):
  8165. # for backward compatibility
  8166. (self.name, self.f) = (name, f) if f else ('unknown', name)
  8167. self.handler = handler
  8168. def list_represent(x,r=None):
  8169. return ', '.join(str(y) for y in x or [])
  8170. class Field(Expression):
  8171. Virtual = FieldVirtual
  8172. Method = FieldMethod
  8173. Lazy = FieldMethod # for backward compatibility
  8174. """
  8175. an instance of this class represents a database field
  8176. example::
  8177. a = Field(name, 'string', length=32, default=None, required=False,
  8178. requires=IS_NOT_EMPTY(), ondelete='CASCADE',
  8179. notnull=False, unique=False,
8180. widget=None, label=None, comment=None,
8181. uploadfield=True, # True means store on disk,
  8182. # 'a_field_name' means store in this field in db
  8183. # False means file content will be discarded.
  8184. writable=True, readable=True, update=None, authorize=None,
  8185. autodelete=False, represent=None, uploadfolder=None,
  8186. uploadseparate=False # upload to separate directories by uuid_keys
  8187. # first 2 character and tablename.fieldname
  8188. # False - old behavior
  8189. # True - put uploaded file in
  8190. # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
  8191. # directory)
  8192. uploadfs=None # a pyfilesystem where to store upload
  8193. to be used as argument of DAL.define_table
  8194. allowed field types:
  8195. string, boolean, integer, double, text, blob,
  8196. date, time, datetime, upload, password
  8197. """
    def __init__(
        self,
        fieldname,
        type='string',
        length=None,
        default=DEFAULT,
        required=False,
        requires=DEFAULT,
        ondelete='CASCADE',
        notnull=False,
        unique=False,
        uploadfield=True,
        widget=None,
        label=None,
        comment=None,
        writable=True,
        readable=True,
        update=None,
        authorize=None,
        autodelete=False,
        represent=None,
        uploadfolder=None,
        uploadseparate=False,
        uploadfs=None,
        compute=None,
        custom_store=None,
        custom_retrieve=None,
        custom_retrieve_file_properties=None,
        custom_delete=None,
        filter_in=None,
        filter_out=None,
        custom_qualifier=None,
        map_none=None,
        ):
        self._db = self.db = None  # both for backward compatibility
        self.op = None
        self.first = None
        self.second = None
        self.name = fieldname = cleanup(fieldname)
        if not isinstance(fieldname, str) or hasattr(Table, fieldname) or \
                fieldname[0] == '_' or REGEX_PYTHON_KEYWORDS.match(fieldname):
            raise SyntaxError('Field: invalid field name: %s' % fieldname)
        self.type = type if not isinstance(type, (Table, Field)) \
            else 'reference %s' % type
        self.length = length if length is not None \
            else DEFAULTLENGTH.get(self.type, 512)
        self.default = default if default != DEFAULT else (update or None)
        self.required = required  # is this field required
        self.ondelete = ondelete.upper()  # this is for reference fields only
        self.notnull = notnull
        self.unique = unique
        self.uploadfield = uploadfield
        self.uploadfolder = uploadfolder
        self.uploadseparate = uploadseparate
        self.uploadfs = uploadfs
        self.widget = widget
        self.comment = comment
        self.writable = writable
        self.readable = readable
        self.update = update
        self.authorize = authorize
        self.autodelete = autodelete
        self.represent = list_represent if represent is None and \
            type in ('list:integer', 'list:string') else represent
        self.compute = compute
        self.isattachment = True
        self.custom_store = custom_store
        self.custom_retrieve = custom_retrieve
        self.custom_retrieve_file_properties = custom_retrieve_file_properties
        self.custom_delete = custom_delete
        self.filter_in = filter_in
        self.filter_out = filter_out
        self.custom_qualifier = custom_qualifier
        self.label = label if label is not None \
            else fieldname.replace('_', ' ').title()
        self.requires = requires if requires is not None else []
        self.map_none = map_none

    def set_attributes(self, *args, **attributes):
        self.__dict__.update(*args, **attributes)

    def clone(self, point_self_references_to=False, **args):
        field = copy.copy(self)
        if point_self_references_to and \
                field.type == 'reference %s' % field._tablename:
            field.type = 'reference %s' % point_self_references_to
        field.__dict__.update(args)
        return field

    def store(self, file, filename=None, path=None):
        if self.custom_store:
            return self.custom_store(file, filename, path)
        if isinstance(file, cgi.FieldStorage):
            filename = filename or file.filename
            file = file.file
        elif not filename:
            filename = file.name
        filename = os.path.basename(filename.replace('/', os.sep)
                                    .replace('\\', os.sep))
        m = REGEX_STORE_PATTERN.search(filename)
        extension = m and m.group('e') or 'txt'
        uuid_key = web2py_uuid().replace('-', '')[-16:]
        encoded_filename = base64.b16encode(filename).lower()
        newfilename = '%s.%s.%s.%s' % \
            (self._tablename, self.name, uuid_key, encoded_filename)
        newfilename = newfilename[:(self.length - 1 - len(extension))] \
            + '.' + extension
        self_uploadfield = self.uploadfield
        if isinstance(self_uploadfield, Field):
            blob_uploadfield_name = self_uploadfield.uploadfield
            keys = {self_uploadfield.name: newfilename,
                    blob_uploadfield_name: file.read()}
            self_uploadfield.table.insert(**keys)
        elif self_uploadfield == True:
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            elif self.db._adapter.folder:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            else:
                raise RuntimeError(
                    "you must specify a Field(...,uploadfolder=...)")
            if self.uploadseparate:
                if self.uploadfs:
                    raise RuntimeError("not supported")
                path = pjoin(path, "%s.%s" % (self._tablename, self.name),
                             uuid_key[:2])
            if not exists(path):
                os.makedirs(path)
            pathfilename = pjoin(path, newfilename)
            if self.uploadfs:
                dest_file = self.uploadfs.open(newfilename, 'wb')
            else:
                dest_file = open(pathfilename, 'wb')
            try:
                shutil.copyfileobj(file, dest_file)
            except IOError:
                raise IOError(
                    'Unable to store file "%s" because of invalid permissions, '
                    'a read-only file system, or a filename that is too long'
                    % pathfilename)
            dest_file.close()
        return newfilename
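
    # A minimal sketch of Field.store (the table, field, and path below are
    # hypothetical): store() renames the incoming stream with a uuid-based,
    # b16-encoded filename and returns the name to save in the record:
    #
    #     stream = open('/tmp/avatar.png', 'rb')
    #     newname = db.person.photo.store(stream, filename='avatar.png')
    #     db.person.insert(name='Max', photo=newname)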

    def retrieve(self, name, path=None, nameonly=False):
        """
        if nameonly==True return (filename, fullfilename) instead of
        (filename, stream)
        """
        self_uploadfield = self.uploadfield
        if self.custom_retrieve:
            return self.custom_retrieve(name, path)
        import http
        if self.authorize or isinstance(self_uploadfield, str):
            row = self.db(self == name).select().first()
            if not row:
                raise http.HTTP(404)
            if self.authorize and not self.authorize(row):
                raise http.HTTP(403)
        m = REGEX_UPLOAD_PATTERN.match(name)
        if not m or not self.isattachment:
            raise TypeError('Can\'t retrieve %s' % name)
        file_properties = self.retrieve_file_properties(name, path)
        filename = file_properties['filename']
        if isinstance(self_uploadfield, str):  # ## if file is in DB
            stream = StringIO.StringIO(row[self_uploadfield] or '')
        elif isinstance(self_uploadfield, Field):
            blob_uploadfield_name = self_uploadfield.uploadfield
            query = self_uploadfield == name
            data = self_uploadfield.table(query)[blob_uploadfield_name]
            stream = StringIO.StringIO(data)
        elif self.uploadfs:
            # ## if file is on pyfilesystem
            stream = self.uploadfs.open(name, 'rb')
        else:
            # ## if file is on regular filesystem
            # this is intentionally a string with the filename, not a stream;
            # it propagates and allows stream_file_or_304_or_206 to be called
            fullname = pjoin(file_properties['path'], name)
            if nameonly:
                return (filename, fullname)
            stream = open(fullname, 'rb')
        return (filename, stream)

    def retrieve_file_properties(self, name, path=None):
        self_uploadfield = self.uploadfield
        if self.custom_retrieve_file_properties:
            return self.custom_retrieve_file_properties(name, path)
        try:
            m = REGEX_UPLOAD_PATTERN.match(name)
            if not m or not self.isattachment:
                raise TypeError('Can\'t retrieve %s file properties' % name)
            filename = base64.b16decode(m.group('name'), True)
            filename = REGEX_CLEANUP_FN.sub('_', filename)
        except (TypeError, AttributeError):
            filename = name
        if isinstance(self_uploadfield, str):  # ## if file is in DB
            return dict(path=None, filename=filename)
        elif isinstance(self_uploadfield, Field):
            return dict(path=None, filename=filename)
        else:
            # ## if file is on filesystem
            if path:
                pass
            elif self.uploadfolder:
                path = self.uploadfolder
            else:
                path = pjoin(self.db._adapter.folder, '..', 'uploads')
            if self.uploadseparate:
                t = m.group('table')
                f = m.group('field')
                u = m.group('uuidkey')
                path = pjoin(path, "%s.%s" % (t, f), u[:2])
            return dict(path=path, filename=filename)

    def formatter(self, value):
        requires = self.requires
        if value is None or not requires:
            return value or self.map_none
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        elif isinstance(requires, tuple):
            requires = list(requires)
        else:
            requires = copy.copy(requires)
        requires.reverse()
        for item in requires:
            if hasattr(item, 'formatter'):
                value = item.formatter(value)
        return value

    def validate(self, value):
        if not self.requires or self.requires == DEFAULT:
            return ((value if value != self.map_none else None), None)
        requires = self.requires
        if not isinstance(requires, (list, tuple)):
            requires = [requires]
        for validator in requires:
            (value, error) = validator(value)
            if error:
                return (value, error)
        return ((value if value != self.map_none else None), None)
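
    # A minimal sketch, assuming the IS_NOT_EMPTY validator from
    # gluon.validators: each validator in `requires` is applied in order
    # and the first error stops the chain.
    #
    #     name = Field('name', requires=IS_NOT_EMPTY())
    #     name.validate('Max')   # ('Max', None)
    #     name.validate('')      # ('', <error message>)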

    def count(self, distinct=None):
        return Expression(self.db, self.db._adapter.COUNT, self,
                          distinct, 'integer')

    def as_dict(self, flat=False, sanitize=True, options=True):
        attrs = ('type', 'length', 'default', 'required',
                 'ondelete', 'notnull', 'unique', 'uploadfield',
                 'widget', 'label', 'comment', 'writable', 'readable',
                 'update', 'authorize', 'autodelete', 'represent',
                 'uploadfolder', 'uploadseparate', 'uploadfs',
                 'compute', 'custom_store', 'custom_retrieve',
                 'custom_retrieve_file_properties', 'custom_delete',
                 'filter_in', 'filter_out', 'custom_qualifier',
                 'map_none', 'name')
        SERIALIZABLE_TYPES = (int, long, basestring, dict, list,
                              float, tuple, bool, type(None))

        def flatten(obj):
            if flat:
                if isinstance(obj, flatten.__class__):
                    return str(type(obj))
                elif isinstance(obj, type):
                    try:
                        return str(obj).split("'")[1]
                    except IndexError:
                        return str(obj)
                elif not isinstance(obj, SERIALIZABLE_TYPES):
                    return str(obj)
                elif isinstance(obj, dict):
                    newobj = dict()
                    for k, v in obj.items():
                        newobj[k] = flatten(v)
                    return newobj
                elif isinstance(obj, (list, tuple, set)):
                    return [flatten(v) for v in obj]
                else:
                    return obj
            elif isinstance(obj, (dict, set)):
                return obj.copy()
            else:
                return obj

        def filter_requires(t, r, options=True):
            if sanitize and any([keyword in str(t).upper() for
                                 keyword in ("CRYPT", "IS_STRONG")]):
                return None
            if not isinstance(r, dict):
                if options and hasattr(r, "options"):
                    if callable(r.options):
                        r.options()
                newr = r.__dict__.copy()
            else:
                newr = r.copy()
            # remove options if not required
            if not options and "labels" in newr:
                for key in ("labels", "theset"):
                    if key in newr:
                        newr[key] = None
            for k, v in newr.items():
                if k == "other":
                    if isinstance(v, dict):
                        otype, other = v.popitem()
                    else:
                        otype = flatten(type(v))
                        other = v
                    newr[k] = {otype: filter_requires(otype, other,
                                                      options=options)}
                else:
                    newr[k] = flatten(v)
            return newr

        if isinstance(self.requires, (tuple, list, set)):
            requires = dict([(flatten(type(r)),
                              filter_requires(type(r), r, options=options))
                             for r in self.requires])
        else:
            requires = {flatten(type(self.requires)):
                        filter_requires(type(self.requires),
                                        self.requires, options=options)}
        d = dict(colname="%s.%s" % (self.tablename, self.name),
                 requires=requires)
        d.update([(attr, flatten(getattr(self, attr))) for attr in attrs])
        return d

    def as_xml(self, sanitize=True, options=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize, options=options)
        return xml(d)

    def as_json(self, sanitize=True, options=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize, options=options)
        return json(d)

    def as_yaml(self, sanitize=True, options=True):
        if have_serializers:
            d = self.as_dict(flat=True, sanitize=sanitize, options=options)
            return serializers.yaml(d)
        else:
            raise ImportError("No YAML serializers available")

    def __nonzero__(self):
        return True

    def __str__(self):
        try:
            return '%s.%s' % (self.tablename, self.name)
        except:
            return '<no table>.%s' % self.name

class Query(object):
    """
    a query object necessary to define a set.
    it can be stored or can be passed to DAL.__call__() to obtain a Set

    Example::

        query = db.users.name=='Max'
        set = db(query)
        records = set.select()
    """

    def __init__(
        self,
        db,
        op,
        first=None,
        second=None,
        ignore_common_filters=False,
        **optional_args
        ):
        self.db = self._db = db
        self.op = op
        self.first = first
        self.second = second
        self.ignore_common_filters = ignore_common_filters
        self.optional_args = optional_args

    def __repr__(self):
        return '<Query %s>' % BaseAdapter.expand(self.db._adapter, self)

    def __str__(self):
        return self.db._adapter.expand(self)

    def __and__(self, other):
        return Query(self.db, self.db._adapter.AND, self, other)

    __rand__ = __and__

    def __or__(self, other):
        return Query(self.db, self.db._adapter.OR, self, other)

    __ror__ = __or__

    def __invert__(self):
        if self.op == self.db._adapter.NOT:
            return self.first
        return Query(self.db, self.db._adapter.NOT, self)

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not (self == other)

    def case(self, t=1, f=0):
        return self.db._adapter.CASE(self, t, f)

    def as_dict(self, flat=False, sanitize=True):
        """Experimental: returns a plain dictionary with the basic
        query representation. Can be used with json/xml services
        for client-side db I/O

        Example:
        >>> q = db.auth_user.id != 0
        >>> q.as_dict(flat=True)
        {"op": "NE", "first":{"tablename": "auth_user",
                              "fieldname": "id"},
         "second":0}
        """
        SERIALIZABLE_TYPES = (tuple, dict, list, int, long, float,
                              basestring, type(None), bool)

        def loop(d):
            newd = dict()
            for k, v in d.items():
                if k in ("first", "second"):
                    if isinstance(v, self.__class__):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, Field):
                        newd[k] = {"tablename": v._tablename,
                                   "fieldname": v.name}
                    elif isinstance(v, Expression):
                        newd[k] = loop(v.__dict__)
                    elif isinstance(v, SERIALIZABLE_TYPES):
                        newd[k] = v
                    elif isinstance(v, (datetime.date,
                                        datetime.time,
                                        datetime.datetime)):
                        newd[k] = unicode(v)
                elif k == "op":
                    if callable(v):
                        newd[k] = v.__name__
                    elif isinstance(v, basestring):
                        newd[k] = v
                    else:
                        pass  # not callable or string
                elif isinstance(v, SERIALIZABLE_TYPES):
                    if isinstance(v, dict):
                        newd[k] = loop(v)
                    else:
                        newd[k] = v
            return newd

        if flat:
            return loop(self.__dict__)
        else:
            return self.__dict__

    def as_xml(self, sanitize=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)

def xorify(orderby):
    if not orderby:
        return None
    orderby2 = orderby[0]
    for item in orderby[1:]:
        orderby2 = orderby2 | item
    return orderby2
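
# For example, xorify([db.person.name, db.person.birth]) collapses the list
# into the single orderby expression db.person.name | db.person.birth.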

def use_common_filters(query):
    return (query and hasattr(query, 'ignore_common_filters') and
            not query.ignore_common_filters)

class Set(object):
    """
    a Set represents a set of records in the database,
    the records are identified by the query=Query(...) object.
    normally the Set is generated by DAL.__call__(Query(...))

    given a set, for example

        set = db(db.users.name=='Max')

    you can:

        set.update(name='Massimo')
        set.delete() # all elements in the set
        set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))

    and take subsets:

        subset = set(db.users.id<5)
    """

    def __init__(self, db, query, ignore_common_filters=None):
        self.db = db
        self._db = db  # for backward compatibility
        self.dquery = None
        # if query is a dict, parse it
        if isinstance(query, dict):
            query = self.parse(query)
        if ignore_common_filters is not None and \
                use_common_filters(query) == ignore_common_filters:
            query = copy.copy(query)
            query.ignore_common_filters = ignore_common_filters
        self.query = query

    def __repr__(self):
        return '<Set %s>' % BaseAdapter.expand(self.db._adapter, self.query)

    def __call__(self, query, ignore_common_filters=False):
        if query is None:
            return self
        elif isinstance(query, Table):
            query = self.db._adapter.id_query(query)
        elif isinstance(query, str):
            query = Expression(self.db, query)
        elif isinstance(query, Field):
            query = query != None
        if self.query:
            return Set(self.db, self.query & query,
                       ignore_common_filters=ignore_common_filters)
        else:
            return Set(self.db, query,
                       ignore_common_filters=ignore_common_filters)

    def _count(self, distinct=None):
        return self.db._adapter._count(self.query, distinct)

    def _select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter._select(self.query, fields, attributes)

    def _delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        return db._adapter._delete(tablename, self.query)

    def _update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        fields = db[tablename]._listify(update_fields, update=True)
        return db._adapter._update(tablename, self.query, fields)

    def as_dict(self, flat=False, sanitize=True):
        if flat:
            uid = dbname = uri = None
            codec = self.db._db_codec
            if not sanitize:
                uri, dbname, uid = (self.db._dbname, str(self.db),
                                    self.db._db_uid)
            d = {"query": self.query.as_dict(flat=flat)}
            d["db"] = {"uid": uid, "codec": codec,
                       "name": dbname, "uri": uri}
            return d
        else:
            return self.__dict__

    def as_xml(self, sanitize=True):
        if have_serializers:
            xml = serializers.xml
        else:
            raise ImportError("No xml serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return xml(d)

    def as_json(self, sanitize=True):
        if have_serializers:
            json = serializers.json
        else:
            raise ImportError("No json serializers available")
        d = self.as_dict(flat=True, sanitize=sanitize)
        return json(d)

    def parse(self, dquery):
        "Experimental: Turn a dictionary into a Query object"
        self.dquery = dquery
        return self.build(self.dquery)

    def build(self, d):
        "Experimental: see .parse()"
        op, first, second = (d["op"], d["first"], d.get("second", None))
        left = right = built = None
        if op in ("AND", "OR"):
            if not (type(first), type(second)) == (dict, dict):
                raise SyntaxError("Invalid AND/OR query")
            if op == "AND":
                built = self.build(first) & self.build(second)
            else:
                built = self.build(first) | self.build(second)
        elif op == "NOT":
            if first is None:
                raise SyntaxError("Invalid NOT query")
            built = ~self.build(first)
        else:
            # normal operation (GT, EQ, LT, ...)
            for k, v in {"left": first, "right": second}.items():
                if isinstance(v, dict) and v.get("op"):
                    v = self.build(v)
                if isinstance(v, dict) and ("tablename" in v):
                    v = self.db[v["tablename"]][v["fieldname"]]
                if k == "left":
                    left = v
                else:
                    right = v
            if hasattr(self.db._adapter, op):
                opm = getattr(self.db._adapter, op)
            if op == "EQ":
                built = left == right
            elif op == "NE":
                built = left != right
            elif op == "GT":
                built = left > right
            elif op == "GE":
                built = left >= right
            elif op == "LT":
                built = left < right
            elif op == "LE":
                built = left <= right
            elif op in ("JOIN", "LEFT_JOIN", "RANDOM", "ALLOW_NULL"):
                built = Expression(self.db, opm)
            elif op in ("LOWER", "UPPER", "EPOCH", "PRIMARY_KEY",
                        "COALESCE_ZERO", "RAW", "INVERT"):
                built = Expression(self.db, opm, left)
            elif op in ("COUNT", "EXTRACT", "AGGREGATE", "SUBSTRING",
                        "REGEXP", "LIKE", "ILIKE", "STARTSWITH",
                        "ENDSWITH", "ADD", "SUB", "MUL", "DIV",
                        "MOD", "AS", "ON", "COMMA", "NOT_NULL",
                        "COALESCE", "CONTAINS", "BELONGS"):
                built = Expression(self.db, opm, left, right)
            # expression as string
            elif not (left or right):
                built = Expression(self.db, op)
            else:
                raise SyntaxError("Operator not supported: %s" % op)
        return built
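
    # A minimal round-trip sketch (hedged; db.person is hypothetical):
    # parse()/build() reconstruct a Query from the dictionary produced by
    # Query.as_dict(flat=True), so a serialized query can be shipped by a
    # client and executed server-side:
    #
    #     d = (db.person.id > 3).as_dict(flat=True)
    #     # {'op': 'GT', 'first': {'tablename': 'person', 'fieldname': 'id'},
    #     #  'second': 3, ...}
    #     rows = db(d).select(db.person.ALL)  # Set.__init__ parses the dict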

    def isempty(self):
        return not self.select(limitby=(0, 1), orderby_on_limitby=False)

    def count(self, distinct=None, cache=None):
        db = self.db
        if cache:
            cache_model, time_expire = cache
            sql = self._count(distinct=distinct)
            key = db._uri + '/' + sql
            if len(key) > 200:
                key = hashlib_md5(key).hexdigest()
            return cache_model(
                key,
                (lambda self=self, distinct=distinct:
                 db._adapter.count(self.query, distinct)),
                time_expire)
        return db._adapter.count(self.query, distinct)

    def select(self, *fields, **attributes):
        adapter = self.db._adapter
        tablenames = adapter.tables(self.query,
                                    attributes.get('join', None),
                                    attributes.get('left', None),
                                    attributes.get('orderby', None),
                                    attributes.get('groupby', None))
        fields = adapter.expand_all(fields, tablenames)
        return adapter.select(self.query, fields, attributes)

    def nested_select(self, *fields, **attributes):
        return Expression(self.db, self._select(*fields, **attributes))

    def delete(self):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        if any(f(self) for f in table._before_delete):
            return 0
        ret = db._adapter.delete(tablename, self.query)
        ret and [f(self) for f in table._after_delete]
        return ret

    def update(self, **update_fields):
        db = self.db
        tablename = db._adapter.get_table(self.query)
        table = db[tablename]
        table._attempt_upload(update_fields)
        if any(f(self, update_fields) for f in table._before_update):
            return 0
        fields = table._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = db._adapter.update("%s" % table, self.query, fields)
        ret and [f(self, update_fields) for f in table._after_update]
        return ret

    def update_naive(self, **update_fields):
        """
        same as update but does not call table._before_update and _after_update
        """
        tablename = self.db._adapter.get_table(self.query)
        table = self.db[tablename]
        fields = table._listify(update_fields, update=True)
        if not fields:
            raise SyntaxError("No fields to update")
        ret = self.db._adapter.update("%s" % table, self.query, fields)
        return ret

    def validate_and_update(self, **update_fields):
        tablename = self.db._adapter.get_table(self.query)
        response = Row()
        response.errors = Row()
        new_fields = copy.copy(update_fields)
        for key, value in update_fields.iteritems():
            value, error = self.db[tablename][key].validate(value)
            if error:
                response.errors[key] = error
            else:
                new_fields[key] = value
        table = self.db[tablename]
        if response.errors:
            response.updated = None
        else:
            if not any(f(self, new_fields) for f in table._before_update):
                fields = table._listify(new_fields, update=True)
                if not fields:
                    raise SyntaxError("No fields to update")
                ret = self.db._adapter.update(tablename, self.query, fields)
                ret and [f(self, new_fields) for f in table._after_update]
            else:
                ret = 0
            response.updated = ret
        return response

    def delete_uploaded_files(self, upload_fields=None):
        table = self.db[self.db._adapter.tables(self.query)[0]]
        # ## mind uploadfield==True means file is not in DB
        if upload_fields:
            fields = upload_fields.keys()
        else:
            fields = table.fields
        fields = [f for f in fields if table[f].type == 'upload'
                  and table[f].uploadfield == True
                  and table[f].autodelete]
        if not fields:
            return False
        for record in self.select(*[table[f] for f in fields]):
            for fieldname in fields:
                field = table[fieldname]
                oldname = record.get(fieldname, None)
                if not oldname:
                    continue
                if upload_fields and oldname == upload_fields[fieldname]:
                    continue
                if field.custom_delete:
                    field.custom_delete(oldname)
                else:
                    uploadfolder = field.uploadfolder
                    if not uploadfolder:
                        uploadfolder = pjoin(
                            self.db._adapter.folder, '..', 'uploads')
                    if field.uploadseparate:
                        items = oldname.split('.')
                        uploadfolder = pjoin(
                            uploadfolder,
                            "%s.%s" % (items[0], items[1]),
                            items[2][:2])
                    oldpath = pjoin(uploadfolder, oldname)
                    if exists(oldpath):
                        os.unlink(oldpath)
        return False

class RecordUpdater(object):
    def __init__(self, colset, table, id):
        self.colset, self.db, self.tablename, self.id = \
            colset, table._db, table._tablename, id

    def __call__(self, **fields):
        colset, db, tablename, id = (self.colset, self.db,
                                     self.tablename, self.id)
        table = db[tablename]
        newfields = fields or dict(colset)
        for fieldname in newfields.keys():
            if not fieldname in table.fields or table[fieldname].type == 'id':
                del newfields[fieldname]
        table._db(table._id == id,
                  ignore_common_filters=True).update(**newfields)
        colset.update(newfields)
        return colset


class RecordDeleter(object):
    def __init__(self, table, id):
        self.db, self.tablename, self.id = table._db, table._tablename, id

    def __call__(self):
        return self.db(self.db[self.tablename]._id == self.id).delete()

class LazySet(object):
    def __init__(self, field, id):
        self.db, self.tablename, self.fieldname, self.id = \
            field.db, field._tablename, field.name, id

    def _getset(self):
        query = self.db[self.tablename][self.fieldname] == self.id
        return Set(self.db, query)

    def __repr__(self):
        return repr(self._getset())

    def __call__(self, query, ignore_common_filters=False):
        return self._getset()(query, ignore_common_filters)

    def _count(self, distinct=None):
        return self._getset()._count(distinct)

    def _select(self, *fields, **attributes):
        return self._getset()._select(*fields, **attributes)

    def _delete(self):
        return self._getset()._delete()

    def _update(self, **update_fields):
        return self._getset()._update(**update_fields)

    def isempty(self):
        return self._getset().isempty()

    def count(self, distinct=None, cache=None):
        return self._getset().count(distinct, cache)

    def select(self, *fields, **attributes):
        return self._getset().select(*fields, **attributes)

    def nested_select(self, *fields, **attributes):
        return self._getset().nested_select(*fields, **attributes)

    def delete(self):
        return self._getset().delete()

    def update(self, **update_fields):
        return self._getset().update(**update_fields)

    def update_naive(self, **update_fields):
        return self._getset().update_naive(**update_fields)

    def validate_and_update(self, **update_fields):
        return self._getset().validate_and_update(**update_fields)

    def delete_uploaded_files(self, upload_fields=None):
        return self._getset().delete_uploaded_files(upload_fields)
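
# LazySet defers building the underlying Set until it is actually used.
# A sketch (hedged; the tables are hypothetical): given db.dog.owner
# referencing db.person, a selected person row exposes a LazySet of dogs:
#
#     person = db.person(1)
#     person.dog.select()  # roughly db(db.dog.owner == 1).select()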

class VirtualCommand(object):
    def __init__(self, method, row):
        self.method = method
        self.row = row

    def __call__(self, *args, **kwargs):
        return self.method(self.row, *args, **kwargs)


def lazy_virtualfield(f):
    f.__lazy__ = True
    return f

class Rows(object):
    """
    A wrapper for the return value of a select. It basically represents a table.
    It has an iterator and each row is represented as a dictionary.
    """

    # ## TODO: this class still needs some work to care for ID/OID

    def __init__(
        self,
        db=None,
        records=[],
        colnames=[],
        compact=True,
        rawrows=None
        ):
        self.db = db
        self.records = records
        self.colnames = colnames
        self.compact = compact
        self.response = rawrows

    def __repr__(self):
        return '<Rows (%s)>' % len(self.records)

    def setvirtualfields(self, **keyed_virtualfields):
        """
        db.define_table('x', Field('number', 'integer'))
        if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]

        from gluon.dal import lazy_virtualfield

        class MyVirtualFields(object):
            # normal virtual field (backward compatible, discouraged)
            def normal_shift(self): return self.x.number+1
            # lazy virtual field (because of @lazy_virtualfield)
            @lazy_virtualfield
            def lazy_shift(instance, row, delta=4): return row.x.number+delta
        db.x.virtualfields.append(MyVirtualFields())

        for row in db(db.x).select():
            print row.number, row.normal_shift, row.lazy_shift(delta=7)
        """
        if not keyed_virtualfields:
            return self
        for row in self.records:
            for (tablename, virtualfields) in keyed_virtualfields.iteritems():
                attributes = dir(virtualfields)
                if not tablename in row:
                    box = row[tablename] = Row()
                else:
                    box = row[tablename]
                updated = False
                for attribute in attributes:
                    if attribute[0] != '_':
                        method = getattr(virtualfields, attribute)
                        if hasattr(method, '__lazy__'):
                            box[attribute] = VirtualCommand(method, row)
                        elif type(method) == types.MethodType:
                            if not updated:
                                virtualfields.__dict__.update(row)
                                updated = True
                            box[attribute] = method()
        return self

    def __and__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot & incompatible Rows objects')
        records = self.records + other.records
        return Rows(self.db, records, self.colnames)

    def __or__(self, other):
        if self.colnames != other.colnames:
            raise Exception('Cannot | incompatible Rows objects')
        records = self.records
        records += [record for record in other.records
                    if not record in records]
        return Rows(self.db, records, self.colnames)

    def __nonzero__(self):
        if len(self.records):
            return 1
        return 0

    def __len__(self):
        return len(self.records)

    def __getslice__(self, a, b):
        return Rows(self.db, self.records[a:b], self.colnames,
                    compact=self.compact)

    def __getitem__(self, i):
        row = self.records[i]
        keys = row.keys()
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[row.keys()[0]]
        return row

    def __iter__(self):
        """
        iterator over records
        """
        for i in xrange(len(self)):
            yield self[i]

    def __str__(self):
        """
        serializes the table into a csv file
        """
        s = StringIO.StringIO()
        self.export_to_csv_file(s)
        return s.getvalue()

    def first(self):
        if not self.records:
            return None
        return self[0]

    def last(self):
        if not self.records:
            return None
        return self[-1]

    def find(self, f, limitby=None):
        """
        returns a new Rows object, a subset of the original object,
        filtered by the function f
        """
        if not self:
            return Rows(self.db, [], self.colnames)
        records = []
        if limitby:
            a, b = limitby
        else:
            a, b = 0, len(self)
        k = 0
        for row in self:
            if f(row):
                if a <= k:
                    records.append(row)
                k += 1
                if k == b:
                    break
        return Rows(self.db, records, self.colnames)
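
    # For example (hedged; the age column is hypothetical):
    #
    #     adults = rows.find(lambda row: row.age >= 18, limitby=(0, 10))
    #
    # find() filters in Python after the select, so for large sets prefer
    # expressing the condition as a Query instead.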

    def exclude(self, f):
        """
        removes elements from the calling Rows object, filtered by the
        function f, and returns a new Rows object containing the removed
        elements
        """
        if not self.records:
            return Rows(self.db, [], self.colnames)
        removed = []
        i = 0
        while i < len(self):
            row = self[i]
            if f(row):
                removed.append(self.records[i])
                del self.records[i]
            else:
                i += 1
        return Rows(self.db, removed, self.colnames)

    def sort(self, f, reverse=False):
        """
        returns a list of sorted elements (not sorted in place)
        """
        rows = Rows(self.db, [], self.colnames, compact=False)
        rows.records = sorted(self, key=f, reverse=reverse)
        return rows

    def group_by_value(self, field):
        """
        regroups the rows, by one of the fields
        """
        if not self.records:
            return {}
        key = str(field)
        grouped_row_group = dict()
        for row in self:
            value = row[key]
            if not value in grouped_row_group:
                grouped_row_group[value] = [row]
            else:
                grouped_row_group[value].append(row)
        return grouped_row_group
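
    # For example (hedged; db.ticket.status is hypothetical):
    #
    #     groups = rows.group_by_value(db.ticket.status)
    #     # {'open': [<Row>, ...], 'closed': [<Row>, ...]}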

    def render(self, i=None, fields=None):
        """
        Takes an index and returns a copy of the indexed row with values
        transformed via the "represent" attributes of the associated fields.

        If no index is specified, a generator is returned for iteration
        over all the rows.

        fields -- a list of fields to transform (if None, all fields with
                  "represent" attributes will be transformed)
        """
        if i is None:
            return (self.render(i, fields=fields) for i in range(len(self)))
        import sqlhtml
        row = copy.deepcopy(self.records[i])
        keys = row.keys()
        tables = [f.tablename for f in fields] if fields \
            else [k for k in keys if k != '_extra']
        for table in tables:
            repr_fields = [f.name for f in fields if f.tablename == table] \
                if fields else [k for k in row[table].keys()
                                if (hasattr(self.db[table], k) and
                                    isinstance(self.db[table][k], Field)
                                    and self.db[table][k].represent)]
            for field in repr_fields:
                row[table][field] = sqlhtml.represent(
                    self.db[table][field], row[table][field], row[table])
        if self.compact and len(keys) == 1 and keys[0] != '_extra':
            return row[keys[0]]
        return row

    def as_list(self,
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a list or dictionary.

        :param storage_to_dict: when True returns a dict,
            otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings
            (default False)
        """
        (oc, self.compact) = (self.compact, compact)
        if storage_to_dict:
            items = [item.as_dict(datetime_to_str, custom_types)
                     for item in self]
        else:
            items = [item for item in self]
        self.compact = oc
        return items

    def as_dict(self,
                key='id',
                compact=True,
                storage_to_dict=True,
                datetime_to_str=False,
                custom_types=None):
        """
        returns the data as a dictionary of dictionaries (storage_to_dict=True)
        or records (False)

        :param key: the name of the field to be used as dict key,
            normally the id
        :param compact: collapse single-table rows to their columns
            (default True)
        :param storage_to_dict: when True returns a dict,
            otherwise a list (default True)
        :param datetime_to_str: convert datetime fields to strings
            (default False)
        """
        # test for multiple rows
        multi = False
        f = self.first()
        if f and isinstance(key, basestring):
            multi = any([isinstance(v, f.__class__) for v in f.values()])
            if (not "." in key) and multi:
                # No key provided, default to int indices
                def new_key():
                    i = 0
                    while True:
                        yield i
                        i += 1
                key_generator = new_key()
                key = lambda r: key_generator.next()
        rows = self.as_list(compact, storage_to_dict, datetime_to_str,
                            custom_types)
        if isinstance(key, str) and key.count('.') == 1:
            (table, field) = key.split('.')
            return dict([(r[table][field], r) for r in rows])
        elif isinstance(key, str):
            return dict([(r[key], r) for r in rows])
        else:
            return dict([(key(r), r) for r in rows])
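
    # For example (hedged): rows.as_dict() returns {1: {...}, 2: {...}}
    # keyed by id, while rows.as_dict(key='person.name') keys each record
    # by that table.field value instead.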

    def export_to_csv_file(self, ofile, null='<NULL>', *args, **kwargs):
        """
        export data to csv, the first line contains the column names

        :param ofile: where the csv must be exported to
        :param null: how null values must be represented (default '<NULL>')
        :param delimiter: delimiter to separate values (default ',')
        :param quotechar: character to use to quote string values (default '"')
        :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
        :param represent: use the fields' .represent value (default False)
        :param colnames: list of column names to use (default self.colnames)

        This will only work when exporting Rows objects!
        DO NOT use this with db.export_to_csv()
        """
        delimiter = kwargs.get('delimiter', ',')
        quotechar = kwargs.get('quotechar', '"')
        quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
        represent = kwargs.get('represent', False)
        writer = csv.writer(ofile, delimiter=delimiter,
                            quotechar=quotechar, quoting=quoting)
        colnames = kwargs.get('colnames', self.colnames)
        write_colnames = kwargs.get('write_colnames', True)
        # a proper csv starting with the column names
        if write_colnames:
            writer.writerow(colnames)

        def none_exception(value):
            """
            returns a cleaned up value that can be used for csv export:
            - unicode text is encoded as such
            - None values are replaced with the given representation
              (default '<NULL>')
            """
            if value is None:
                return null
            elif isinstance(value, unicode):
                return value.encode('utf8')
            elif isinstance(value, Reference):
                return long(value)
            elif hasattr(value, 'isoformat'):
                return value.isoformat()[:19].replace('T', ' ')
            elif isinstance(value, (list, tuple)):  # for type='list:..'
                return bar_encode(value)
            return value

        for record in self:
            row = []
            for col in colnames:
                if not REGEX_TABLE_DOT_FIELD.match(col):
                    row.append(record._extra[col])
                else:
                    (t, f) = col.split('.')
                    field = self.db[t][f]
                    if isinstance(record.get(t, None), (Row, dict)):
                        value = record[t][f]
                    else:
                        value = record[f]
                    if field.type == 'blob' and value is not None:
                        value = base64.b64encode(value)
                    elif represent and field.represent:
                        value = field.represent(value)
                    row.append(none_exception(value))
            writer.writerow(row)
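
    # A minimal sketch (the path is hypothetical):
    #
    #     rows = db(db.person).select()
    #     ofile = open('/tmp/people.csv', 'wb')
    #     rows.export_to_csv_file(ofile, represent=True, delimiter=';')
    #     ofile.close()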

    def xml(self, strict=False, row_name='row', rows_name='rows'):
        """
        serializes the table using sqlhtml.SQLTABLE (if present)
        """
        if strict:
            ncols = len(self.colnames)
            return '<%s>\n%s\n</%s>' % (rows_name,
                '\n'.join(row.as_xml(row_name=row_name,
                                     colnames=self.colnames)
                          for row in self), rows_name)
        import sqlhtml
        return sqlhtml.SQLTABLE(self).xml()

    def as_xml(self, row_name='row', rows_name='rows'):
        return self.xml(strict=True, row_name=row_name, rows_name=rows_name)

    def as_json(self, mode='object', default=None):
        """
        serializes the rows to a JSON list or object with objects
        mode='object' is not implemented (should return a nested
        object structure)
        """
        items = [record.as_json(mode=mode, default=default,
                                serialize=False,
                                colnames=self.colnames)
                 for record in self]
        if have_serializers:
            return serializers.json(items,
                                    default=default or
                                    serializers.custom_json)
        elif simplejson:
            return simplejson.dumps(items)
        else:
            raise RuntimeError("missing simplejson")

    # for consistent naming yet backwards compatible
    as_csv = __str__
    json = as_json

################################################################################
# dummy function used to define some doctests
################################################################################


def test_all():
    """
    >>> if len(sys.argv)<2: db = DAL("sqlite://test.db")
    >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
    >>> tmp = db.define_table('users',\
              Field('stringf', 'string', length=32, required=True),\
              Field('booleanf', 'boolean', default=False),\
              Field('passwordf', 'password', notnull=True),\
              Field('uploadf', 'upload'),\
              Field('blobf', 'blob'),\
              Field('integerf', 'integer', unique=True),\
              Field('doublef', 'double', unique=True, notnull=True),\
              Field('jsonf', 'json'),\
              Field('datef', 'date', default=datetime.date.today()),\
              Field('timef', 'time'),\
              Field('datetimef', 'datetime'),\
              migrate='test_user.table')

    Insert a record

    >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
                        uploadf=None, integerf=5, doublef=3.14,\
                        jsonf={"j": True},\
                        datef=datetime.date(2001, 1, 1),\
                        timef=datetime.time(12, 30, 15),\
                        datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
    1

    Drop the table

    >>> db.users.drop()

    Examples of insert, select, update, delete

    >>> tmp = db.define_table('person',\
              Field('name'),\
              Field('birth', 'date'),\
              migrate='test_person.table')
    >>> person_id = db.person.insert(name="Marco", birth='2005-06-22')
    >>> person_id = db.person.insert(name="Massimo", birth='1971-12-21')

    commented len(db().select(db.person.ALL))
    commented 2

    >>> me = db(db.person.id==person_id).select()[0] # test select
    >>> me.name
    'Massimo'
    >>> db.person[2].name
    'Massimo'
    >>> db.person(2).name
    'Massimo'
    >>> db.person(name='Massimo').name
    'Massimo'
    >>> db.person(db.person.name=='Massimo').name
    'Massimo'
    >>> row = db.person[2]
    >>> row.name == row['name'] == row['person.name'] == row('person.name')
    True
    >>> db(db.person.name=='Massimo').update(name='massimo') # test update
    1
    >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
    1

    Update a single record

    >>> me.update_record(name="Max")
    <Row {'name': 'Max', 'birth': datetime.date(1971, 12, 21), 'id': 2}>
    >>> me.name
    'Max'

    Examples of complex search conditions

    >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
    1
    >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
    1
    >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
    1
    >>> me = db(db.person.id==person_id).select(db.person.name)[0]
    >>> me.name
    'Max'

    Examples of search conditions using extract from date/datetime/time

    >>> len(db(db.person.birth.month()==12).select())
    1
    >>> len(db(db.person.birth.year()>1900).select())
    1

    Example of usage of NULL

    >>> len(db(db.person.birth==None).select()) ### test NULL
    0
    >>> len(db(db.person.birth!=None).select()) ### test NULL
    1

    Examples of search conditions using lower, upper, and like

    >>> len(db(db.person.name.upper()=='MAX').select())
    1
    >>> len(db(db.person.name.like('%ax')).select())
    1
    >>> len(db(db.person.name.upper().like('%AX')).select())
    1
    >>> len(db(~db.person.name.upper().like('%AX')).select())
    0

    orderby, groupby and limitby

    >>> people = db().select(db.person.name, orderby=db.person.name)
    >>> order = db.person.name|~db.person.birth
    >>> people = db().select(db.person.name, orderby=order)
    >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
    >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))

    Example of one 2 many relation

    >>> tmp = db.define_table('dog',\
              Field('name'),\
              Field('birth', 'date'),\
              Field('owner', db.person),\
              migrate='test_dog.table')
    >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
    1

    A simple JOIN

    >>> len(db(db.dog.owner==db.person.id).select())
    1
    >>> len(db().select(db.person.ALL, db.dog.name, left=db.dog.on(db.dog.owner==db.person.id)))
    1

    Drop tables

    >>> db.dog.drop()
    >>> db.person.drop()

    Example of many 2 many relation and Set

    >>> tmp = db.define_table('author', Field('name'),\
              migrate='test_author.table')
    >>> tmp = db.define_table('paper', Field('title'),\
              migrate='test_paper.table')
    >>> tmp = db.define_table('authorship',\
              Field('author_id', db.author),\
              Field('paper_id', db.paper),\
              migrate='test_authorship.table')
    >>> aid = db.author.insert(name='Massimo')
    >>> pid = db.paper.insert(title='QCD')
    >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)

    Define a Set

    >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
    >>> rows = authored_papers.select(db.author.name, db.paper.title)
    >>> for row in rows: print row.author.name, row.paper.title
    Massimo QCD

    Example of search condition using belongs

    >>> set = (1, 2, 3)
    >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of search condition using nested select

    >>> nested_select = db()._select(db.authorship.paper_id)
    >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
    >>> print rows[0].title
    QCD

    Example of expressions

    >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
    >>> db(mynumber).delete()
    0
    >>> for i in range(10): tmp = mynumber.insert(x=i)
    >>> db(mynumber).select(mynumber.x.sum())[0](mynumber.x.sum())
    45
    >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
    5

    Output in csv

    >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
    author.name,paper.title\r
    Massimo,QCD

    Delete all leftover tables

    >>> DAL.distributed_transaction_commit(db)
    >>> db.mynumber.drop()
    >>> db.authorship.drop()
    >>> db.author.drop()
    >>> db.paper.drop()
    """

################################################################################
# deprecated since the new DAL; here only for backward compatibility
################################################################################

SQLField = Field
SQLTable = Table
SQLXorable = Expression
SQLQuery = Query
SQLSet = Set
SQLRows = Rows
SQLStorage = Row
SQLDB = DAL
GQLDB = DAL
DAL.Field = Field  # was necessary in gluon/globals.py session.connect
DAL.Table = Table  # was necessary in gluon/globals.py session.connect

################################################################################
# Geodal utils
################################################################################


def geoPoint(x, y):
    return "POINT (%f %f)" % (x, y)


def geoLine(*line):
    return "LINESTRING (%s)" % ','.join("%f %f" % item for item in line)


def geoPolygon(*line):
    return "POLYGON ((%s))" % ','.join("%f %f" % item for item in line)
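
# For example, geoPoint(1, 2) returns 'POINT (1.000000 2.000000)' and
# geoLine((1, 2), (3, 4)) returns
# 'LINESTRING (1.000000 2.000000,3.000000 4.000000)': WKT strings suitable
# for assigning to geometry fields.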

################################################################################
# run tests
################################################################################

if __name__ == '__main__':
    import doctest
    doctest.testmod()