
/libs/gluon/dal.py

https://github.com/saffih/openshift_web2py
Python | 9552 lines | 9327 code | 76 blank | 149 comment | 138 complexity | 048b7117e257bb113f8532a636b5f425 MD5
Possible License(s): BSD-2-Clause, MPL-2.0-no-copyleft-exception, MIT, Apache-2.0, BSD-3-Clause, LGPL-2.1

Note: large files are truncated on this page; follow the repository link above for the full file.

#!/bin/env python
# -*- coding: utf-8 -*-

"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases, including:
- SQLite & SpatiaLite
- MySQL
- Postgres
- Firebird
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
- Teradata
- IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
... folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
... groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload
blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental

For more info:
help(DAL)
help(Field)
"""

###################################################################################
# this file only exposes DAL and Field
###################################################################################

__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 2**15 # not quite but reasonable default max char length

DEFAULTLENGTH = {'string':512,
                 'password':512,
                 'upload':512,
                 'text':2**15,
                 'blob':2**31}

TIMINGSSIZE = 100

SPATIALLIBS = {
    'Windows':'libspatialite',
    'Linux':'libspatialite.so',
    'Darwin':'libspatialite.dylib'
    }

import re
import sys
import locale
import os
import types
import cPickle
import datetime
import threading
import time
import cStringIO
import csv
import cgi
import copy
import socket
import logging
import copy_reg
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob
import traceback
import platform

CALLABLETYPES = (types.LambdaType, types.FunctionType,
                 types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

TABLE_ARGS = set(
    ('migrate','primarykey','fake_migrate','format','redefine',
     'singular','plural','trigger_name','sequence_name',
     'common_filter','polymodel','table_class','on_define',))

SELECT_ARGS = set(
    ('orderby', 'groupby', 'limitby','required', 'cache', 'left',
     'distinct', 'having', 'join','for_update', 'processor','cacheable'))

ogetattr = object.__getattribute__
osetattr = object.__setattr__
exists = os.path.exists
pjoin = os.path.join

###################################################################################
# following checks allow the use of dal without web2py, as a standalone module
###################################################################################

try:
    from utils import web2py_uuid
except ImportError:
    import uuid
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import validators
    have_validators = True
except ImportError:
    have_validators = False

LOGGER = logging.getLogger("web2py.dal")
DEFAULT = lambda:0

GLOBAL_LOCKER = threading.RLock()
THREAD_LOCAL = threading.local()

# internal representation of tables with field
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]

REGEX_TYPE = re.compile('^([\w\_\:]+)')
REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*')
REGEX_W = re.compile('^\w+$')
REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$')
REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
REGEX_CLEANUP_FN = re.compile('[\'"\s;]+')
REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)')
REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')
REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)")
REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$')
REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$')
REGEX_QUOTES = re.compile("'[^']*'")
REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$')
REGEX_PASSWORD = re.compile('\://([^:@]*)\:')

# list of drivers will be built on the fly
# and lists only what is available
DRIVERS = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key  ### for belongs on ID
    from google.appengine.ext.db.polymodel import PolyModel
    DRIVERS.append('google')
except ImportError:
    pass

if not 'google' in DRIVERS:

    try:
        from pysqlite2 import dbapi2 as sqlite2
        DRIVERS.append('SQLite(sqlite2)')
    except ImportError:
        LOGGER.debug('no SQLite drivers pysqlite2.dbapi2')

    try:
        from sqlite3 import dbapi2 as sqlite3
        DRIVERS.append('SQLite(sqlite3)')
    except ImportError:
        LOGGER.debug('no SQLite drivers sqlite3')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pymysql as pymysql
            # monkeypatch pymysql because they haven't fixed the bug:
            # https://github.com/petehunt/PyMySQL/issues/86
            pymysql.ESCAPE_REGEX = re.compile("'")
            pymysql.ESCAPE_MAP = {"'": "''"}
            # end monkeypatch
        except ImportError:
            import pymysql
        DRIVERS.append('MySQL(pymysql)')
    except ImportError:
        LOGGER.debug('no MySQL driver pymysql')

    try:
        import MySQLdb
        DRIVERS.append('MySQL(MySQLdb)')
    except ImportError:
        LOGGER.debug('no MySQL driver MySQLDB')

    try:
        import psycopg2
        from psycopg2.extensions import adapt as psycopg2_adapt
        DRIVERS.append('PostgreSQL(psycopg2)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver psycopg2')

    try:
        # first try contrib driver, then from site-packages (if installed)
        try:
            import contrib.pg8000.dbapi as pg8000
        except ImportError:
            import pg8000.dbapi as pg8000
        DRIVERS.append('PostgreSQL(pg8000)')
    except ImportError:
        LOGGER.debug('no PostgreSQL driver pg8000')

    try:
        import cx_Oracle
        DRIVERS.append('Oracle(cx_Oracle)')
    except ImportError:
        LOGGER.debug('no Oracle driver cx_Oracle')

    try:
        import pyodbc
        DRIVERS.append('MSSQL(pyodbc)')
        DRIVERS.append('DB2(pyodbc)')
        DRIVERS.append('Teradata(pyodbc)')
    except ImportError:
        LOGGER.debug('no MSSQL/DB2/Teradata driver pyodbc')

    try:
        import Sybase
        DRIVERS.append('Sybase(Sybase)')
    except ImportError:
        LOGGER.debug('no Sybase driver')

    try:
        import kinterbasdb
        DRIVERS.append('Interbase(kinterbasdb)')
        DRIVERS.append('Firebird(kinterbasdb)')
    except ImportError:
        LOGGER.debug('no Firebird/Interbase driver kinterbasdb')

    try:
        import fdb
        DRIVERS.append('Firebird(fdb)')
    except ImportError:
        LOGGER.debug('no Firebird driver fdb')

    try:
        import firebirdsql
        DRIVERS.append('Firebird(firebirdsql)')
    except ImportError:
        LOGGER.debug('no Firebird driver firebirdsql')

    try:
        import informixdb
        DRIVERS.append('Informix(informixdb)')
        LOGGER.warning('Informix support is experimental')
    except ImportError:
        LOGGER.debug('no Informix driver informixdb')

    try:
        import sapdb
        DRIVERS.append('SQL(sapdb)')
        LOGGER.warning('SAPDB support is experimental')
    except ImportError:
        LOGGER.debug('no SAP driver sapdb')

    try:
        import cubriddb
        DRIVERS.append('Cubrid(cubriddb)')
        LOGGER.warning('Cubrid support is experimental')
    except ImportError:
        LOGGER.debug('no Cubrid driver cubriddb')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        # Try sqlite jdbc driver from http://www.zentus.com/sqlitejdbc/
        from org.sqlite import JDBC # required by java.sql; ensure we have it
        zxJDBC_sqlite = java.sql.DriverManager
        DRIVERS.append('PostgreSQL(zxJDBC)')
        DRIVERS.append('SQLite(zxJDBC)')
        LOGGER.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC')
        is_jdbc = False

    try:
        import ingresdbi
        DRIVERS.append('Ingres(ingresdbi)')
    except ImportError:
        LOGGER.debug('no Ingres driver ingresdbi')
    # NOTE could try JDBC.......

    try:
        import couchdb
        DRIVERS.append('CouchDB(couchdb)')
    except ImportError:
        LOGGER.debug('no Couchdb driver couchdb')

    try:
        import pymongo
        DRIVERS.append('MongoDB(pymongo)')
    except:
        LOGGER.debug('no MongoDB driver pymongo')

    try:
        import imaplib
        DRIVERS.append('IMAP(imaplib)')
    except:
        LOGGER.debug('no IMAP driver imaplib')

PLURALIZE_RULES = [
    (re.compile('child$'), re.compile('child$'), 'children'),
    (re.compile('oot$'), re.compile('oot$'), 'eet'),
    (re.compile('ooth$'), re.compile('ooth$'), 'eeth'),
    (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'),
    (re.compile('sis$'), re.compile('sis$'), 'ses'),
    (re.compile('man$'), re.compile('man$'), 'men'),
    (re.compile('ife$'), re.compile('ife$'), 'ives'),
    (re.compile('eau$'), re.compile('eau$'), 'eaux'),
    (re.compile('lf$'), re.compile('lf$'), 'lves'),
    (re.compile('[sxz]$'), re.compile('$'), 'es'),
    (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'),
    (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'),
    (re.compile('$'), re.compile('$'), 's'),
    ]

def pluralize(singular, rules=PLURALIZE_RULES):
    for line in rules:
        re_search, re_sub, replace = line
        plural = re_search.search(singular) and re_sub.sub(replace, singular)
        if plural: return plural
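
# A few illustrative calls (hypothetical examples, not part of the original
# file) showing how the rules above resolve; the first rule whose search
# pattern matches wins:
#   pluralize('child')  # -> 'children'  (irregular rule)
#   pluralize('box')    # -> 'boxes'     (matches '[sxz]$')
#   pluralize('party')  # -> 'parties'   (matches '(qu|[^aeiou])y$')
#   pluralize('person') # -> 'persons'   (falls through to the catch-all 's')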

def hide_password(uri):
    return REGEX_PASSWORD.sub('://******:',uri)

def OR(a,b):
    return a|b

def AND(a,b):
    return a&b

def IDENTITY(x): return x

def varquote_aux(name,quotestr='%s'):
    return name if REGEX_W.match(name) else quotestr % name

if 'google' in DRIVERS:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(**kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def get_value_for_datastore(self, model_instance):
            value = super(GAEDecimalProperty, self)\
                .get_value_for_datastore(model_instance)
            if value is None or value == '':
                return None
            else:
                return str(value)

        def make_value_from_datastore(self, value):
            if value is None or value == '':
                return None
            else:
                return decimal.Decimal(value).quantize(self.round)

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string."\
                                        % self.name)

###################################################################################
# class that handles connection pooling (all adapters are derived from this one)
###################################################################################

class ConnectionPool(object):

    POOLS = {}
    check_active_connection = True

    @staticmethod
    def set_folder(folder):
        THREAD_LOCAL.folder = folder

    # ## this allows gluon to commit/rollback all dbs in this thread

    def close(self,action='commit',really=True):
        if action:
            if callable(action):
                action(self)
            else:
                getattr(self, action)()
        # ## if you want pools, recycle this connection
        if self.pool_size:
            GLOBAL_LOCKER.acquire()
            pool = ConnectionPool.POOLS[self.uri]
            if len(pool) < self.pool_size:
                pool.append(self.connection)
                really = False
            GLOBAL_LOCKER.release()
        if really:
            self.close_connection()
        self.connection = None

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        dbs = getattr(THREAD_LOCAL,'db_instances',{}).items()
        for db_uid, db_group in dbs:
            for db in db_group:
                if hasattr(db,'_adapter'):
                    db._adapter.close(action)
        getattr(THREAD_LOCAL,'db_instances',{}).clear()
        getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear()
        if callable(action):
            action(None)
        return

    def find_or_make_work_folder(self):
        """ this actually does not make the folder. it has to be there """
        self.folder = getattr(THREAD_LOCAL,'folder','')

        # Creating the folder if it does not exist
        if False and self.folder and not exists(self.folder):
            os.mkdir(self.folder)

    def after_connection(self):
        """ this is supposed to be overloaded by adapters """
        pass

    def reconnect(self, f=None, cursor=True):
        """
        this function defines: self.connection and self.cursor
        (iff cursor is True)
        if self.pool_size>0 it will try to pull the connection from the pool
        if the connection is not active (closed by db server) it will loop
        if not self.pool_size or no active connections in pool makes a new one
        """
        if getattr(self,'connection',None) != None:
            return
        if f is None:
            f = self.connector
        if not self.pool_size:
            self.connection = f()
            self.cursor = cursor and self.connection.cursor()
        else:
            uri = self.uri
            POOLS = ConnectionPool.POOLS
            while True:
                GLOBAL_LOCKER.acquire()
                if not uri in POOLS:
                    POOLS[uri] = []
                if POOLS[uri]:
                    self.connection = POOLS[uri].pop()
                    GLOBAL_LOCKER.release()
                    self.cursor = cursor and self.connection.cursor()
                    try:
                        if self.cursor and self.check_active_connection:
                            self.execute('SELECT 1;')
                        break
                    except:
                        pass
                else:
                    GLOBAL_LOCKER.release()
                    self.connection = f()
                    self.cursor = cursor and self.connection.cursor()
                    break
        self.after_connection()
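
# Minimal pooling sketch (hypothetical usage, not part of the original file):
# passing pool_size to DAL makes close() park the raw connection in
# ConnectionPool.POOLS[uri] instead of closing it, and reconnect() pops it
# back out (probing liveness with 'SELECT 1;') on the next request:
#
#   db = DAL('postgres://user:pass@localhost/test', pool_size=10)
#   ... run queries ...
#   db._adapter.close()   # commits, then recycles into POOLS[db._adapter.uri]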

###################################################################################
# this is a generic adapter that does nothing; all others are derived from this one
###################################################################################

class BaseAdapter(ConnectionPool):

    driver = None
    driver_name = None
    drivers = () # list of drivers from which to pick
    connection = None
    maxcharlength = MAXCHARLENGTH
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    can_select_for_update = True

    TRUE = 'T'
    FALSE = 'F'
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'bigint': 'INTEGER',
        'float': 'DOUBLE',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        # the two below are only used when DAL(...bigint_id=True) and replace 'id','reference'
        'big-id': 'BIGINT PRIMARY KEY AUTOINCREMENT',
        'big-reference': 'BIGINT REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        }
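
    # Illustrative note (hypothetical example, not in the original file):
    # each entry above is a %-template filled with the field's attributes,
    # e.g. types['string'] % dict(length=512) -> 'CHAR(512)'; adapter
    # subclasses override this dict with backend-specific column types.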

    def id_query(self, table):
        return table._id != None

    def adapt(self, obj):
        return "'%s'" % obj.replace("'", "''")

    def smart_adapt(self, obj):
        if isinstance(obj,(int,float)):
            return str(obj)
        return self.adapt(str(obj))

    def integrity_error(self):
        return self.driver.IntegrityError

    def operational_error(self):
        return self.driver.OperationalError

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if have_portalocker and lock:
            fileobj = portalocker.LockedFile(filename,mode)
        else:
            fileobj = open(filename,mode)
        return fileobj

    def file_close(self, fileobj):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        if fileobj:
            fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def find_driver(self,adapter_args,uri=None):
        if getattr(self,'driver',None) != None:
            return
        drivers_available = [driver for driver in self.drivers
                             if driver in globals()]
        if uri:
            items = uri.split('://',1)[0].split(':')
            request_driver = items[1] if len(items)>1 else None
        else:
            request_driver = None
        request_driver = request_driver or adapter_args.get('driver')
        if request_driver:
            if request_driver in drivers_available:
                self.driver_name = request_driver
                self.driver = globals().get(request_driver)
            else:
                raise RuntimeError, "driver %s not available" % request_driver
        elif drivers_available:
            self.driver_name = drivers_available[0]
            self.driver = globals().get(self.driver_name)
        else:
            raise RuntimeError, "no driver available %s" % self.drivers
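
    # Illustrative note (hypothetical calls, not in the original file): a
    # driver can be requested either in the URI or via adapter_args, e.g.
    #   DAL('postgres:pg8000://u:p@localhost/test')
    #   DAL('postgres://u:p@localhost/test', adapter_args=dict(driver='pg8000'))
    # otherwise the first importable driver in self.drivers is used.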

    def __init__(self, db,uri,pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=IDENTITY, driver_args={},
                 adapter_args={},do_connect=True):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self,tablename):
        return '%s_sequence' % tablename

    def trigger_name(self,tablename):
        return '%s_sequence' % tablename

    def varquote(self,name):
        return name

    def create_table(self, table,
                     migrate=True,
                     fake_migrate=False,
                     polymodel=None):
        db = table._db
        fields = []
        # PostGIS geo fields are added after the table has been created
        postcreation_fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        types = self.types
        for field in table:
            sortable += 1
            field_name = field.name
            field_type = field.type
            if isinstance(field_type,SQLCustomType):
                ftype = field_type.native or field_type.type
            elif field_type.startswith('reference'):
                referenced = field_type[10:].strip()
                if referenced == '.':
                    referenced = tablename
                constraint_name = self.constraint_name(tablename, field_name)
                if not '.' in referenced \
                        and referenced != tablename \
                        and hasattr(table,'_primarykey'):
                    ftype = types['integer']
                else:
                    if hasattr(table,'_primarykey'):
                        rtablename,rfieldname = referenced.split('.')
                        rtable = db[rtablename]
                        rfield = rtable[rfieldname]
                        # must be PK reference or unique
                        if rfieldname in rtable._primarykey or \
                                rfield.unique:
                            ftype = types[rfield.type[:9]] % \
                                dict(length=rfield.length)
                            # multicolumn primary key reference?
                            if not rfield.unique and len(rtable._primarykey)>1:
                                # then it has to be a table level FK
                                if rtablename not in TFK:
                                    TFK[rtablename] = {}
                                TFK[rtablename][rfieldname] = field_name
                            else:
                                ftype = ftype + \
                                    types['reference FK'] % dict(
                                    constraint_name = constraint_name, # should be quoted
                                    foreign_key = '%s (%s)' % (rtablename,
                                                               rfieldname),
                                    on_delete_action=field.ondelete)
                    else:
                        # make a guess here for circular references
                        if referenced in db:
                            id_fieldname = db[referenced]._id.name
                        elif referenced == tablename:
                            id_fieldname = table._id.name
                        else: #make a guess
                            id_fieldname = 'id'
                        ftype = types[field_type[:9]] % dict(
                            index_name = field_name+'__idx',
                            field_name = field_name,
                            constraint_name = constraint_name,
                            foreign_key = '%s (%s)' % (referenced,
                                                       id_fieldname),
                            on_delete_action=field.ondelete)
            elif field_type.startswith('list:reference'):
                ftype = types[field_type[:14]]
            elif field_type.startswith('decimal'):
                precision, scale = map(int,field_type[8:-1].split(','))
                ftype = types[field_type[:7]] % \
                    dict(precision=precision,scale=scale)
            elif field_type.startswith('geo'):
                srid = self.srid
                geotype, parms = field_type[:-1].split('(')
                if not geotype in types:
                    raise SyntaxError, \
                        'Field: unknown field type: %s for %s' \
                        % (field_type, field_name)
                ftype = types[geotype]
                if self.dbengine == 'postgres' and geotype == 'geometry':
                    # parameters: schema, srid, dimension
                    dimension = 2 # GIS.dimension ???
                    parms = parms.split(',')
                    if len(parms) == 3:
                        schema, srid, dimension = parms
                    elif len(parms) == 2:
                        schema, srid = parms
                    else:
                        schema = parms[0]
                    ftype = "SELECT AddGeometryColumn ('%%(schema)s', '%%(tablename)s', '%%(fieldname)s', %%(srid)s, '%s', %%(dimension)s);" % types[geotype]
                    ftype = ftype % dict(schema=schema,
                                         tablename=tablename,
                                         fieldname=field_name, srid=srid,
                                         dimension=dimension)
                    postcreation_fields.append(ftype)
            elif not field_type in types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field_type, field_name)
            else:
                ftype = types[field_type]\
                    % dict(length=field.length)
            if not field_type.startswith('id') and \
                    not field_type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'
                if field.custom_qualifier:
                    ftype += ' %s' % field.custom_qualifier
            # add to list of fields
            sql_fields[field_name] = dict(
                length=field.length,
                unique=field.unique,
                notnull=field.notnull,
                sortable=sortable,
                type=str(field_type),
                sql=ftype)
            if isinstance(field.default,(str,int,float)):
                # Caveat: sql_fields and sql_fields_aux
                # differ for default values.
                # sql_fields is used to trigger migrations and sql_fields_aux
                # is used for create tables.
                # The reason is that we do not want to trigger
                # a migration simply because a default value changes.
                not_null = self.NOT_NULL(field.default, field_type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field_name] = dict(sql=ftype)
            # Postgres - PostGIS:
            # geometry fields are added after the table has been created, not now
            if not (self.dbengine == 'postgres' and \
                        field_type.startswith('geom')):
                fields.append('%s %s' % (field_name, ftype))
        other = ';'

        # backend-specific extensions to fields
        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = db[rtablename]._primarykey
            fkeys = [ rfields[k] for k in pkeys ]
            fields = fields + ',\n    ' + \
                types['reference TFK'] % dict(
                table_name = tablename,
                field_name=', '.join(fkeys),
                foreign_table = rtablename,
                foreign_key = ', '.join(pkeys),
                on_delete_action = field.ondelete)

        if hasattr(table,'_primarykey'):
            query = "CREATE TABLE %s(\n    %s,\n    %s) %s" % \
                (tablename, fields,
                 self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = "CREATE TABLE %s(\n    %s\n)%s" % \
                (tablename, fields, other)
        if self.uri.startswith('sqlite:///') \
                or self.uri.startswith('spatialite:///'):
            path_encoding = sys.getfilesystemencoding() \
                or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')]\
                .decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder
        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory')\
                or self.uri.startswith('spatialite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = pjoin(dbpath, migrate)
        else:
            table._dbt = pjoin(
                dbpath, '%s_%s.table' % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = pjoin(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query,table)
                table._db.commit()
                # Postgres geom fields are added now,
                # after the table has been created
                for query in postcreation_fields:
                    self.execute(query)
                    table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query
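
    # Usage sketch (hypothetical, not part of the original file): the pickled
    # metadata written above ('<uri_hash>_<tablename>.table') is what later
    # migrations diff against; to resync that file without emitting any DDL,
    # one can define the table with fake_migrate=True, e.g.:
    #   db.define_table('person', Field('name'), fake_migrate=True)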

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        db = table._db
        db._migrated.append(table._tablename)
        tablename = table._tablename
        def fix(item):
            k,v=item
            if not isinstance(v,dict):
                v=dict(type='unkown',sql=v)
            return k.lower(),v
        # make sure all field names are lower case to avoid
        # migrations because of case change
        sql_fields = dict(map(fix,sql_fields.iteritems()))
        sql_fields_old = dict(map(fix,sql_fields_old.iteritems()))
        sql_fields_aux = dict(map(fix,sql_fields_aux.iteritems()))
        if db._debug:
            logging.debug('migrating %s to %s' % (sql_fields_old,sql_fields))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        if self.dbengine == 'mssql':
            new_add = '; ALTER TABLE %s ADD ' % tablename
        else:
            new_add = ', ADD '

        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                if self.dbengine in ('postgres',) and \
                        sql_fields[key]['type'].startswith('geometry'):
                    # 'sql' == ftype in sql
                    query = [ sql_fields[key]['sql'] ]
                else:
                    query = ['ALTER TABLE %s ADD %s %s;' % \
                                 (tablename, key,
                                  sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine in ('sqlite', 'spatialite'):
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                ftype = sql_fields_old[key]['type']
                if self.dbengine in ('postgres',) \
                        and ftype.startswith('geometry'):
                    geotype, parms = ftype[:-1].split('(')
                    schema = parms.split(',')[0]
                    query = [ "SELECT DropGeometryColumn ('%(schema)s', '%(table)s', '%(field)s');" % dict(schema=schema, table=tablename, field=key,) ]
                elif not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;'
                             % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not (key in table.fields and
                             isinstance(table[key].type, SQLCustomType)) \
                    and not sql_fields[key]['type'].startswith('reference')\
                    and not sql_fields[key]['type'].startswith('double')\
                    and not sql_fields[key]['type'].startswith('id'):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        # Caveat: mysql, oracle and firebird do not allow multiple alter table
                        # in one transaction so we must commit partial transactions and
                        # update table._dbt after alter table.
                        if db._adapter.commit_on_alter_table:
                            db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            cPickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql','oracle','firebird')):
            db.commit()
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    def LOWER(self, first):
        return 'LOWER(%s)' % self.expand(first)

    def UPPER(self, first):
        return 'UPPER(%s)' % self.expand(first)

    def COUNT(self, first, distinct=None):
        return ('COUNT(%s)' if not distinct else 'COUNT(DISTINCT %s)') \
            % self.expand(first)

    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def EPOCH(self, first):
        return self.EXTRACT(first, 'epoch')

    def AGGREGATE(self, first, what):
        return "%s(%s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)

    def COALESCE(self, first, second):
        expressions = [self.expand(first)]+[self.expand(e) for e in second]
        return 'COALESCE(%s)' % ','.join(expressions)

    def COALESCE_ZERO(self, first):
        return 'COALESCE(%s,0)' % self.expand(first)

    def RAW(self, first):
        return first

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        db = table._db
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        db.commit()
        del db[table._tablename]
        del db.tables[db.tables.index(table._tablename)]
        db._remove_references_to(table)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def _insert(self, table, fields):
        keys = ','.join(f.name for f,v in fields)
        values = ','.join(self.expand(v,f.type) for f,v in fields)
        return 'INSERT INTO %s(%s) VALUES (%s);' % (table, keys, values)

    def insert(self, table, fields):
        query = self._insert(table,fields)
        try:
            self.execute(query)
        except Exception, e:
            if isinstance(e,self.integrity_error_class()):
                return None
            raise e
        if hasattr(table,'_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                             if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id,int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    def bulk_insert(self, table, items):
        return [self.insert(table,item) for item in items]

    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif second==[] or second==():
            return '(1=0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

    def REGEXP(self, first, second):
        "regular expression operator"
        raise NotImplementedError

    def LIKE(self, first, second):
        "case sensitive like operator"
        raise NotImplementedError

    def ILIKE(self, first, second):
        "case insensitive like operator"
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand(second+'%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first),
                                 self.expand('%'+second, 'string'))

    def CONTAINS(self, first, second):
        if first.type in ('string', 'text'):
            key = '%'+str(second).replace('%','%%')+'%'
        elif first.type.startswith('list:'):
            key = '%|'+str(second).replace('|','||').replace('%','%%')+'|%'
        return '(%s LIKE %s)' % (self.expand(first),self.expand(key,'string'))
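
    # Illustrative note (hypothetical example, not in the original file):
    # for a 'list:string' field, items are stored pipe-delimited, so
    # CONTAINS(field, 'red') renders roughly as
    #   (person.tags LIKE '%|red|%')
    # with any literal '|' inside the searched value escaped as '||'.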

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def LT(self,first,second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s < None" % first
        return '(%s < %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def LE(self,first,second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s <= None" % first
        return '(%s <= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def GT(self,first,second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s > None" % first
        return '(%s > %s)' % (self.expand(first),
                              self.expand(second,first.type))

    def GE(self,first,second=None):
        if second is None:
            raise RuntimeError, "Cannot compare %s >= None" % first
        return '(%s >= %s)' % (self.expand(first),
                               self.expand(second,first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first),
                              self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first),
                               self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def INVERT(self, first):
        return '%s DESC' % self.expand(first)

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return '%s.%s' % (expression.tablename, expression.name)
        elif isinstance(expression, (Expression, Query)):
            first = expression.first
            second = expression.second
            op = expression.op
            if not second is None:
                return op(first, second)
            elif not first is None:
                return op(first)
            elif isinstance(op, str):
                if op.endswith(';'):
                    op=op[:-1]
                return '(%s)' % op
            else:
                return op()
        elif field_type:
            return str(self.represent(expression,field_type))
        elif isinstance(expression,(list,tuple)):
            return ','.join(self.represent(item,field_type) \
                                for item in expression)
        elif isinstance(expression, bool):
            return '1' if expression else '0'
        else:
            return str(expression)

    def alias(self, table, alias):
        """
        Given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode= ' '):
        # Prepare functions "write_to_logfile" and "close_logfile"
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name,
                                     self.expand(value, field.type)) \
                              for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            if use_common_filters(query):
                query = self.common_filter(query, [tablename])

# [listing truncated here; see the repository link above for the full file]