@@ -105,11 +105,11 @@
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
-'teradata://DSN=dsn;UID=user;PWD=pass' # experimental
+'teradata://DSN=dsn;UID=user;PWD=pass' # experimental
For more info:
help(DAL)
help(Field)
"""
@@ -117,12 +117,17 @@
###################################################################################
# this file orly exposes DAL and Field
###################################################################################
__all__ = ['DAL', 'Field']
-MAXCHARLENGTH = 512
-INFINITY = 2**15 # not quite but reasonable default max char length
+
+MAXCHARLENGTH = 2**15 # not quite but reasonable default max char length
+DEFAULTLENGTH = {'string':512,
+ 'password':512,
+ 'upload':512,
+ 'text':2**15,
+ 'blob':2**31}
import re
import sys
import locale
import os
@@ -186,11 +191,11 @@
# internal representation of tables with field
#
# <table>.<field>, tables and fields may only be [a-zA-Z0-9_]
regex_dbname = re.compile('^(\w+)(\:\w+)*')
-table_field = re.compile('^[\w_]+\.[\w_]+$')
+table_field = re.compile('^([\w_]+)\.([\w_]+)$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack=re.compile('(?<!\|)\|(?!\|)')
+ def pool_connection(self, f, cursor=True):
+ """
+ this function defines: self.connection and self.cursor (if cursor is True)
+ if self.pool_size>0 it will try pull the connection from the pool
+ if the connection is not active (closed by db server) it will loop
+ if not self.pool_size or no active connections in pool makes a new one
+ """
if not self.pool_size:
self.connection = f()
+ self.cursor = cursor and self.connection.cursor()
else:
uri = self.uri
- sql_locker.acquire()
- if not uri in ConnectionPool.pools:
- ConnectionPool.pools[uri] = []
- if ConnectionPool.pools[uri]:
- self.connection = ConnectionPool.pools[uri].pop()
- sql_locker.release()
- else:
- sql_locker.release()
- self.connection = f()
+ while True:
+ sql_locker.acquire()
+ if not uri in ConnectionPool.pools:
+ ConnectionPool.pools[uri] = []
+ if ConnectionPool.pools[uri]:
+ self.connection = ConnectionPool.pools[uri].pop()
+ sql_locker.release()
+ self.cursor = cursor and self.connection.cursor()
+ try:
+ if self.cursor and self.check_active_connection:
+ self.execute('SELECT 1;')
+ break
+ except:
+ pass
+ else:
+ sql_locker.release()
+ self.connection = f()
+ self.cursor = cursor and self.connection.cursor()
+ break
if not hasattr(thread,'instances'):
thread.instances = []
thread.instances.append(self)
@@ -419,11 +448,11 @@
###################################################################################
class BaseAdapter(ConnectionPool):
driver = None
- maxcharlength = INFINITY
+ maxcharlength = MAXCHARLENGTH
commit_on_alter_table = False
support_distributed_transaction = False
uploads_in_blob = False
types = {
'boolean': 'CHAR(1)',
@@ -443,13 +472,19 @@
'list:integer': 'TEXT',
'list:string': 'TEXT',
'list:reference': 'TEXT',
}
+ def adapt(self,obj):
+ return "'%s'" % obj.replace("'", "''")
+
def integrity_error(self):
return self.driver.IntegrityError
+ def operational_error(self):
+ return self.driver.OperationalError
+
def file_exists(self, filename):
"""
to be used ONLY for files that on GAE may not be on filesystem
"""
return os.path.exists(filename)
@@ -796,12 +831,19 @@
return 'Random()'
def NOT_NULL(self,default,field_type):
return 'NOT NULL DEFAULT %s' % self.represent(default,field_type)
+ def COALESCE(self,first,second):
+ expressions = [self.expand(first)]+[self.expand(e) for e in second]
+ return 'COALESCE(%s)' % ','.join(expressions)
+
def COALESCE_ZERO(self,first):
return 'COALESCE(%s,0)' % self.expand(first)
+
+ def RAW(self,first):
+ return first
def ALLOW_NULL(self):
return ''
def SUBSTRING(self,field,parameters):
@@ -866,11 +908,11 @@
def BELONGS(self,first,second):
if isinstance(second,str):
return '(%s IN (%s))' % (self.expand(first),second[:-1])
elif second==[] or second==():
- return '(0)'
+ return '(1=0)'
items =','.join(self.expand(item,first.type) for item in second)
return '(%s IN (%s))' % (self.expand(first),items)
def LIKE(self,first,second):
return '(%s LIKE %s)' % (self.expand(first),self.expand(second,'string'))
@@ -943,12 +985,14 @@
elif isinstance(expression, (Expression, Query)):
if not expression.second is None:
return expression.op(expression.first, expression.second)
elif not expression.first is None:
return expression.op(expression.first)
- else:
+ elif not isinstance(expression.op,str):
return expression.op()
+ else:
+ return '(%s)' % expression.op
elif field_type:
return self.represent(expression,field_type)
elif isinstance(expression,(list,tuple)):
return ','.join([self.represent(item,field_type) for item in expression])
else:
@@ -996,10 +1040,11 @@
logfile.write('success!\n')
finally:
logfile.close()
def _update(self,tablename,query,fields):
+ query = self.filter_tenant(query,[tablename])
if query:
sql_w = ' WHERE ' + self.expand(query)
else:
sql_w = ''
sql_v = ','.join(['%s=%s' % (field.name, self.expand(value,field.type)) for (field,value) in fields])
@@ -1012,10 +1057,11 @@
return self.cursor.rowcount
except:
return None
def _delete(self,tablename, query):
+ query = self.filter_tenant(query,[tablename])
if query:
sql_w = ' WHERE ' + self.expand(query)
else:
sql_w = ''
return 'DELETE FROM %s%s;' % (tablename, sql_w)
@@ -1191,21 +1237,22 @@
rows = self.rowslice(rows,limitby[0],None)
return self.parse(rows,self._colnames)
def _count(self,query,distinct=None):
tablenames = self.tables(query)
+ query = self.filter_tenant(query,tablenames)
if query:
sql_w = ' WHERE ' + self.expand(query)
else:
sql_w = ''
sql_t = ','.join(tablenames)
if distinct:
if isinstance(distinct,(list,tuple)):
distinct = xorify(distinct)
sql_d = self.expand(distinct)
- return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
- return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)
+ return 'SELECT count(DISTINCT %s) FROM %s%s;' % (sql_d, sql_t, sql_w)
+ return 'SELECT count(*) FROM %s%s;' % (sql_t, sql_w)
def count(self,query,distinct=None):
self.execute(self._count(query,distinct))
return self.cursor.fetchone()[0]
@@ -1213,13 +1260,13 @@
def tables(self,query):
tables = set()
if isinstance(query, Field):
tables.add(query.tablename)
elif isinstance(query, (Expression, Query)):
- if query.first!=None:
+ if not query.first is None:
tables = tables.union(self.tables(query.first))
- if query.second!=None:
+ if not query.second is None:
tables = tables.union(self.tables(query.second))
return list(tables)
def commit(self):
return self.connection.commit()
@@ -1278,11 +1325,11 @@
if obj is None:
return 'NULL'
if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
return 'NULL'
r = self.represent_exceptions(obj,fieldtype)
- if r != None:
+ if not r is None:
return r
if fieldtype == 'boolean':
if obj and not str(obj)[:1].upper() in ['F', '0']:
return "'T'"
else:
@@ -1324,11 +1371,11 @@
obj = str(obj)
try:
obj.decode(self.db_codec)
except:
obj = obj.decode('latin1').encode(self.db_codec)
- return "'%s'" % obj.replace("'", "''")
+ return self.adapt(obj)
def represent_exceptions(self, obj, fieldtype):
return None
def lastrowid(self,table):
@@ -1448,20 +1495,34 @@
colset.update_record = lambda _ = (colset, table, id), **a: update_record(_, a)
colset.delete_record = lambda t = table, i = id: t._db(t._id==i).delete()
for (referee_table, referee_name) in \
table._referenced_by:
s = db[referee_table][referee_name]
- if not referee_table in colset:
- # for backward compatibility
- colset[referee_table] = Set(db, s == id)
- ### add new feature?
- ### colset[referee_table+'_by_'+refree_name] = Set(db, s == id)
+ referee_link = db._referee_name and \
+ db._referee_name % dict(table=referee_table,field=referee_name)
+ if referee_link and not referee_link in colset:
+ colset[referee_link] = Set(db, s == id)
colset['id'] = id
new_rows.append(new_row)
+
rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
+
for tablename in virtualtables:
- for item in db[tablename].virtualfields:
+ ### new style virtual fields
+ table = db[tablename]
+ fields_virtual = [(f,v) for (f,v) in table.items() if isinstance(v,FieldVirtual)]
+ fields_lazy = [(f,v) for (f,v) in table.items() if isinstance(v,FieldLazy)]
+ if fields_virtual or fields_lazy:
+ for row in rowsobj.records:
+ box = row[tablename]
+ for f,v in fields_virtual:
+ box[f] = v.f(row)
+ for f,v in fields_lazy:
+ box[f] = (v.handler or VirtualCommand)(v.f,row)
+
+ ### old style virtual fields
+ for item in table.virtualfields:
try:
rowsobj = rowsobj.setvirtualfields(**{tablename:item})
except KeyError:
# to avoid breaking virtualfields when partial select
pass
@@ -1471,12 +1532,16 @@
fieldname = self.db._request_tenant
for tablename in tablenames:
table = self.db[tablename]
if fieldname in table:
default = table[fieldname].default
- if default!=None:
- query = query&(table[fieldname]==default)
+ if not default is None:
+ newquery = table[fieldname]==default
+ if query is None:
+ query = newquery
+ else:
+ query = query&newquery
return query
###################################################################################
# List of all the available adapters, they all extend BaseAdapter
###################################################################################
@@ -1504,11 +1569,11 @@
except:
return None
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "sqlite"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1521,14 +1586,15 @@
dbpath = uri.split('://')[1]
if dbpath[0] != '/':
dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
if not 'check_same_thread' in driver_args:
driver_args['check_same_thread'] = False
+ if not 'detect_types' in driver_args:
+ driver_args['detect_types'] = self.driver.PARSE_DECLTYPES
def connect(dbpath=dbpath, driver_args=driver_args):
return self.driver.Connection(dbpath, **driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
def _truncate(self,table,mode = ''):
tablename = table._tablename
return ['DELETE FROM %s;' % tablename,
@@ -1542,11 +1608,11 @@
driver = globals().get('zxJDBC',None)
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "sqlite"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1559,17 +1625,15 @@
dbpath = uri.split('://')[1]
if dbpath[0] != '/':
dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'),dbpath)
def connect(dbpath=dbpath,driver_args=driver_args):
return self.driver.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:'+dbpath),**driver_args)
- self.pool_connection(connect)
- self.cursor = self.connection.cursor()
# FIXME http://www.zentus.com/sqlitejdbc/custom_functions.html for UDFs
# self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)
- def execute(self,a):
- return self.log_execute(a[:-1])
+ def execute(self,a):
+ return self.log_execute(a)
class MySQLAdapter(BaseAdapter):
driver = globals().get('pymysql',None)
@@ -1622,11 +1686,11 @@
def concat_add(self,table):
return '; ALTER TABLE %s ADD ' % table
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "mysql"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1658,18 +1722,16 @@
port=port,
charset=charset))
def connect(driver_args=driver_args):
return self.driver.connect(**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
self.execute('SET FOREIGN_KEY_CHECKS=1;')
self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
def lastrowid(self,table):
self.execute('select last_insert_id();')
return int(self.cursor.fetchone()[0])
-
class PostgreSQLAdapter(BaseAdapter):
driver = globals().get('psycopg2',None)
@@ -1692,10 +1754,13 @@
'list:integer': 'TEXT',
'list:string': 'TEXT',
'list:reference': 'TEXT',
}
+ def adapt(self,obj):
+ return psycopg2_adapt(obj).getquoted()
+
def sequence_name(self,table):
return '%s_id_Seq' % table
def RANDOM(self):
return 'RANDOM()'
@@ -1719,11 +1784,11 @@
# % (table._tablename, table._fieldname, table._sequence_name))
self.execute(query)
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "postgres"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1757,13 +1822,10 @@
% (db, user, host, port, password)
def connect(msg=msg,driver_args=driver_args):
return self.driver.connect(msg,**driver_args)
self.pool_connection(connect)
self.connection.set_client_encoding('UTF8')
- self.cursor = self.connection.cursor()
- self.execute('BEGIN;')
- self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
self.execute("SET standard_conforming_strings=on;")
def lastrowid(self,table):
self.execute("select currval('%s')" % table._sequence_name)
return int(self.cursor.fetchone()[0])
@@ -1786,11 +1848,11 @@
class JDBCPostgreSQLAdapter(PostgreSQLAdapter):
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "postgres"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1816,11 +1878,10 @@
msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
def connect(msg=msg,driver_args=driver_args):
return self.driver.connect(*msg,**driver_args)
self.pool_connection(connect)
self.connection.set_client_encoding('UTF8')
- self.cursor = self.connection.cursor()
self.execute('BEGIN;')
self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
class OracleAdapter(BaseAdapter):
@@ -1903,11 +1964,11 @@
return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
return None
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "oracle"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -1917,11 +1978,10 @@
if not 'threaded' in driver_args:
driver_args['threaded']=True
def connect(uri=uri,driver_args=driver_args):
return self.driver.connect(uri,**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")
def execute(self, command):
@@ -1932,11 +1992,13 @@
if not m:
break
command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
args.append(m.group('clob')[6:-2].replace("''", "'"))
i += 1
- return self.log_execute(command[:-1], args)
+ if command[-1:]==';':
+ command = command[:-1]
+ return self.log_execute(command, args)
def create_sequence_and_triggers(self, query, table, **args):
tablename = table._tablename
sequence_name = table._sequence_name
trigger_name = table._trigger_name
@@ -2064,11 +2126,10 @@
% (host, port, db, user, password, urlargs)
def connect(cnxn=cnxn,driver_args=driver_args):
return self.driver.connect(cnxn,**driver_args)
if not fake_connect:
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
def lastrowid(self,table):
#self.execute('SELECT @@IDENTITY;')
self.execute('SELECT SCOPE_IDENTITY();')
return int(self.cursor.fetchone()[0])
@@ -2170,11 +2231,11 @@
return ['DELETE FROM %s;' % table._tablename,
'SET GENERATOR %s TO 0;' % table._sequence_name]
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "firebird"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2206,15 +2267,14 @@
if adapter_args['driver_name'] == 'kinterbasdb':
self.driver = kinterbasdb
elif adapter_args['driver_name'] == 'firebirdsql':
self.driver = firebirdsql
else:
- self.driver = kinterbasdb
+ self.driver = kinterbasdb
def connect(driver_args=driver_args):
return self.driver.connect(**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
def create_sequence_and_triggers(self, query, table, **args):
tablename = table._tablename
sequence_name = table._sequence_name
trigger_name = table._trigger_name
@@ -2231,11 +2291,11 @@
class FireBirdEmbeddedAdapter(FireBirdAdapter):
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "firebird"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2270,15 +2330,14 @@
if adapter_args['driver_name'] == 'kinterbasdb':
self.driver = kinterbasdb
elif adapter_args['driver_name'] == 'firebirdsql':
self.driver = firebirdsql
else:
- self.driver = kinterbasdb
+ self.driver = kinterbasdb
def connect(driver_args=driver_args):
return self.driver.connect(**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
class InformixAdapter(BaseAdapter):
driver = globals().get('informixdb',None)
@@ -2341,11 +2400,11 @@
return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
return None
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "informix"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2373,11 +2432,10 @@
dsn = '%s@%s' % (db,host)
driver_args.update(dict(user=user,password=password,autocommit=True))
def connect(dsn=dsn,driver_args=driver_args):
return self.driver.connect(dsn,**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
def execute(self,command):
if command[-1:]==';':
command = command[:-1]
return self.log_execute(command)
@@ -2439,11 +2497,11 @@
return "'%s'" % obj
return None
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "db2"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2451,11 +2509,10 @@
self.find_or_make_work_folder()
cnxn = uri.split('://', 1)[1]
def connect(cnxn=cnxn,driver_args=driver_args):
return self.driver.connect(cnxn,**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
def execute(self,command):
if command[-1:]==';':
command = command[:-1]
return self.log_execute(command)
@@ -2497,11 +2554,11 @@
}
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "teradata"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2509,11 +2566,10 @@
self.find_or_make_work_folder()
cnxn = uri.split('://', 1)[1]
def connect(cnxn=cnxn,driver_args=driver_args):
return self.driver.connect(cnxn,**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
INGRES_SEQNAME='ii***lineitemsequence' # NOTE invalid database object name
# (ANSI-SQL wants this form of name
# to be a delimited identifier)
@@ -2561,11 +2617,11 @@
sql_o += ' OFFSET %d' % (lmin, )
return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "ingres"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2585,11 +2641,10 @@
servertype=servertype,
trace=trace))
def connect(driver_args=driver_args):
return self.driver.connect(**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
def create_sequence_and_triggers(self, query, table, **args):
# post create table auto inc code (if needed)
# modify table to btree for performance....
# Older Ingres releases could use rule/trigger like Oracle above.
@@ -2682,11 +2737,11 @@
% (table._tablename, table._id.name, table._sequence_name))
self.execute(query)
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "sapdb"
self.uri = uri
self.pool_size = pool_size
self.folder = folder
@@ -2711,12 +2766,10 @@
def connect(user=user,password=password,database=db,
host=host,driver_args=driver_args):
return self.driver.Connection(user,password,database,
host,**driver_args)
self.pool_connection(connect)
- # self.connection.set_client_encoding('UTF8')
- self.cursor = self.connection.cursor()
def lastrowid(self,table):
self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
return int(self.cursor.fetchone()[0])
@@ -2756,11 +2809,10 @@
user=credential_decoder(user),
passwd=credential_decoder(password),
def connect(host,port,db,user,passwd,driver_args=driver_args):
return self.driver.connect(host,port,db,user,passwd,**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
self.execute('SET FOREIGN_KEY_CHECKS=1;')
self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
######## GAE MySQL ##########
@@ -2767,10 +2819,13 @@
class DatabaseStoredFile:
web2py_filesystem = False
+ def adapt(self,obj):
+ return self.db._adapter.adapt(obj)
+
def __init__(self,db,filename,mode):
if db._adapter.dbengine != 'mysql':
raise RuntimeError, "only MySQL can store metadata .table files in database for now"
self.db = db
self.filename = filename
@@ -2779,11 +2834,12 @@
self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
DatabaseStoredFile.web2py_filesystem = True
self.p=0
self.data = ''
if mode in ('r','rw','a'):
- query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
+ query = "SELECT content FROM web2py_filesystem WHERE path='%s'" \
+ % filename
rows = self.db.executesql(query)
if rows:
self.data = rows[0][0]
elif os.path.exists(filename):
datafile = open(filename, 'r')
@@ -2809,21 +2865,23 @@
def write(self,data):
self.data += data
def close(self):
- self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
- query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
- (self.filename, self.data.replace("'","''"))
+ self.db.executesql("DELETE FROM web2py_filesystem WHERE path=%s" \
+ % self.adapt(self.filename))
+ query = "INSERT INTO web2py_filesystem(path,content) VALUES (%s,%s)"\
+ % (self.adapt(self.filename), self.adapt(self.data))
self.db.executesql(query)
self.db.commit()
@staticmethod
def exists(db,filename):
if os.path.exists(filename):
return True
- query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
+ query = "SELECT path FROM web2py_filesystem WHERE path=%s" \
+ % db._adapter.adapt(filename)
if db.executesql(query):
return True
return False
@@ -2843,15 +2901,14 @@
self.db.executesql(query)
self.db.commit()
class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
- def __init__(self, db, uri='google:sql://realm:domain/database', pool_size=0,
- folder=None, db_codec='UTF-8', check_reserved=None,
- migrate=True, fake_migrate=False,
+ def __init__(self, db, uri='google:sql://realm:domain/database',
+ pool_size=0, folder=None, db_codec='UTF-8',
credential_decoder = lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.dbengine = "mysql"
self.uri = uri
self.pool_size = pool_size
@@ -2863,17 +2920,17 @@
if not m:
raise SyntaxError, "Invalid URI string in SQLDB: %s" % self._uri
instance = credential_decoder(m.group('instance'))
db = credential_decoder(m.group('db'))
driver_args['instance'] = instance
- if not migrate:
+ createdb = adapter_args.get('createdb',True)
+ if not createdb:
driver_args['database'] = db
def connect(driver_args=driver_args):
return rdbms.connect(**driver_args)
self.pool_connection(connect)
- self.cursor = self.connection.cursor()
- if migrate:
+ if createdb:
# self.execute('DROP DATABASE %s' % db)
self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
self.execute('USE %s' % db)
self.execute("SET FOREIGN_KEY_CHECKS=1;")
self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
@@ -2903,11 +2960,11 @@
obj = []
if not isinstance(obj, (list, tuple)):
obj = [obj]
if obj == '' and not fieldtype[:2] in ['st','te','pa','up']:
return None
- if obj != None:
+ if not obj is None:
if isinstance(obj, list) and not fieldtype.startswith('list'):
obj = [self.represent(o, fieldtype) for o in obj]
elif fieldtype in ('integer','id'):
obj = long(obj)
elif fieldtype == 'double':
@@ -3044,11 +3101,11 @@
def file_open(self, filename, mode='rb', lock=True): pass
def file_close(self, fileobj, unlock=True): pass
def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.types.update({
'boolean': gae.BooleanProperty,
'string': (lambda: gae.StringProperty(multiline=True)),
'text': gae.TextProperty,
'password': gae.StringProperty,
@@ -3226,11 +3283,13 @@
return self.expand(first)
def truncate(self,table,mode):
self.db(table._id > 0).delete()
- def select_raw(self,query,fields=[],attributes={}):
+ def select_raw(self,query,fields=None,attributes=None):
+ fields = fields or []
+ attributes = attributes or {}
new_fields = []
for item in fields:
if isinstance(item,SQLALL):
new_fields += item.table
else:
@@ -3420,11 +3479,11 @@
return repr(not isinstance(value,unicode) and value or value.encode('utf8'))
def __init__(self,db,uri='couchdb://127.0.0.1:5984',
pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.uri = uri
self.dbengine = 'couchdb'
self.folder = folder
db['_lastsql'] = ''
@@ -3432,11 +3491,11 @@
self.pool_size = pool_size
url='http://'+uri[10:]
def connect(url=url,driver_args=driver_args):
return couchdb.Server(url,**driver_args)
- self.pool_connection(connect)
+ self.pool_connection(connect,cursor=False)
def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
if migrate:
try:
self.connection.create(table._tablename)
@@ -3580,22 +3639,22 @@
}
def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
pool_size=0,folder=None,db_codec ='UTF-8',
credential_decoder=lambda x:x, driver_args={},
- adapter_args={}):
+ adapter_args={}):
self.db = db
self.uri = uri
self.dbengine = 'mongodb'
self.folder = folder
db['_lastsql'] = ''
self.db_codec = 'UTF-8'
self.pool_size = pool_size
- m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self._uri[10:])
+ m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
if not m:
- raise SyntaxError, "Invalid URI string in DAL: %s" % self._uri
+ raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
host = m.group('host')
if not host:
raise SyntaxError, 'mongodb: host name required'
dbname = m.group('db')
if not dbname:
@@ -3602,11 +3661,11 @@
raise SyntaxError, 'mongodb: db name required'
port = m.group('port') or 27017
driver_args.update(dict(host=host,port=port))
def connect(dbname=dbname,driver_args=driver_args):
return pymongo.Connection(**driver_args)[dbname]
- self.pool_connection(connect)
+ self.pool_connection(connect,cursor=False)
def insert(self,table,fields):
ctable = self.connection[table._tablename]
values = dict((k,self.represent(v,table[k].type)) for k,v in fields)
ctable.insert(values)
@@ -3706,11 +3765,11 @@
requires.append(validators.IS_DATETIME())
elif field.db and field_type.startswith('reference') and \
field_type.find('.') < 0 and \
field_type[10:] in field.db.tables:
referenced = field.db[field_type[10:]]
- def repr_ref(id, r=referenced, f=ff): return f(r, id)
+ def repr_ref(id, row=None, r=referenced, f=ff): return f(r, id)
field.represent = field.represent or repr_ref
if hasattr(referenced, '_format') and referenced._format:
requires = validators.IS_IN_DB(field.db,referenced._id,
referenced._format)
if field.unique:
@@ -3720,11 +3779,11 @@
return requires
elif field.db and field_type.startswith('list:reference') and \
field_type.find('.') < 0 and \
field_type[15:] in field.db.tables:
referenced = field.db[field_type[15:]]
- def list_ref_repr(ids, r=referenced, f=ff):
+ def list_ref_repr(ids, row=None, r=referenced, f=ff):
if not ids:
return None
refs = r._db(r._id.belongs(ids)).select(r._id)
return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
field.represent = field.represent or list_ref_repr
@@ -3736,11 +3795,11 @@
multiple=True)
if field.unique:
requires._and = validators.IS_NOT_IN_DB(field.db,field)
return requires
elif field_type.startswith('list:'):
- def repr_list(values): return', '.join(str(v) for v in (values or []))
+ def repr_list(values,row=None): return', '.join(str(v) for v in (values or []))
field.represent = field.represent or repr_list
if field.unique:
requires.insert(0,validators.IS_NOT_IN_DB(field.db,field))
sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
if field.notnull and not field_type[:2] in sff:
@@ -3770,12 +3829,18 @@
this is only used to store a Row
"""
def __getitem__(self, key):
key=str(key)
+ m = table_field.match(key)
if key in self.get('_extra',{}):
return self._extra[key]
+ elif m:
+ try:
+ return dict.__getitem__(self, m.group(1))[m.group(2)]
+ except (KeyError,TypeError):
+ key = m.group(2)
return dict.__getitem__(self, key)
def __call__(self,key):
return self.__getitem__(key)
@@ -3843,10 +3908,115 @@
class SQLCallableList(list):
def __call__(self):
return copy.copy(self)
+def smart_query(fields,text):
+ if not isinstance(fields,(list,tuple)):
+ fields = [fields]
+ new_fields = []
+ for field in fields:
+ if isinstance(field,Field):
+ new_fields.append(field)
+ elif isinstance(field,Table):
+ for ofield in field:
+ new_fields.append(ofield)
+ else:
+ raise RuntimeError, "fields must be a list of fields"
+ field_map = {}
+ for field in fields:
+ n = field.name.lower()
+ if not n in field_map:
+ field_map[n] = field
+ n = str(field).lower()
+ if not n in field_map:
+ field_map[n] = field
+ re_constants = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')')
+ constants = {}
+ i = 0
+ while True:
+ m = re_constants.search(text)
+ if not m: break
+ text = text[:m.start()]+('#%i' % i)+text[m.end():]
+ constants[str(i)] = m.group()[1:-1]
+ i+=1
+ text = re.sub('\s+',' ',text).lower()
+ for a,b in [('&','and'),
+ ('|','or'),
+ ('~','not'),
+ ('==','=='),
+ ('<','<'),
+ ('>','>'),
+ ('<=','<='),
+ ('>=','>='),
+ ('<>','!='),
+ ('=<','<='),
+ ('=>','>='),
+ ('=','=='),
+ (' less or equal than ','<='),
+ (' greater or equal than ','>='),
+ (' equal or less than ','<='),
+ (' equal or greater than ','>='),
+ (' less or equal ','<='),
+ (' greater or equal ','>='),
+ (' equal or less ','<='),
+ (' equal or greater ','>='),
+ (' not equal to ','!='),
+ (' not equal ','!='),
+ (' equal to ','=='),
+ (' equal ','=='),
+ (' equals ','=='),
+ (' less than ','<'),
+ (' greater than ','>'),
+ (' starts with ','startswith'),
+ (' ends with ','endswith'),
+ (' is ','==')]:
+ if a[0]==' ':
+ text = text.replace(' is'+a,' %s ' % b)
+ text = text.replace(a,' %s ' % b)
+ text = re.sub('\s+',' ',text).lower()
+ query = field = neg = op = logic = None
+ for item in text.split():
+ if field is None:
+ if item == 'not':
+ neg = True
+ elif not neg and not logic and item in ('and','or'):
+ logic = item
+ elif item in field_map:
+ field = field_map[item]
+ else:
+ raise RuntimeError, "Invalid syntax"
+ elif not field is None and op is None:
+ op = item
+ elif not op is None:
+ if item.startswith('#'):
+ if not item[1:] in constants:
+ raise RuntimeError, "Invalid syntax"
+ value = constants[item[1:]]
+ else:
+ value = item
+ if op == '==': op = 'like'
+ if op == '==': new_query = field==value
+ elif op == '<': new_query = field<value
+ elif op == '>': new_query = field>value
+ elif op == '<=': new_query = field<=value
+ elif op == '>=': new_query = field>=value
+ elif op == 'contains': new_query = field.contains(value)
+ elif op == 'like': new_query = field.like(value)
+ elif op == 'startswith': new_query = field.startswith(value)
+ elif op == 'endswith': new_query = field.endswith(value)
+ else: raise RuntimeError, "Invalid operation"
+ if neg: new_query = ~new_query
+ if query is None:
+ query = new_query
+ elif logic == 'and':
+ query &= new_query
+ elif logic == 'or':
+ query |= new_query
+ field = op = neg = logic = None
+ return query
+
class DAL(dict):
"""
an instance of this class represents a database connection
@@ -3902,16 +4072,17 @@
for (i, db) in instances:
db._adapter.commit_prepared(keys[i])
return
- def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
+ def __init__(self, uri='sqlite://dummy.db',
+ pool_size=0, folder=None,
db_codec='UTF-8', check_reserved=None,
migrate=True, fake_migrate=False,
migrate_enabled=True, fake_migrate_all=False,
decode_credentials=False, driver_args=None,
- adapter_args={}, attempts=5, auto_import=False):
+ adapter_args=None, attempts=5, auto_import=False):
"""
Creates a new Database Abstraction Layer instance.
Keyword arguments:
@@ -3948,10 +4119,11 @@
self._lastsql = ''
self._timings = []
self._pending_references = {}
self._request_tenant = 'request_tenant'
self._common_fields = []
+ self._referee_name = '%(table)s'
if not str(attempts).isdigit() or attempts < 0:
attempts = 5
if uri:
uris = isinstance(uri,(list,tuple)) and uri or [uri]
error = ''
@@ -3962,12 +4134,15 @@
if is_jdbc and not uri.startswith('jdbc:'):
uri = 'jdbc:'+uri
self._dbname = regex_dbname.match(uri).group()
if not self._dbname in ADAPTERS:
raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
- # notice that driver args or {} else driver_args defaults to {} global, not correct
- args = (self,uri,pool_size,folder,db_codec,credential_decoder,driver_args or {}, adapter_args)
+ # notice that driver args or {} else driver_args
+ # defaults to {} global, not correct
+ args = (self,uri,pool_size,folder,
+ db_codec, credential_decoder,
+ driver_args or {}, adapter_args or {})
self._adapter = ADAPTERS[self._dbname](*args)
connected = True
break
except SyntaxError:
raise
@@ -4266,14 +4441,19 @@
'primarykey',
'fake_migrate',
'format',
'trigger_name',
'sequence_name',
- 'polymodel']:
- raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
- migrate = self._migrate_enabled and args.get('migrate',self._migrate)
- fake_migrate = self._fake_migrate_all or args.get('fake_migrate',self._fake_migrate)
+ 'polymodel',
+ 'table_class']:
+ raise SyntaxError, 'invalid table "%s" attribute: %s' \
+ % (tablename, key)
+ migrate = self._migrate_enabled and args.get('migrate',
+ self._migrate)
+ fake_migrate = self._fake_migrate_all or args.get('fake_migrate',
+ self._fake_migrate)
+ table_class = args.get('table_class',Table)
format = args.get('format',None)
trigger_name = args.get('trigger_name', None)
sequence_name = args.get('sequence_name', None)
primarykey=args.get('primarykey',None)
polymodel=args.get('polymodel',None)
@@ -4291,14 +4471,14 @@
self.check_reserved_keyword(tablename)
if self._common_fields:
fields = [f for f in fields] + [f for f in self._common_fields]
- t = self[tablename] = Table(self, tablename, *fields,
- **dict(primarykey=primarykey,
- trigger_name=trigger_name,
- sequence_name=sequence_name))
+ t = self[tablename] = table_class(self, tablename, *fields,
+ **dict(primarykey=primarykey,
+ trigger_name=trigger_name,
+ sequence_name=sequence_name))
# db magic
if self._uri in (None,'None'):
return t
t._create_references()
@@ -4337,10 +4517,13 @@
self[key] = value
def __repr__(self):
        return '<DAL ' + dict.__repr__(self) + '>'
+ def smart_query(self,fields,text):
+ return Set(self, smart_query(fields,text))
+
def __call__(self, query=None):
if isinstance(query,Table):
query = query._id>0
elif isinstance(query,Field):
query = query!=None
@@ -4407,12 +4590,13 @@
ofile.write('TABLE %s\r\n' % table)
self(self[table]._id > 0).select().export_to_csv_file(ofile, *args, **kwargs)
ofile.write('\r\n\r\n')
ofile.write('END')
- def import_from_csv_file(self, ifile, id_map={}, null='',
+ def import_from_csv_file(self, ifile, id_map=None, null='',
unique='uuid', *args, **kwargs):
+ if id_map is None: id_map={}
for line in ifile:
line = line.strip()
if not line:
continue
elif line == 'END':
@@ -4452,10 +4636,13 @@
if key == 'id':
return int(self)
self.__allocate()
return self._record.get(key, None)
+ def get(self, key):
+ return self.__getattr__(key)
+
def __setattr__(self, key, value):
if key.startswith('_'):
int.__setattr__(self, key, value)
return
self.__allocate()
@@ -4526,11 +4713,11 @@
if primarykey:
if not isinstance(primarykey,list):
raise SyntaxError, \
"primarykey must be a list of fields from table '%s'" \
% tablename
- self._primarykey = primarykey
+ self._primarykey = primarykey
elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
field = Field('id', 'id')
newfields.append(field)
fieldnames.add('id')
self._id = field
@@ -4567,11 +4754,11 @@
and field.uploadfield is True:
tmp = field.uploadfield = '%s_blob' % field.name
fields.append(self._db.Field(tmp, 'blob', default=''))
lower_fieldnames = set()
- reserved = dir(Table) + ['fields']
+ reserved = dir(Table) + ['fields']
for field in fields:
if db and db.check_reserved:
db.check_reserved_keyword(field.name)
elif field.name in reserved:
raise SyntaxError, "field name %s not allowed" % field.name
@@ -4587,11 +4774,11 @@
if field.type == 'id':
self['id'] = field
field.tablename = field._tablename = tablename
field.table = field._table = self
field.db = field._db = self._db
- if self._db and field.type!='text' and \
+ if self._db and not field.type in ('text','blob') and \
self._db._adapter.maxcharlength < field.length:
field.length = self._db._adapter.maxcharlength
if field.requires == DEFAULT:
field.requires = sqlhtml_validators(field)
self.ALL = SQLALL(self)
@@ -4602,10 +4789,13 @@
raise SyntaxError, \
"primarykey must be a list of fields from table '%s " % tablename
else:
self[k].notnull = True
+ def update(self,*args,**kwargs):
+ raise RuntimeError, "Syntax Not Supported"
+
def _validate(self,**vars):
errors = Row()
for key,value in vars.items():
value,error = self[key].validate(value)
if error:
@@ -4763,13 +4953,13 @@
else:
new_fields.append((self[name],fields[name]))
new_fields_names.append(name)
for ofield in self:
if not ofield.name in new_fields_names:
- if not update and ofield.default!=None:
+ if not update and not ofield.default is None:
new_fields.append((ofield,ofield.default))
- elif update and ofield.update!=None:
+ elif update and not ofield.update is None:
new_fields.append((ofield,ofield.update))
for ofield in self:
if not ofield.name in new_fields_names and ofield.compute:
try:
new_fields.append((ofield,ofield.compute(Row(fields))))
@@ -4845,10 +5035,11 @@
if not self._tablename in id_map:
id_map[self._tablename] = {}
id_map_self = id_map[self._tablename]
def fix(field, value, id_map):
+ list_reference_s='list:reference'
if value == null:
value = None
elif field.type=='blob':
value = base64.b64decode(value)
elif field.type=='double':
@@ -4861,12 +5052,12 @@
value = None
else:
value = int(value)
elif field.type.startswith('list:string'):
value = bar_decode_string(value)
- elif field.type.startswith('list:reference'):
- ref_table = field.type[10:].strip()
+ elif field.type.startswith(list_reference_s):
+ ref_table = field.type[len(list_reference_s):].strip()
value = [id_map[ref_table][int(v)] \
for v in bar_decode_string(value)]
elif field.type.startswith('list:'):
value = bar_decode_integer(value)
elif id_map and field.type.startswith('reference'):
@@ -4910,11 +5101,11 @@
record.update_record(**dict(items))
new_id = record[self._id.name]
else:
new_id = self.insert(**dict(items))
if id_map and cid != []:
- id_map_self[line[cid]] = new_id
+ id_map_self[int(line[cid])] = new_id
def with_alias(self, alias):
return self._db._adapter.alias(self,alias)
def on(self, query):
@@ -4974,12 +5165,15 @@
return Expression(self.db, self.db._adapter.EXTRACT, self, 'hour', 'integer')
def minutes(self):
return Expression(self.db, self.db._adapter.EXTRACT, self, 'minute', 'integer')
+ def coalesce(self,*others):
+ return Expression(self.db, self.db._adapter.COALESCE, self, others, self.type)
+
def coalesce_zero(self):
- return Expression(self.db, self.db._adapter.COALESCE_ZERO, self, None, self.type)
+ return Expression(self.db, self.db._adapter.COALESCE_ZERO, self, None, self.type)
def seconds(self):
return Expression(self.db, self.db._adapter.EXTRACT, self, 'second', 'integer')
def __getslice__(self, start, stop):
@@ -5064,11 +5258,14 @@
def endswith(self, value):
if not self.type in ('string', 'text'):
raise SyntaxError, "endswith used with incompatible field type"
return Query(self.db, self.db._adapter.ENDSWITH, self, value)
- def contains(self, value):
+ def contains(self, value, all=False):
+ if isinstance(value,(list,tuple)):
+ subqueries = [self.contains(str(v).strip()) for v in value if str(v).strip()]
+ return reduce(all and AND or OR, subqueries)
if not self.type in ('string', 'text') and not self.type.startswith('list:'):
raise SyntaxError, "contains used with incompatible field type"
return Query(self.db, self.db._adapter.CONTAINS, self, value)
def with_alias(self,alias):
@@ -5130,12 +5327,24 @@
return None
def __str__(self):
return self._class
+class FieldVirtual(object):
+ def __init__(self,f):
+ self.f = f
+
+class FieldLazy(object):
+ def __init__(self,f,handler=None):
+ self.f = f
+ self.handler = handler
+
class Field(Expression):
+
+ Virtual = FieldVirtual
+ Lazy = FieldLazy
"""
an instance of this class represents a database field
example::
@@ -5179,11 +5388,11 @@
ondelete='CASCADE',
notnull=False,
unique=False,
uploadfield=True,
widget=None,
- label=None,
+ label=DEFAULT,
comment=None,
writable=True,
readable=True,
update=None,
authorize=None,
@@ -5213,11 +5422,11 @@
regex_python_keywords.match(fieldname):
raise SyntaxError, 'Field: invalid field name: %s' % fieldname
if isinstance(type, Table):
type = 'reference ' + type._tablename
self.type = type # 'string', 'integer'
- self.length = (length is None) and MAXCHARLENGTH or length
+ self.length = (length is None) and DEFAULTLENGTH.get(type,512) or length
if default==DEFAULT:
self.default = update or None
else:
self.default = default
self.required = required # is this field required
@@ -5226,19 +5435,22 @@
self.unique = unique
self.uploadfield = uploadfield
self.uploadfolder = uploadfolder
self.uploadseparate = uploadseparate
self.widget = widget
- self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
+ if label == DEFAULT:
+ self.label = ' '.join(i.capitalize() for i in fieldname.split('_'))
+ else:
+ self.label = label or ''
self.comment = comment
self.writable = writable
self.readable = readable
self.update = update
self.authorize = authorize
self.autodelete = autodelete
if not represent and type in ('list:integer','list:string'):
- represent=lambda x: ', '.join(str(y) for y in x or [])
+ represent=lambda x,r=None: ', '.join(str(y) for y in x or [])
self.represent = represent
self.compute = compute
self.isattachment = True
self.custom_store = custom_store
self.custom_retrieve = custom_retrieve
@@ -5369,10 +5581,12 @@
return '%s.%s' % (self.tablename, self.name)
except:
return '.%s' % self.name
+def raw(s): return Expression(None,s)
+
class Query(object):
"""
a query object necessary to define a set.
it can be stored or can be passed to DAL.__call__() to obtain a Set
@@ -5390,11 +5604,11 @@
db,
op,
first=None,
second=None,
):
- self.db = db
+ self.db = self._db = db
self.op = op
self.first = first
self.second = second
def __str__(self):
@@ -5447,10 +5661,12 @@
self.query = query
def __call__(self, query):
if isinstance(query,Table):
query = query._id>0
+ elif isinstance(query,str):
+ query = raw(query)
elif isinstance(query,Field):
query = query!=None
if self.query:
return Set(self.db, self.query & query)
else:
@@ -5490,15 +5706,15 @@
fields = self.db[tablename]._listify(update_fields,update=True)
if not fields:
raise SyntaxError, "No fields to update"
self.delete_uploaded_files(update_fields)
return self.db._adapter.update(tablename,self.query,fields)
-
+
def validate_and_update(self, **update_fields):
tablename = self.db._adapter.get_table(self.query)
response = Row()
- response.errors = self.db[tablename]._validate(**update_fields)
+ response.errors = self.db[tablename]._validate(**update_fields)
fields = self.db[tablename]._listify(update_fields,update=True)
if not fields:
raise SyntaxError, "No fields to update"
self.delete_uploaded_files(update_fields)
if not response.errors:
@@ -5535,23 +5751,34 @@
uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
if field.uploadseparate:
items = oldname.split('.')
uploadfolder = os.path.join(uploadfolder,
"%s.%s" % (items[0], items[1]),
- items[2][:2])
+ items[2][:2])
oldpath = os.path.join(uploadfolder, oldname)
if os.path.exists(oldpath):
os.unlink(oldpath)
-def update_record(pack, a={}):
+def update_record(pack, a=None):
(colset, table, id) = pack
b = a or dict(colset)
c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
table._db(table._id==id).update(**c)
for (k, v) in c.items():
colset[k] = v
+class VirtualCommand(object):
+ def __init__(self,method,row):
+ self.method=method
+ #self.instance=instance
+ self.row=row
+ def __call__(self,*args,**kwargs):
+ return self.method(self.row,*args,**kwargs)
+
+def lazy_virtualfield(f):
+ f.__lazy__ = True
+ return f
class Rows(object):
"""
A wrapper for the return value of a select. It basically represents a table.
@@ -5573,24 +5800,46 @@
self.colnames = colnames
self.compact = compact
self.response = rawrows
def setvirtualfields(self,**keyed_virtualfields):
+ """
+ db.define_table('x',Field('number','integer'))
+ if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]
+
+ from gluon.dal import lazy_virtualfield
+
+ class MyVirtualFields(object):
+ # normal virtual field (backward compatible, discouraged)
+ def normal_shift(self): return self.x.number+1
+ # lazy virtual field (because of @staticmethod)
+ @lazy_virtualfield
+ def lazy_shift(instance,row,delta=4): return row.x.number+delta
+ db.x.virtualfields.append(MyVirtualFields())
+
+ for row in db(db.x).select():
+ print row.number, row.normal_shift, row.lazy_shift(delta=7)
+ """
if not keyed_virtualfields:
return self
for row in self.records:
for (tablename,virtualfields) in keyed_virtualfields.items():
attributes = dir(virtualfields)
- virtualfields.__dict__.update(row)
if not tablename in row:
box = row[tablename] = Row()
else:
box = row[tablename]
+ updated = False
for attribute in attributes:
if attribute[0] != '_':
method = getattr(virtualfields,attribute)
- if hasattr(method,'im_func') and method.im_func.func_code.co_argcount:
+ if hasattr(method,'__lazy__'):
+ box[attribute]=VirtualCommand(method,row)
+ elif type(method)==types.MethodType:
+ if not updated:
+ virtualfields.__dict__.update(row)
+ updated = True
box[attribute]=method()
return self
def __and__(self,other):
if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
@@ -5779,11 +6028,11 @@
field = self.db[t][f]
if isinstance(record.get(t, None), (Row,dict)):
value = record[t][f]
else:
value = record[f]
- if field.type=='blob' and value!=None:
+ if field.type=='blob' and not value is None:
value = base64.b64encode(value)
elif represent and field.represent:
value = field.represent(value)
row.append(none_exception(value))
writer.writerow(row)
@@ -5806,18 +6055,20 @@
def inner_loop(record, col):
(t, f) = col.split('.')
res = None
if not table_field.match(col):
+ key = col
res = record._extra[col]
else:
+ key = f
if isinstance(record.get(t, None), Row):
res = record[t][f]
else:
res = record[f]
if mode == 'object':
- return (f, res)
+ return (key, res)
else:
return res
if mode == 'object':
items = [dict([inner_loop(record, col) for col in
@@ -6056,8 +6307,10 @@
################################################################################
if __name__ == '__main__':
import doctest
doctest.testmod()
+
+