"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)

Thanks to
    * Niall Sweeny <niall.sweeny@fonjax.com> for MS SQL support
    * Marcel Leuthi <mluethi@mlsystems.ch> for Oracle support
    * Denes
    * Chris Clark
    * clach05
    * Denes Lengyel
    * and many others who have contributed to current and previous versions

This file contains the DAL support for many relational databases,
including:
- SQLite
- MySQL
- Postgres
- Oracle
- MS SQL
- DB2
- Interbase
- Ingres
- SapDB (experimental)
- Cubrid (experimental)
- CouchDB (experimental)
- MongoDB (in progress)
- Google:nosql
- Google:sql
Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it does not exist)
>>> db = DAL(('mysql://a:b@localhost/x','sqlite://storage.sqlite'),folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James')&(person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower()=='jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,groupby=person.name,limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name=='James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported field types:
id string text boolean integer double decimal password upload blob time date datetime

Supported DAL URI strings:
'sqlite://test.db'
'sqlite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:none@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2://DSN=dsn;UID=user;PWD=pass'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'google:datastore' # for google app engine datastore
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass' # experimental
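
### optional: reuse connections via the pool_size DAL argument (illustrative)
>>> # db = DAL('postgres://user:password@localhost/test', pool_size=10)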

For more info:
help(DAL)
help(Field)
"""

__all__ = ['DAL', 'Field']

MAXCHARLENGTH = 512
INFINITY = 2**15

import re
import sys
import locale
import os
import types
import cPickle
import datetime
import threading
import time
import cStringIO
import csv
import copy
import socket
import logging
import copy_reg
import base64
import shutil
import marshal
import decimal
import struct
import urllib
import hashlib
import uuid
import glob

CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType,
                 types.MethodType, types.BuiltinMethodType)

try:
    from utils import web2py_uuid
except ImportError:
    import uuid
    # fallback for use outside web2py (assumed equivalent of the elided original)
    def web2py_uuid(): return str(uuid.uuid4())

try:
    import portalocker
    have_portalocker = True
except ImportError:
    have_portalocker = False

try:
    import serializers
    have_serializers = True
except ImportError:
    have_serializers = False

try:
    import validators
    have_validators = True
except ImportError:
    have_validators = False

logger = logging.getLogger("web2py.dal")
DEFAULT = lambda: 0

sql_locker = threading.RLock()
thread = threading.local()

regex_dbname = re.compile('^(\w+)(\:\w+)*')
table_field = re.compile('^[\w_]+\.[\w_]+$')
regex_content = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)\.(?P<name>\w+)\.\w+$')
regex_cleanup_fn = re.compile('[\'"\s;]+')
string_unpack = re.compile('(?<!\|)\|(?!\|)')
regex_python_keywords = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$')

drivers = []

try:
    from new import classobj
    from google.appengine.ext import db as gae
    from google.appengine.api import namespace_manager, rdbms
    from google.appengine.api.datastore_types import Key
    from google.appengine.ext.db.polymodel import PolyModel
    drivers.append('google')
except ImportError:
    pass

if not 'google' in drivers:

    try:
        from pysqlite2 import dbapi2 as sqlite3
        drivers.append('pysqlite2')
    except ImportError:
        try:
            from sqlite3 import dbapi2 as sqlite3
            drivers.append('SQLite3')
        except ImportError:
            logger.debug('no sqlite3 or pysqlite2.dbapi2 driver')

    try:
        import contrib.pymysql as pymysql
        drivers.append('pymysql')
    except ImportError:
        logger.debug('no pymysql driver')

    try:
        import psycopg2
        drivers.append('PostgreSQL')
    except ImportError:
        logger.debug('no psycopg2 driver')

    try:
        import cx_Oracle
        drivers.append('Oracle')
    except ImportError:
        logger.debug('no cx_Oracle driver')

    try:
        import pyodbc
        drivers.append('MSSQL/DB2')
    except ImportError:
        logger.debug('no MSSQL/DB2 driver')

    try:
        import kinterbasdb
        drivers.append('Interbase')
    except ImportError:
        logger.debug('no kinterbasdb driver')

    try:
        import firebirdsql
        drivers.append('Firebird')
    except ImportError:
        logger.debug('no Firebird driver')

    try:
        import informixdb
        drivers.append('Informix')
        logger.warning('Informix support is experimental')
    except ImportError:
        logger.debug('no informixdb driver')

    try:
        import sapdb
        drivers.append('SAPDB')
        logger.warning('SAPDB support is experimental')
    except ImportError:
        logger.debug('no sapdb driver')

    try:
        import cubriddb
        drivers.append('Cubrid')
        logger.warning('Cubrid support is experimental')
    except ImportError:
        logger.debug('no cubriddb driver')

    try:
        from com.ziclix.python.sql import zxJDBC
        import java.sql
        from org.sqlite import JDBC
        drivers.append('zxJDBC')
        logger.warning('zxJDBC support is experimental')
        is_jdbc = True
    except ImportError:
        logger.debug('no zxJDBC driver')
        is_jdbc = False

    try:
        import ingresdbi
        drivers.append('Ingres')
    except ImportError:
        logger.debug('no Ingres driver')

    try:
        import couchdb
        drivers.append('CouchDB')
    except ImportError:
        logger.debug('no couchdb driver')

    try:
        import pymongo
        drivers.append('mongoDB')
    except ImportError:
        logger.debug('no mongoDB driver')

if 'google' in drivers:

    is_jdbc = False

    class GAEDecimalProperty(gae.Property):
        """
        GAE decimal implementation
        """
        data_type = decimal.Decimal

        def __init__(self, precision, scale, **kwargs):
            super(GAEDecimalProperty, self).__init__(self, **kwargs)
            d = '1.'
            for x in range(scale):
                d += '0'
            self.round = decimal.Decimal(d)

        def make_value_from_datastore(self, value):
            if value:
                return decimal.Decimal(value).quantize(self.round)
            else:
                return None

        def validate(self, value):
            value = super(GAEDecimalProperty, self).validate(value)
            if value is None or isinstance(value, decimal.Decimal):
                return value
            elif isinstance(value, basestring):
                return decimal.Decimal(value)
            raise gae.BadValueError("Property %s must be a Decimal or string." % self.name)

class ConnectionPool(object):

    pools = {}

    @staticmethod
    def set_folder(folder):
        thread.folder = folder

    @staticmethod
    def close_all_instances(action):
        """ to close cleanly databases in a multithreaded environment """
        if not hasattr(thread, 'instances'):
            return
        while thread.instances:
            instance = thread.instances.pop()
            getattr(instance, action)()
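            # if pooling is enabled and the pool is not full, park the
            # connection for reuse instead of really closing it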
            really = True
            if instance.pool_size:
                sql_locker.acquire()
                pool = ConnectionPool.pools[instance.uri]
                if len(pool) < instance.pool_size:
                    pool.append(instance.connection)
                    really = False
                sql_locker.release()
            if really:
                getattr(instance, 'close')()
        return

388 """ this actually does not make the folder. it has to be there """
389 if hasattr(thread,'folder'):
390 self.folder = thread.folder
391 else:
392 self.folder = thread.folder = ''
393
394
395 if False and self.folder and not os.path.exists(self.folder):
396 os.mkdir(self.folder)
397
415
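    # minimal sketch of the elided pool_connection body, assuming the pooling
    # semantics shown in close_all_instances above (reuse a pooled connection
    # when pool_size is set, else connect anew); a reconstruction, not
    # necessarily the verbatim original
    def pool_connection(self, f):
        if not self.pool_size:
            self.connection = f()
        else:
            uri = self.uri
            sql_locker.acquire()
            pool = ConnectionPool.pools.setdefault(uri, [])
            if pool:
                self.connection = pool.pop()
            else:
                self.connection = None
            sql_locker.release()
            if self.connection is None:
                self.connection = f()
        if not hasattr(thread, 'instances'):
            thread.instances = []
        thread.instances.append(self)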


class BaseAdapter(ConnectionPool):

    driver = None
    maxcharlength = INFINITY
    commit_on_alter_table = False
    support_distributed_transaction = False
    uploads_in_blob = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'CHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'CHAR(%(length)s)',
        'blob': 'BLOB',
        'upload': 'CHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'DOUBLE',
        'decimal': 'DOUBLE',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'INTEGER PRIMARY KEY AUTOINCREMENT',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def file_exists(self, filename):
        """
        to be used ONLY for files that on GAE may not be on filesystem
        """
        return os.path.exists(filename)

    def file_open(self, filename, mode='rb', lock=True):
        # body elided here; minimal sketch (the original presumably also takes
        # a portalocker lock when lock=True and portalocker is available)
        return open(filename, mode)

    def file_close(self, fileobj, unlock=True):
        # body elided here; minimal sketch (the original presumably also
        # releases the portalocker lock)
        fileobj.close()

    def file_delete(self, filename):
        os.unlink(filename)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "None"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        class Dummy(object):
            lastrowid = 1
            def __getattr__(self, value):
                return lambda *a, **b: []
        self.connection = Dummy()
        self.cursor = Dummy()

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_sequence' % tablename

    def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
        fields = []
        sql_fields = {}
        sql_fields_aux = {}
        TFK = {}
        tablename = table._tablename
        sortable = 0
        for field in table:
            sortable += 1
            k = field.name
            if isinstance(field.type, SQLCustomType):
                ftype = field.type.native or field.type.type
            elif field.type.startswith('reference'):
                referenced = field.type[10:].strip()
                constraint_name = self.constraint_name(tablename, field.name)
                if hasattr(table, '_primarykey'):
                    rtablename, rfieldname = referenced.split('.')
                    rtable = table._db[rtablename]
                    rfield = rtable[rfieldname]
                    if rfieldname in rtable._primarykey or rfield.unique:
                        ftype = self.types[rfield.type[:9]] % dict(length=rfield.length)
                        if not rfield.unique and len(rtable._primarykey) > 1:
                            if rtablename not in TFK:
                                TFK[rtablename] = {}
                            TFK[rtablename][rfieldname] = field.name
                        else:
                            ftype = ftype + \
                                self.types['reference FK'] % dict(
                                    constraint_name=constraint_name,
                                    table_name=tablename,
                                    field_name=field.name,
                                    foreign_key='%s (%s)' % (rtablename, rfieldname),
                                    on_delete_action=field.ondelete)
                else:
                    id_fieldname = referenced in table._db and table._db[referenced]._id.name or 'id'
                    ftype = self.types[field.type[:9]] \
                        % dict(table_name=tablename,
                               field_name=field.name,
                               constraint_name=constraint_name,
                               foreign_key=referenced + ('(%s)' % id_fieldname),
                               on_delete_action=field.ondelete)
            elif field.type.startswith('list:reference'):
                ftype = self.types[field.type[:14]]
            elif field.type.startswith('decimal'):
                precision, scale = map(int, field.type[8:-1].split(','))
                ftype = self.types[field.type[:7]] % \
                    dict(precision=precision, scale=scale)
            elif not field.type in self.types:
                raise SyntaxError, 'Field: unknown field type: %s for %s' % \
                    (field.type, field.name)
            else:
                ftype = self.types[field.type] \
                    % dict(length=field.length)
            if not field.type.startswith('id') and not field.type.startswith('reference'):
                if field.notnull:
                    ftype += ' NOT NULL'
                else:
                    ftype += self.ALLOW_NULL()
                if field.unique:
                    ftype += ' UNIQUE'

            sql_fields[field.name] = dict(sortable=sortable,
                                          type=str(field.type),
                                          sql=ftype)

            if isinstance(field.default, (str, int, float)):
                not_null = self.NOT_NULL(field.default, field.type)
                ftype = ftype.replace('NOT NULL', not_null)
            sql_fields_aux[field.name] = dict(sql=ftype)

            fields.append('%s %s' % (field.name, ftype))
        other = ';'

        if self.dbengine == 'mysql':
            if not hasattr(table, "_primarykey"):
                fields.append('PRIMARY KEY(%s)' % table._id.name)
            other = ' ENGINE=InnoDB CHARACTER SET utf8;'

        fields = ',\n    '.join(fields)
        for rtablename in TFK:
            rfields = TFK[rtablename]
            pkeys = table._db[rtablename]._primarykey
            fkeys = [rfields[k] for k in pkeys]
            fields = fields + ',\n    ' + \
                self.types['reference TFK'] % \
                dict(table_name=tablename,
                     field_name=', '.join(fkeys),
                     foreign_table=rtablename,
                     foreign_key=', '.join(pkeys),
                     on_delete_action=field.ondelete)

        if hasattr(table, '_primarykey'):
            query = '''CREATE TABLE %s(\n    %s,\n    %s) %s''' % \
                (tablename, fields, self.PRIMARY_KEY(', '.join(table._primarykey)), other)
        else:
            query = '''CREATE TABLE %s(\n    %s\n)%s''' % \
                (tablename, fields, other)

        if self.uri.startswith('sqlite:///'):
            path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
            dbpath = self.uri[9:self.uri.rfind('/')].decode('utf8').encode(path_encoding)
        else:
            dbpath = self.folder

        if not migrate:
            return query
        elif self.uri.startswith('sqlite:memory'):
            table._dbt = None
        elif isinstance(migrate, str):
            table._dbt = os.path.join(dbpath, migrate)
        else:
            table._dbt = os.path.join(dbpath, '%s_%s.table' \
                                      % (table._db._uri_hash, tablename))
        if table._dbt:
            table._loggername = os.path.join(dbpath, 'sql.log')
            logfile = self.file_open(table._loggername, 'a')
        else:
            logfile = None
        if not table._dbt or not self.file_exists(table._dbt):
            if table._dbt:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                logfile.write(query + '\n')
            if not fake_migrate:
                self.create_sequence_and_triggers(query, table)
                table._db.commit()
            if table._dbt:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields, tfile)
                self.file_close(tfile)
                if fake_migrate:
                    logfile.write('faked!\n')
                else:
                    logfile.write('success!\n')
        else:
            tfile = self.file_open(table._dbt, 'r')
            try:
                sql_fields_old = cPickle.load(tfile)
            except EOFError:
                self.file_close(tfile)
                self.file_close(logfile)
                raise RuntimeError, 'File %s appears corrupted' % table._dbt
            self.file_close(tfile)
            if sql_fields != sql_fields_old:
                self.migrate_table(table,
                                   sql_fields, sql_fields_old,
                                   sql_fields_aux, logfile,
                                   fake_migrate=fake_migrate)
        self.file_close(logfile)
        return query

    def migrate_table(
        self,
        table,
        sql_fields,
        sql_fields_old,
        sql_fields_aux,
        logfile,
        fake_migrate=False,
        ):
        tablename = table._tablename
        def fix(item):
            k, v = item
            if not isinstance(v, dict):
                v = dict(type='unknown', sql=v)
            return k.lower(), v
        sql_fields = dict(map(fix, sql_fields.items()))
        sql_fields_old = dict(map(fix, sql_fields_old.items()))
        sql_fields_aux = dict(map(fix, sql_fields_aux.items()))

        keys = sql_fields.keys()
        for key in sql_fields_old:
            if not key in keys:
                keys.append(key)
        if self.dbengine == 'mssql':
            new_add = '; ALTER TABLE %s ADD ' % tablename
        else:
            new_add = ', ADD '

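        # diff the new field metadata against the metadata saved in the .table
        # file and emit ALTER TABLE statements for added, changed or dropped columns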
        metadata_change = False
        sql_fields_current = copy.copy(sql_fields_old)
        for key in keys:
            query = None
            if not key in sql_fields_old:
                sql_fields_current[key] = sql_fields[key]
                query = ['ALTER TABLE %s ADD %s %s;' % \
                         (tablename, key,
                          sql_fields_aux[key]['sql'].replace(', ', new_add))]
                metadata_change = True
            elif self.dbengine == 'sqlite':
                if key in sql_fields:
                    sql_fields_current[key] = sql_fields[key]
                metadata_change = True
            elif not key in sql_fields:
                del sql_fields_current[key]
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s DROP COLUMN %s;' % (tablename, key)]
                else:
                    query = ['ALTER TABLE %s DROP %s;' % (tablename, key)]
                metadata_change = True
            elif sql_fields[key]['sql'] != sql_fields_old[key]['sql'] \
                    and not isinstance(table[key].type, SQLCustomType) \
                    and not (table[key].type.startswith('reference') and \
                        sql_fields[key]['sql'].startswith('INT,') and \
                        sql_fields_old[key]['sql'].startswith('INT NOT NULL,')):
                sql_fields_current[key] = sql_fields[key]
                t = tablename
                tt = sql_fields_aux[key]['sql'].replace(', ', new_add)
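                # the column type changed: most engines cannot ALTER a column
                # type in place, so copy the data through a temporary __tmp
                # column and then drop the original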
                if not self.dbengine in ('firebird',):
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP COLUMN %s__tmp;' % (t, key)]
                else:
                    query = ['ALTER TABLE %s ADD %s__tmp %s;' % (t, key, tt),
                             'UPDATE %s SET %s__tmp=%s;' % (t, key, key),
                             'ALTER TABLE %s DROP %s;' % (t, key),
                             'ALTER TABLE %s ADD %s %s;' % (t, key, tt),
                             'UPDATE %s SET %s=%s__tmp;' % (t, key, key),
                             'ALTER TABLE %s DROP %s__tmp;' % (t, key)]
                metadata_change = True
            elif sql_fields[key]['type'] != sql_fields_old[key]['type']:
                sql_fields_current[key] = sql_fields[key]
                metadata_change = True

            if query:
                logfile.write('timestamp: %s\n'
                              % datetime.datetime.today().isoformat())
                table._db['_lastsql'] = '\n'.join(query)
                for sub_query in query:
                    logfile.write(sub_query + '\n')
                    if not fake_migrate:
                        self.execute(sub_query)
                        if table._db._adapter.commit_on_alter_table:
                            table._db.commit()
                            tfile = self.file_open(table._dbt, 'w')
                            cPickle.dump(sql_fields_current, tfile)
                            self.file_close(tfile)
                            logfile.write('success!\n')
                    else:
                        logfile.write('faked!\n')
            elif metadata_change:
                tfile = self.file_open(table._dbt, 'w')
                cPickle.dump(sql_fields_current, tfile)
                self.file_close(tfile)

        if metadata_change and \
                not (query and self.dbengine in ('mysql', 'oracle', 'firebird')):
            table._db.commit()
            tfile = self.file_open(table._dbt, 'w')
            cPickle.dump(sql_fields_current, tfile)
            self.file_close(tfile)

    # several one-line SQL helpers were collapsed here; the missing signatures
    # and bodies below are reconstructed to their standard renderings
    def EXTRACT(self, first, what):
        return "EXTRACT(%s FROM %s)" % (what, self.expand(first))

    def JOIN(self):
        return 'JOIN'

    def LEFT_JOIN(self):
        return 'LEFT JOIN'

    def RANDOM(self):
        return 'Random()'

    def NOT_NULL(self, default, field_type):
        return 'NOT NULL DEFAULT %s' % self.represent(default, field_type)

    def ALLOW_NULL(self):
        return ''

    def SUBSTRING(self, field, parameters):
        return 'SUBSTR(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY(%s)' % key

    def _drop(self, table, mode):
        return ['DROP TABLE %s;' % table]

    def drop(self, table, mode=''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        queries = self._drop(table, mode)
        for query in queries:
            if table._dbt:
                logfile.write(query + '\n')
            self.execute(query)
        table._db.commit()
        del table._db[table._tablename]
        del table._db.tables[table._db.tables.index(table._tablename)]
        table._db._update_referenced_by(table._tablename)
        if table._dbt:
            self.file_delete(table._dbt)
            logfile.write('success!\n')

    def insert(self, table, fields):
        query = self._insert(table, fields)
        try:
            self.execute(query)
        except Exception, e:
            if isinstance(e, self.integrity_error_class()):
                return None
            raise e
        if hasattr(table, '_primarykey'):
            return dict([(k[0].name, k[1]) for k in fields \
                         if k[0].name in table._primarykey])
        id = self.lastrowid(table)
        if not isinstance(id, int):
            return id
        rid = Reference(id)
        (rid._table, rid._record) = (table, None)
        return rid

    # the one-line bodies of the operators below were collapsed here; they are
    # reconstructed to their standard SQL renderings
    def NOT(self, first):
        return '(NOT %s)' % self.expand(first)

    def AND(self, first, second):
        return '(%s AND %s)' % (self.expand(first), self.expand(second))

    def OR(self, first, second):
        return '(%s OR %s)' % (self.expand(first), self.expand(second))

    def BELONGS(self, first, second):
        if isinstance(second, str):
            return '(%s IN (%s))' % (self.expand(first), second[:-1])
        elif second == [] or second == ():
            return '(0)'
        items = ','.join(self.expand(item, first.type) for item in second)
        return '(%s IN (%s))' % (self.expand(first), items)

    def LIKE(self, first, second):
        return '(%s LIKE %s)' % (self.expand(first), self.expand(second, 'string'))

    def CONTAINS(self, first, second):
        if first.type in ('string', 'text'):
            key = '%' + str(second).replace('%', '%%') + '%'
        elif first.type.startswith('list:'):
            key = '%|' + str(second).replace('|', '||').replace('%', '%%') + '|%'
        return '(%s LIKE %s)' % (self.expand(first), self.expand(key, 'string'))

    def EQ(self, first, second=None):
        if second is None:
            return '(%s IS NULL)' % self.expand(first)
        return '(%s = %s)' % (self.expand(first), self.expand(second, first.type))

    def NE(self, first, second=None):
        if second is None:
            return '(%s IS NOT NULL)' % self.expand(first)
        return '(%s <> %s)' % (self.expand(first), self.expand(second, first.type))

    def LT(self, first, second=None):
        return '(%s < %s)' % (self.expand(first), self.expand(second, first.type))

    def LE(self, first, second=None):
        return '(%s <= %s)' % (self.expand(first), self.expand(second, first.type))

    def GT(self, first, second=None):
        return '(%s > %s)' % (self.expand(first), self.expand(second, first.type))

    def GE(self, first, second=None):
        return '(%s >= %s)' % (self.expand(first), self.expand(second, first.type))

    def ADD(self, first, second):
        return '(%s + %s)' % (self.expand(first), self.expand(second, first.type))

    def SUB(self, first, second):
        return '(%s - %s)' % (self.expand(first), self.expand(second, first.type))

    def MUL(self, first, second):
        return '(%s * %s)' % (self.expand(first), self.expand(second, first.type))

    def DIV(self, first, second):
        return '(%s / %s)' % (self.expand(first), self.expand(second, first.type))

    def MOD(self, first, second):
        return '(%s %% %s)' % (self.expand(first), self.expand(second, first.type))

    def AS(self, first, second):
        return '%s AS %s' % (self.expand(first), second)

    def ON(self, first, second):
        return '%s ON %s' % (self.expand(first), self.expand(second))

    def COMMA(self, first, second):
        return '%s, %s' % (self.expand(first), self.expand(second))

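    # expand() recursively renders a Field, Expression or Query tree into its
    # SQL string form, delegating literal values to represent()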
    def expand(self, expression, field_type=None):
        if isinstance(expression, Field):
            return str(expression)
        elif isinstance(expression, (Expression, Query)):
            if not expression.second is None:
                return expression.op(expression.first, expression.second)
            elif not expression.first is None:
                return expression.op(expression.first)
            else:
                return expression.op()
        elif field_type:
            return self.represent(expression, field_type)
        elif isinstance(expression, (list, tuple)):
            return ','.join([self.represent(item, field_type) for item in expression])
        else:
            return str(expression)

    def alias(self, table, alias):
        """
        given a table object, makes a new table object
        with alias name.
        """
        other = copy.copy(table)
        other['_ot'] = other._tablename
        other['ALL'] = SQLALL(other)
        other['_tablename'] = alias
        for fieldname in other.fields:
            other[fieldname] = copy.copy(other[fieldname])
            other[fieldname]._tablename = alias
            other[fieldname].tablename = alias
            other[fieldname].table = other
        table._db[alias] = other
        return other

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['TRUNCATE TABLE %s %s;' % (tablename, mode or '')]

    def truncate(self, table, mode=''):
        if table._dbt:
            logfile = self.file_open(table._loggername, 'a')
        else:
            class Logfile(object):
                def write(self, value):
                    pass
                def close(self):
                    pass
            logfile = Logfile()

        try:
            queries = table._db._adapter._truncate(table, mode)
            for query in queries:
                logfile.write(query + '\n')
                self.execute(query)
            table._db.commit()
            logfile.write('success!\n')
        finally:
            logfile.close()

    def _update(self, tablename, query, fields):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_v = ','.join(['%s=%s' % (field.name, self.expand(value, field.type)) for (field, value) in fields])
        return 'UPDATE %s SET %s%s;' % (tablename, sql_v, sql_w)

    def update(self, tablename, query, fields):
        sql = self._update(tablename, query, fields)
        self.execute(sql)
        try:
            return self.cursor.rowcount
        except:
            return None

    def _delete(self, tablename, query):
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        return 'DELETE FROM %s%s;' % (tablename, sql_w)

    def delete(self, tablename, query):
        sql = self._delete(tablename, query)

        db = self.db
        table = db[tablename]
        if self.dbengine == 'sqlite' and table._referenced_by:
            deleted = [x[table._id.name] for x in db(query).select(table._id)]

        self.execute(sql)
        try:
            counter = self.cursor.rowcount
        except:
            counter = None

        if self.dbengine == 'sqlite' and counter:
            for tablename, fieldname in table._referenced_by:
                f = db[tablename][fieldname]
                if f.type == 'reference ' + table._tablename and f.ondelete == 'CASCADE':
                    db(db[tablename][fieldname].belongs(deleted)).delete()

        return counter

    def get_table(self, query):
        tablenames = self.tables(query)
        if len(tablenames) == 1:
            return tablenames[0]
        elif len(tablenames) < 1:
            raise RuntimeError, "No table selected"
        else:
            raise RuntimeError, "Too many tables selected"

    def _select(self, query, fields, attributes):
        for key in set(attributes.keys()) - set(('orderby', 'groupby', 'limitby',
                                                 'required', 'cache', 'left',
                                                 'distinct', 'having', 'join')):
            raise SyntaxError, 'invalid select attribute: %s' % key

        new_fields = []
        for item in fields:
            if isinstance(item, SQLALL):
                new_fields += item.table
            else:
                new_fields.append(item)
        fields = new_fields
        tablenames = self.tables(query)
        query = self.filter_tenant(query, tablenames)
        if not fields:
            for table in tablenames:
                for field in self.db[table]:
                    fields.append(field)
        else:
            for field in fields:
                if isinstance(field, basestring) and table_field.match(field):
                    tn, fn = field.split('.')
                    field = self.db[tn][fn]
                for tablename in self.tables(field):
                    if not tablename in tablenames:
                        tablenames.append(tablename)
        if len(tablenames) < 1:
            raise SyntaxError, 'Set: no tables selected'
        sql_f = ', '.join(map(self.expand, fields))
        self._colnames = [c.strip() for c in sql_f.split(', ')]
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_o = ''
        sql_s = ''
        left = attributes.get('left', False)
        inner_join = attributes.get('join', False)
        distinct = attributes.get('distinct', False)
        groupby = attributes.get('groupby', False)
        orderby = attributes.get('orderby', False)
        having = attributes.get('having', False)
        limitby = attributes.get('limitby', False)
        if distinct is True:
            sql_s += 'DISTINCT'
        elif distinct:
            sql_s += 'DISTINCT ON (%s)' % distinct
        if inner_join:
            icommand = self.JOIN()
            if not isinstance(inner_join, (tuple, list)):
                inner_join = [inner_join]
            ijoint = [t._tablename for t in inner_join if not isinstance(t, Expression)]
            ijoinon = [t for t in inner_join if isinstance(t, Expression)]
            ijoinont = [t.first._tablename for t in ijoinon]
            iexcluded = [t for t in tablenames if not t in ijoint + ijoinont]
        if left:
            join = attributes['left']
            command = self.LEFT_JOIN()
            if not isinstance(join, (tuple, list)):
                join = [join]
            joint = [t._tablename for t in join if not isinstance(t, Expression)]
            joinon = [t for t in join if isinstance(t, Expression)]

            tables_to_merge = {}
            [tables_to_merge.update(dict.fromkeys(self.tables(t))) for t in joinon]
            joinont = [t.first._tablename for t in joinon]
            [tables_to_merge.pop(t) for t in joinont if t in tables_to_merge]
            important_tablenames = joint + joinont + tables_to_merge.keys()
            excluded = [t for t in tablenames if not t in important_tablenames]
        def alias(t):
            return str(self.db[t])
        if inner_join and not left:
            sql_t = ', '.join(alias(t) for t in iexcluded)
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
        elif not inner_join and left:
            sql_t = ', '.join([alias(t) for t in excluded + tables_to_merge.keys()])
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        elif inner_join and left:
            sql_t = ','.join([alias(t) for t in excluded + \
                                  tables_to_merge.keys() if t in iexcluded])
            for t in ijoinon:
                sql_t += ' %s %s' % (icommand, str(t))
            if joint:
                sql_t += ' %s %s' % (command, ','.join([t for t in joint]))
            for t in joinon:
                sql_t += ' %s %s' % (command, str(t))
        else:
            sql_t = ', '.join(alias(t) for t in tablenames)
        if groupby:
            if isinstance(groupby, (list, tuple)):
                groupby = xorify(groupby)
            sql_o += ' GROUP BY %s' % self.expand(groupby)
            if having:
                sql_o += ' HAVING %s' % attributes['having']
        if orderby:
            if isinstance(orderby, (list, tuple)):
                orderby = xorify(orderby)
            if str(orderby) == '<random>':
                sql_o += ' ORDER BY %s' % self.RANDOM()
            else:
                sql_o += ' ORDER BY %s' % self.expand(orderby)
        if limitby:
            if not orderby and tablenames:
                sql_o += ' ORDER BY %s' % ', '.join(['%s.%s' % (t, x) for t in tablenames for x in ((hasattr(self.db[t], '_primarykey') and self.db[t]._primarykey) or [self.db[t]._id.name])])

        return self.select_limitby(sql_s, sql_f, sql_t, sql_w, sql_o, limitby)

    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_o += ' LIMIT %i OFFSET %i' % (lmax - lmin, lmin)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def select(self, query, fields, attributes):
        """
        Always returns a Rows object, even if it may be empty
        """
        def response(sql):
            self.execute(sql)
            return self.cursor.fetchall()
        sql = self._select(query, fields, attributes)
        if attributes.get('cache', None):
            (cache_model, time_expire) = attributes['cache']
            del attributes['cache']
            key = self.uri + '/' + sql
            key = (len(key) <= 200) and key or hashlib.md5(key).hexdigest()
            rows = cache_model(key, lambda: response(sql), time_expire)
        else:
            rows = response(sql)
        if isinstance(rows, tuple):
            rows = list(rows)
        limitby = attributes.get('limitby', None) or (0,)
        rows = self.rowslice(rows, limitby[0], None)
        return self.parse(rows, self._colnames)

    def _count(self, query, distinct=None):
        tablenames = self.tables(query)
        if query:
            sql_w = ' WHERE ' + self.expand(query)
        else:
            sql_w = ''
        sql_t = ','.join(tablenames)
        if distinct:
            if isinstance(distinct, (list, tuple)):
                distinct = xorify(distinct)
            sql_d = self.expand(distinct)
            return 'SELECT count(DISTINCT %s) FROM %s%s' % (sql_d, sql_t, sql_w)
        return 'SELECT count(*) FROM %s%s' % (sql_t, sql_w)

    def count(self, query, distinct=None):
        self.execute(self._count(query, distinct))
        return self.cursor.fetchone()[0]


    def commit(self):
        return self.connection.commit()

    def rollback(self):
        return self.connection.rollback()

    def close(self):
        return self.connection.close()

    def constraint_name(self, table, fieldname):
        return '%s_%s__constraint' % (table, fieldname)

    def log_execute(self, *a, **b):
        self.db._lastsql = a[0]
        t0 = time.time()
        ret = self.cursor.execute(*a, **b)
        self.db._timings.append((a[0], time.time() - t0))
        return ret

    def execute(self, *a, **b):
        return self.log_execute(*a, **b)

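    # represent() serializes a Python value into an SQL literal for the given
    # field type: quoting strings, base64-encoding blobs, ISO-formatting dates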
    def represent(self, obj, fieldtype):
        if isinstance(obj, CALLABLETYPES):
            obj = obj()
        if isinstance(fieldtype, SQLCustomType):
            return fieldtype.encoder(obj)
        if isinstance(obj, (Expression, Field)):
            return str(obj)
        if fieldtype.startswith('list:'):
            if not obj:
                obj = []
            if not isinstance(obj, (list, tuple)):
                obj = [obj]
        if isinstance(obj, (list, tuple)):
            obj = bar_encode(obj)
        if obj is None:
            return 'NULL'
        if obj == '' and not fieldtype[:2] in ['st', 'te', 'pa', 'up']:
            return 'NULL'
        r = self.represent_exceptions(obj, fieldtype)
        if r != None:
            return r
        if fieldtype == 'boolean':
            if obj and not str(obj)[:1].upper() in ['F', '0']:
                return "'T'"
            else:
                return "'F'"
        if fieldtype == 'id' or fieldtype == 'integer':
            return str(int(obj))
        if fieldtype.startswith('decimal'):
            return str(obj)
        elif fieldtype.startswith('reference'):
            if fieldtype.find('.') > 0:
                return repr(obj)
            elif isinstance(obj, (Row, Reference)):
                return str(obj['id'])
            return str(int(obj))
        elif fieldtype == 'double':
            return repr(float(obj))
        if isinstance(obj, unicode):
            obj = obj.encode(self.db_codec)
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + ' 00:00:00'
            else:
                obj = str(obj)
        elif fieldtype == 'time':
            if isinstance(obj, datetime.time):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
        if not isinstance(obj, str):
            obj = str(obj)
        try:
            obj.decode(self.db_codec)
        except:
            obj = obj.decode('latin1').encode(self.db_codec)
        return "'%s'" % obj.replace("'", "''")

    def rowslice(self, rows, minimum=0, maximum=None):
        """ by default this function does nothing, overload when db does not do slicing """
        return rows

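    # parse() turns raw cursor rows into Row objects, decoding each column by
    # its field type (references, booleans, dates/times, blobs, decimals, lists)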
    def parse(self, rows, colnames, blob_decode=True):
        db = self.db
        virtualtables = []
        new_rows = []
        for (i, row) in enumerate(rows):
            new_row = Row()
            for j, colname in enumerate(colnames):
                value = row[j]
                if not table_field.match(colnames[j]):
                    if not '_extra' in new_row:
                        new_row['_extra'] = Row()
                    new_row['_extra'][colnames[j]] = value
                    select_as_parser = re.compile("\s+AS\s+(\S+)")
                    new_column_name = select_as_parser.search(colnames[j])
                    if not new_column_name is None:
                        column_name = new_column_name.groups(0)
                        setattr(new_row, column_name[0], value)
                    continue
                (tablename, fieldname) = colname.split('.')
                table = db[tablename]
                field = table[fieldname]
                field_type = field.type
                if field.type != 'blob' and isinstance(value, str):
                    try:
                        value = value.decode(db._db_codec)
                    except Exception:
                        pass
                if isinstance(value, unicode):
                    value = value.encode('utf-8')
                if not tablename in new_row:
                    colset = new_row[tablename] = Row()
                    if tablename not in virtualtables:
                        virtualtables.append(tablename)
                else:
                    colset = new_row[tablename]

                if isinstance(field_type, SQLCustomType):
                    colset[fieldname] = field_type.decoder(value)
                elif not isinstance(field_type, str) or value is None:
                    colset[fieldname] = value
                elif isinstance(field_type, str) and \
                        field_type.startswith('reference'):
                    referee = field_type[10:].strip()
                    if not '.' in referee:
                        colset[fieldname] = rid = Reference(value)
                        (rid._table, rid._record) = (db[referee], None)
                    else:
                        colset[fieldname] = value
                elif field_type == 'boolean':
                    if value == True or str(value)[:1].lower() == 't':
                        colset[fieldname] = True
                    else:
                        colset[fieldname] = False
                elif field_type == 'date' \
                        and (not isinstance(value, datetime.date) \
                                 or isinstance(value, datetime.datetime)):
                    (y, m, d) = map(int, str(value)[:10].strip().split('-'))
                    colset[fieldname] = datetime.date(y, m, d)
                elif field_type == 'time' \
                        and not isinstance(value, datetime.time):
                    time_items = map(int, str(value)[:8].strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.time(h, mi, s)
                elif field_type == 'datetime' \
                        and not isinstance(value, datetime.datetime):
                    (y, m, d) = map(int, str(value)[:10].strip().split('-'))
                    time_items = map(int, str(value)[11:19].strip().split(':')[:3])
                    if len(time_items) == 3:
                        (h, mi, s) = time_items
                    else:
                        (h, mi, s) = time_items + [0]
                    colset[fieldname] = datetime.datetime(y, m, d, h, mi, s)
                elif field_type == 'blob' and blob_decode:
                    colset[fieldname] = base64.b64decode(str(value))
                elif field_type.startswith('decimal'):
                    decimals = int(field_type[8:-1].split(',')[-1])
                    if self.dbengine == 'sqlite':
                        value = ('%.' + str(decimals) + 'f') % value
                    if not isinstance(value, decimal.Decimal):
                        value = decimal.Decimal(str(value))
                    colset[fieldname] = value
                elif field_type.startswith('list:integer'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:reference'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_integer(value)
                    else:
                        colset[fieldname] = value
                elif field_type.startswith('list:string'):
                    if not self.dbengine == 'google:datastore':
                        colset[fieldname] = bar_decode_string(value)
                    else:
                        colset[fieldname] = value
                else:
                    colset[fieldname] = value
                if field_type == 'id':
                    id = colset[field.name]
                    colset.update_record = lambda _=(colset, table, id), **a: update_record(_, a)
                    colset.delete_record = lambda t=table, i=id: t._db(t._id == i).delete()
                    for (referee_table, referee_name) in \
                            table._referenced_by:
                        s = db[referee_table][referee_name]
                        if not referee_table in colset:
                            colset[referee_table] = Set(db, s == id)
                    colset['id'] = id
            new_rows.append(new_row)
        rowsobj = Rows(db, new_rows, colnames, rawrows=rows)
        for tablename in virtualtables:
            for item in db[tablename].virtualfields:
                try:
                    rowsobj = rowsobj.setvirtualfields(**{tablename: item})
                except KeyError:
                    pass
        return rowsobj

    def filter_tenant(self, query, tablenames):
        fieldname = self.db._request_tenant
        for tablename in tablenames:
            table = self.db[tablename]
            if fieldname in table:
                default = table[fieldname].default
                if default != None:
                    query = query & (table[fieldname] == default)
        return query


class SQLiteAdapter(BaseAdapter):

    driver = globals().get('sqlite3', None)

    def EXTRACT(self, field, what):
        return "web2py_extract('%s',%s)" % (what, self.expand(field))

    @staticmethod
    def web2py_extract(lookup, s):
        table = {
            'year': (0, 4),
            'month': (5, 7),
            'day': (8, 10),
            'hour': (11, 13),
            'minute': (14, 16),
            'second': (17, 19),
            }
        try:
            (i, j) = table[lookup]
            return int(s[i:j])
        except:
            return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        if not 'check_same_thread' in driver_args:
            driver_args['check_same_thread'] = False
        def connect(dbpath=dbpath, driver_args=driver_args):
            return self.driver.Connection(dbpath, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.connection.create_function('web2py_extract', 2, SQLiteAdapter.web2py_extract)

    def _truncate(self, table, mode=''):
        tablename = table._tablename
        return ['DELETE FROM %s;' % tablename,
                "DELETE FROM sqlite_sequence WHERE name='%s';" % tablename]

    def lastrowid(self, table):
        return self.cursor.lastrowid


class JDBCSQLiteAdapter(SQLiteAdapter):

    driver = globals().get('zxJDBC', None)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "sqlite"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        path_encoding = sys.getfilesystemencoding() or locale.getdefaultlocale()[1] or 'utf8'
        if uri.startswith('sqlite:memory'):
            dbpath = ':memory:'
        else:
            dbpath = uri.split('://')[1]
            if dbpath[0] != '/':
                dbpath = os.path.join(self.folder.decode(path_encoding).encode('utf8'), dbpath)
        def connect(dbpath=dbpath, driver_args=driver_args):
            return self.driver.connect(java.sql.DriverManager.getConnection('jdbc:sqlite:' + dbpath), **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()


class MySQLAdapter(BaseAdapter):

    driver = globals().get('pymysql', None)
    maxcharlength = 255
    commit_on_alter_table = True
    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'LONGTEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'LONGBLOB',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'DOUBLE',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'DATETIME',
        'id': 'INT AUTO_INCREMENT NOT NULL',
        'reference': 'INT, INDEX %(field_name)s__idx (%(field_name)s), FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'LONGTEXT',
        'list:string': 'LONGTEXT',
        'list:reference': 'LONGTEXT',
        }

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def _drop(self, table, mode):
        return ['SET FOREIGN_KEY_CHECKS=0;', 'DROP TABLE %s;' % table,
                'SET FOREIGN_KEY_CHECKS=1;']

    def concat_add(self, table):
        return '; ALTER TABLE %s ADD ' % table

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "mysql"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
        if not m:
            raise SyntaxError, \
                "Invalid URI string in DAL: %s" % self.uri
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = int(m.group('port') or '3306')
        charset = m.group('charset') or 'utf8'
        driver_args.update(dict(db=db,
                                user=credential_decoder(user),
                                passwd=credential_decoder(password),
                                host=host,
                                port=port,
                                charset=charset))
        def connect(driver_args=driver_args):
            return self.driver.connect(**driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute('SET FOREIGN_KEY_CHECKS=1;')
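        # NO_BACKSLASH_ESCAPES makes MySQL treat backslashes literally, so the
        # DAL's quote-doubling escaping of string literals stays unambiguous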
        self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")

    def lastrowid(self, table):
        self.execute('select last_insert_id();')
        return int(self.cursor.fetchone()[0])


class PostgreSQLAdapter(BaseAdapter):

    driver = globals().get('psycopg2', None)

    support_distributed_transaction = True
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'BYTEA',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INTEGER',
        'double': 'FLOAT8',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'TIME',
        'datetime': 'TIMESTAMP',
        'id': 'SERIAL PRIMARY KEY',
        'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def sequence_name(self, table):
        return '%s_id_Seq' % table

    def prepare(self, key):
        self.execute("PREPARE TRANSACTION '%s';" % key)

    def commit_prepared(self, key):
        self.execute("COMMIT PREPARED '%s';" % key)

    def rollback_prepared(self, key):
        self.execute("ROLLBACK PREPARED '%s';" % key)

    def create_sequence_and_triggers(self, query, table, **args):
        # the table has a SERIAL primary key, so no explicit sequence or
        # trigger is needed: just run the CREATE TABLE statement
        self.execute(query)

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        sslmode = m.group('sslmode')
        if sslmode:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s' sslmode='%s'") \
                   % (db, user, host, port, password, sslmode)
        else:
            msg = ("dbname='%s' user='%s' host='%s' "
                   "port=%s password='%s'") \
                   % (db, user, host, port, password)
        def connect(msg=msg, driver_args=driver_args):
            return self.driver.connect(msg, **driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")
        self.execute("SET standard_conforming_strings=on;")

    def lastrowid(self, table):
        self.execute("select currval('%s')" % table._sequence_name)
        return int(self.cursor.fetchone()[0])

    def LIKE(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(second, 'string'))

    def STARTSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(second + '%', 'string'))

    def ENDSWITH(self, first, second):
        return '(%s ILIKE %s)' % (self.expand(first), self.expand('%' + second, 'string'))

    def CONTAINS(self, first, second):
        if first.type in ('string', 'text'):
            key = '%' + str(second).replace('%', '%%') + '%'
        elif first.type.startswith('list:'):
            key = '%|' + str(second).replace('|', '||').replace('%', '%%') + '|%'
        return '(%s ILIKE %s)' % (self.expand(first), self.expand(key, 'string'))


class JDBCPostgreSQLAdapter(PostgreSQLAdapter):

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "postgres"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(uri)
        if not m:
            raise SyntaxError, "Invalid URI string in DAL"
        user = credential_decoder(m.group('user'))
        if not user:
            raise SyntaxError, 'User required'
        password = credential_decoder(m.group('password'))
        if not password:
            password = ''
        host = m.group('host')
        if not host:
            raise SyntaxError, 'Host name required'
        db = m.group('db')
        if not db:
            raise SyntaxError, 'Database name required'
        port = m.group('port') or '5432'
        msg = ('jdbc:postgresql://%s:%s/%s' % (host, port, db), user, password)
        def connect(msg=msg, driver_args=driver_args):
            return self.driver.connect(*msg, **driver_args)
        self.pool_connection(connect)
        self.connection.set_client_encoding('UTF8')
        self.cursor = self.connection.cursor()
        self.execute('BEGIN;')
        self.execute("SET CLIENT_ENCODING TO 'UNICODE';")


class OracleAdapter(BaseAdapter):

    driver = globals().get('cx_Oracle', None)

    commit_on_alter_table = False
    types = {
        'boolean': 'CHAR(1)',
        'string': 'VARCHAR2(%(length)s)',
        'text': 'CLOB',
        'password': 'VARCHAR2(%(length)s)',
        'blob': 'CLOB',
        'upload': 'VARCHAR2(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATE',
        'time': 'CHAR(8)',
        'datetime': 'DATE',
        'id': 'NUMBER PRIMARY KEY',
        'reference': 'NUMBER, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'list:integer': 'CLOB',
        'list:string': 'CLOB',
        'list:reference': 'CLOB',
        }

    def sequence_name(self, tablename):
        return '%s_sequence' % tablename

    def trigger_name(self, tablename):
        return '%s_trigger' % tablename

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def RANDOM(self):
        return 'dbms_random.value'

    def NOT_NULL(self, default, field_type):
        return 'DEFAULT %s NOT NULL' % self.represent(default, field_type)

    def _drop(self, table, mode):
        # body elided here; reconstructed: drop the table and its id sequence
        sequence_name = table._sequence_name
        return ['DROP TABLE %s %s;' % (table, mode),
                'DROP SEQUENCE %s;' % sequence_name]

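    # Oracle has no LIMIT/OFFSET: limitby is emulated with a nested select
    # filtered on ROWNUM (w_row carries the row number of the inner query)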
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            if len(sql_w) > 1:
                sql_w_row = sql_w + ' AND w_row > %i' % lmin
            else:
                sql_w_row = 'WHERE w_row > %i' % lmin
            return 'SELECT %s %s FROM (SELECT w_tmp.*, ROWNUM w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNUM<=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'blob':
            obj = base64.b64encode(str(obj))
            return ":CLOB('%s')" % obj
        elif fieldtype == 'date':
            if isinstance(obj, (datetime.date, datetime.datetime)):
                obj = obj.isoformat()[:10]
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd')" % obj
        elif fieldtype == 'datetime':
            if isinstance(obj, datetime.datetime):
                obj = obj.isoformat()[:19].replace('T', ' ')
            elif isinstance(obj, datetime.date):
                obj = obj.isoformat()[:10] + ' 00:00:00'
            else:
                obj = str(obj)
            return "to_date('%s','yyyy-mm-dd hh24:mi:ss')" % obj
        return None

    def __init__(self, db, uri, pool_size=0, folder=None, db_codec='UTF-8',
                 credential_decoder=lambda x: x, driver_args={},
                 adapter_args={}):
        self.db = db
        self.dbengine = "oracle"
        self.uri = uri
        self.pool_size = pool_size
        self.folder = folder
        self.db_codec = db_codec
        self.find_or_make_work_folder()
        uri = uri.split('://')[1]
        if not 'threaded' in driver_args:
            driver_args['threaded'] = True
        def connect(uri=uri, driver_args=driver_args):
            return self.driver.connect(uri, **driver_args)
        self.pool_connection(connect)
        self.cursor = self.connection.cursor()
        self.execute("ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")
        self.execute("ALTER SESSION SET NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS';")

    oracle_fix = re.compile("[^']*('[^']*'[^']*)*\:(?P<clob>CLOB\('([^']+|'')*'\))")

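    # the :CLOB('...') markers produced by represent_exceptions cannot be sent
    # inline; execute() rewrites each into a numbered bind variable and passes
    # the CLOB payloads separately as query arguments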
    def execute(self, command):
        args = []
        i = 1
        while True:
            m = self.oracle_fix.match(command)
            if not m:
                break
            command = command[:m.start('clob')] + str(i) + command[m.end('clob'):]
            args.append(m.group('clob')[6:-2].replace("''", "'"))
            i += 1
        return self.log_execute(command[:-1], args)


class MSSQLAdapter(BaseAdapter):

    driver = globals().get('pyodbc', None)

    types = {
        'boolean': 'BIT',
        'string': 'VARCHAR(%(length)s)',
        'text': 'TEXT',
        'password': 'VARCHAR(%(length)s)',
        'blob': 'IMAGE',
        'upload': 'VARCHAR(%(length)s)',
        'integer': 'INT',
        'double': 'FLOAT',
        'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
        'date': 'DATETIME',
        'time': 'CHAR(8)',
        'datetime': 'DATETIME',
        'id': 'INT IDENTITY PRIMARY KEY',
        'reference': 'INT NULL, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
        'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
        'list:integer': 'TEXT',
        'list:string': 'TEXT',
        'list:reference': 'TEXT',
        }

    def EXTRACT(self, field, what):
        return "DATEPART(%s,%s)" % (what, self.expand(field))

    def LEFT_JOIN(self):
        return 'LEFT OUTER JOIN'

    def SUBSTRING(self, field, parameters):
        return 'SUBSTRING(%s,%s,%s)' % (self.expand(field), parameters[0], parameters[1])

    def PRIMARY_KEY(self, key):
        return 'PRIMARY KEY CLUSTERED (%s)' % key

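    # this MSSQL dialect has no OFFSET: only the upper bound is pushed into the
    # query via TOP, and rowslice() below trims the lower bound client-side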
    def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
        if limitby:
            (lmin, lmax) = limitby
            sql_s += ' TOP %i' % lmax
        return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)

    def represent_exceptions(self, obj, fieldtype):
        if fieldtype == 'boolean':
            if obj and not str(obj)[0].upper() == 'F':
                return '1'
            else:
                return '0'
        return None

2011 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2012 credential_decoder=lambda x:x, driver_args={},
2013 adapter_args={}, fake_connect=False):
2014 self.db = db
2015 self.dbengine = "mssql"
2016 self.uri = uri
2017 self.pool_size = pool_size
2018 self.folder = folder
2019 self.db_codec = db_codec
2020 self.find_or_make_work_folder()
2021
2022 uri = uri.split('://')[1]
2023 if '@' not in uri:
2024 try:
2025 m = re.compile('^(?P<dsn>.+)$').match(uri)
2026 if not m:
2027 raise SyntaxError, \
2028 'Parsing uri string(%s) has no result' % self.uri
2029 dsn = m.group('dsn')
2030 if not dsn:
2031 raise SyntaxError, 'DSN required'
2032 except SyntaxError, e:
2033 logger.error('NdGpatch error')
2034 raise e
2035 cnxn = 'DSN=%s' % dsn
2036 else:
2037 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?(?P<urlargs>.*))?$').match(uri)
2038 if not m:
2039 raise SyntaxError, \
2040 "Invalid URI string in DAL: %s" % uri
2041 user = credential_decoder(m.group('user'))
2042 if not user:
2043 raise SyntaxError, 'User required'
2044 password = credential_decoder(m.group('password'))
2045 if not password:
2046 password = ''
2047 host = m.group('host')
2048 if not host:
2049 raise SyntaxError, 'Host name required'
2050 db = m.group('db')
2051 if not db:
2052 raise SyntaxError, 'Database name required'
2053 port = m.group('port') or '1433'
2054
2055
2056
2057 argsdict = { 'DRIVER':'{SQL Server}' }
2058 urlargs = m.group('urlargs') or ''
2059 argpattern = re.compile('(?P<argkey>[^=]+)=(?P<argvalue>[^&]*)')
2060 for argmatch in argpattern.finditer(urlargs):
2061 argsdict[str(argmatch.group('argkey')).upper()] = argmatch.group('argvalue')
2062 urlargs = ';'.join(['%s=%s' % (ak, av) for (ak, av) in argsdict.items()])
2063 cnxn = 'SERVER=%s;PORT=%s;DATABASE=%s;UID=%s;PWD=%s;%s' \
2064 % (host, port, db, user, password, urlargs)
2065 def connect(cnxn=cnxn,driver_args=driver_args):
2066 return self.driver.connect(cnxn,**driver_args)
2067 if not fake_connect:
2068 self.pool_connection(connect)
2069 self.cursor = self.connection.cursor()
2070
2072
2073 self.execute('SELECT SCOPE_IDENTITY();')
2074 return int(self.cursor.fetchone()[0])
2075
2078
2079 - def rowslice(self,rows,minimum=0,maximum=None):
2080 if maximum is None:
2081 return rows[minimum:]
2082 return rows[minimum:maximum]
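# rowslice() is the client-side half of limitby on this adapter: after TOP
# has bounded lmax, the caller applies roughly rowslice(rows, lmin, None),
# so the pair emulates OFFSET/LIMIT (an illustration; see select_limitby).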
2083
2084
2085 -class MSSQL2Adapter(MSSQLAdapter):
2086 types = {
2087 'boolean': 'CHAR(1)',
2088 'string': 'NVARCHAR(%(length)s)',
2089 'text': 'NTEXT',
2090 'password': 'NVARCHAR(%(length)s)',
2091 'blob': 'IMAGE',
2092 'upload': 'NVARCHAR(%(length)s)',
2093 'integer': 'INT',
2094 'double': 'FLOAT',
2095 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2096 'date': 'DATETIME',
2097 'time': 'CHAR(8)',
2098 'datetime': 'DATETIME',
2099 'id': 'INT IDENTITY PRIMARY KEY',
2100 'reference': 'INT, CONSTRAINT %(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2101 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2102 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2103 'list:integer': 'NTEXT',
2104 'list:string': 'NTEXT',
2105 'list:reference': 'NTEXT',
2106 }
2107
2108 - def represent(self, obj, fieldtype):
2109 value = BaseAdapter.represent(self, obj, fieldtype)
2110 if (fieldtype == 'string' or fieldtype == 'text') and value[:1]=="'":
2111 value = 'N'+value
2112 return value
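# Illustrative: represent('ciao','string') -> N'ciao', so the literal is
# shipped as Unicode, matching the NVARCHAR/NTEXT column types above.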
2113
2114 - def execute(self,a):
2115 return self.log_execute(a.decode('utf8'))
2116
2117
2118 -class FireBirdAdapter(BaseAdapter):
2119
2120 driver = globals().get('pyodbc',None)
2121
2122 commit_on_alter_table = False
2123 support_distributed_transaction = True
2124 types = {
2125 'boolean': 'CHAR(1)',
2126 'string': 'VARCHAR(%(length)s)',
2127 'text': 'BLOB SUB_TYPE 1',
2128 'password': 'VARCHAR(%(length)s)',
2129 'blob': 'BLOB SUB_TYPE 0',
2130 'upload': 'VARCHAR(%(length)s)',
2131 'integer': 'INTEGER',
2132 'double': 'DOUBLE PRECISION',
2133 'decimal': 'DECIMAL(%(precision)s,%(scale)s)',
2134 'date': 'DATE',
2135 'time': 'TIME',
2136 'datetime': 'TIMESTAMP',
2137 'id': 'INTEGER PRIMARY KEY',
2138 'reference': 'INTEGER REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2139 'list:integer': 'BLOB SUB_TYPE 1',
2140 'list:string': 'BLOB SUB_TYPE 1',
2141 'list:reference': 'BLOB SUB_TYPE 1',
2142 }
2143
2144 - def sequence_name(self,tablename):
2145 return 'genid_%s' % tablename
2146
2147 - def trigger_name(self,tablename):
2148 return 'trg_id_%s' % tablename
2149
2150 - def RANDOM(self):
2151 return 'RAND()'
2152
2153 - def NOT_NULL(self,default,field_type):
2154 return 'DEFAULT %s NOT NULL' % self.represent(default,field_type)
2155
2156 - def SUBSTRING(self,field,parameters):
2157 return 'SUBSTRING(%s from %s for %s)' % (self.expand(field), parameters[0], parameters[1])
2158
2159 - def _drop(self,table,mode):
2160 sequence_name = table._sequence_name
2161 return ['DROP TABLE %s %s;' % (table._tablename, mode), 'DROP GENERATOR %s;' % sequence_name]
2162
2163 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2164 if limitby:
2165 (lmin, lmax) = limitby
2166 sql_s += ' FIRST %i SKIP %i' % (lmax - lmin, lmin)
2167 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
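# Sketch: limitby=(5,15) renders as SELECT FIRST 10 SKIP 5 ...; unlike the
# MSSQL adapter, Firebird applies the offset server-side, so no client-side
# row slicing is needed.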
2168
2169 - def _truncate(self,table,mode=''):
2170 return ['DELETE FROM %s;' % table._tablename,
2171 'SET GENERATOR %s TO 0;' % table._sequence_name]
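# Illustrative effect, assuming the default generator name from
# sequence_name() above: db.person.truncate() issues
#   DELETE FROM person;
#   SET GENERATOR genid_person TO 0;
# so identity values restart from 1.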
2172
2173 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2174 credential_decoder=lambda x:x, driver_args={},
2175 adapter_args={}):
2176 self.db = db
2177 self.dbengine = "firebird"
2178 self.uri = uri
2179 self.pool_size = pool_size
2180 self.folder = folder
2181 self.db_codec = db_codec
2182 self.find_or_make_work_folder()
2183 uri = uri.split('://')[1]
2184 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+?)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2185 if not m:
2186 raise SyntaxError, "Invalid URI string in DAL: %s" % uri
2187 user = credential_decoder(m.group('user'))
2188 if not user:
2189 raise SyntaxError, 'User required'
2190 password = credential_decoder(m.group('password'))
2191 if not password:
2192 password = ''
2193 host = m.group('host')
2194 if not host:
2195 raise SyntaxError, 'Host name required'
2196 port = int(m.group('port') or 3050)
2197 db = m.group('db')
2198 if not db:
2199 raise SyntaxError, 'Database name required'
2200 charset = m.group('charset') or 'UTF8'
2201 driver_args.update(dict(dsn='%s/%s:%s' % (host,port,db),
2202 user = credential_decoder(user),
2203 password = credential_decoder(password),
2204 charset = charset))
2205 if adapter_args.has_key('driver_name'):
2206 if adapter_args['driver_name'] == 'kinterbasdb':
2207 self.driver = kinterbasdb
2208 elif adapter_args['driver_name'] == 'firebirdsql':
2209 self.driver = firebirdsql
2210 else:
2211 self.driver = kinterbasdb
2212 def connect(driver_args=driver_args):
2213 return self.driver.connect(**driver_args)
2214 self.pool_connection(connect)
2215 self.cursor = self.connection.cursor()
2216
2225
2230
2231
2232 -class FireBirdEmbeddedAdapter(FireBirdAdapter):
2233
2234 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2235 credential_decoder=lambda x:x, driver_args={},
2236 adapter_args={}):
2237 self.db = db
2238 self.dbengine = "firebird"
2239 self.uri = uri
2240 self.pool_size = pool_size
2241 self.folder = folder
2242 self.db_codec = db_codec
2243 self.find_or_make_work_folder()
2244 uri = uri.split('://')[1]
2245 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<path>[^\?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2246 if not m:
2247 raise SyntaxError, \
2248 "Invalid URI string in DAL: %s" % self.uri
2249 user = credential_decoder(m.group('user'))
2250 if not user:
2251 raise SyntaxError, 'User required'
2252 password = credential_decoder(m.group('password'))
2253 if not password:
2254 password = ''
2255 pathdb = m.group('path')
2256 if not pathdb:
2257 raise SyntaxError, 'Path required'
2258 charset = m.group('charset')
2259 if not charset:
2260 charset = 'UTF8'
2261 host = ''
2262 driver_args.update(dict(host=host,
2263 database=pathdb,
2264 user=credential_decoder(user),
2265 password=credential_decoder(password),
2266 charset=charset))
2267
2268
2269 if adapter_args.has_key('driver_name'):
2270 if adapter_args['driver_name'] == 'kinterbasdb':
2271 self.driver = kinterbasdb
2272 elif adapter_args['driver_name'] == 'firebirdsql':
2273 self.driver = firebirdsql
2274 else:
2275 self.driver = kinterbasdb
2276 def connect(driver_args=driver_args):
2277 return self.driver.connect(**driver_args)
2278 self.pool_connection(connect)
2279 self.cursor = self.connection.cursor()
2280
2281
2379
2384
2387
2390
2391
2392 -class DB2Adapter(BaseAdapter):
2393
2394 driver = globals().get('pyodbc',None)
2395
2396 types = {
2397 'boolean': 'CHAR(1)',
2398 'string': 'VARCHAR(%(length)s)',
2399 'text': 'CLOB',
2400 'password': 'VARCHAR(%(length)s)',
2401 'blob': 'BLOB',
2402 'upload': 'VARCHAR(%(length)s)',
2403 'integer': 'INT',
2404 'double': 'DOUBLE',
2405 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2406 'date': 'DATE',
2407 'time': 'TIME',
2408 'datetime': 'TIMESTAMP',
2409 'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
2410 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2411 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2412 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2413 'list:integer': 'CLOB',
2414 'list:string': 'CLOB',
2415 'list:reference': 'CLOB',
2416 }
2417
2418 - def LEFT_JOIN(self):
2419 return 'LEFT OUTER JOIN'
2420
2421 - def RANDOM(self):
2422 return 'RAND()'
2423
2424 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2425 if limitby:
2426 (lmin, lmax) = limitby
2427 sql_o += ' FETCH FIRST %i ROWS ONLY' % lmax
2428 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
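# Sketch: limitby=(0,10) appends ' FETCH FIRST 10 ROWS ONLY' to the ORDER BY
# clause; the lower bound, as in the MSSQL adapter, is applied client-side
# via rowslice() below.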
2429
2430 - def represent_exceptions(self, obj, fieldtype):
2431 if fieldtype == 'blob':
2432 obj = base64.b64encode(str(obj))
2433 return "BLOB('%s')" % obj
2434 elif fieldtype == 'datetime':
2435 if isinstance(obj, datetime.datetime):
2436 obj = obj.isoformat()[:19].replace('T','-').replace(':','.')
2437 elif isinstance(obj, datetime.date):
2438 obj = obj.isoformat()[:10]+'-00.00.00'
2439 return "'%s'" % obj
2440 return None
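# Illustrative conversions performed above (a sketch):
#   represent_exceptions(datetime.datetime(2011,12,31,23,59),'datetime')
#     -> '2011-12-31-23.59.00'   (DB2 timestamp literal format)
#   blob values are base64-encoded and wrapped as BLOB('...').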
2441
2442 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2443 credential_decoder=lambda x:x, driver_args={},
2444 adapter_args={}):
2445 self.db = db
2446 self.dbengine = "db2"
2447 self.uri = uri
2448 self.pool_size = pool_size
2449 self.folder = folder
2450 self.db_codec = db_codec
2451 self.find_or_make_work_folder()
2452 cnxn = uri.split('://', 1)[1]
2453 def connect(cnxn=cnxn,driver_args=driver_args):
2454 return self.driver.connect(cnxn,**driver_args)
2455 self.pool_connection(connect)
2456 self.cursor = self.connection.cursor()
2457
2458 - def execute(self,command):
2459 if command[-1:]==';':
2460 command = command[:-1]
2461 return self.log_execute(command)
2462
2463 - def lastrowid(self,table):
2464 self.execute('SELECT DISTINCT IDENTITY_VAL_LOCAL() FROM %s;' % table)
2465 return int(self.cursor.fetchone()[0])
2466
2467 - def rowslice(self,rows,minimum=0,maximum=None):
2468 if maximum is None:
2469 return rows[minimum:]
2470 return rows[minimum:maximum]
2471
2472
2473 -class TeradataAdapter(DB2Adapter):
2474
2475 driver = globals().get('pyodbc',None)
2476
2477 types = {
2478 'boolean': 'CHAR(1)',
2479 'string': 'VARCHAR(%(length)s)',
2480 'text': 'CLOB',
2481 'password': 'VARCHAR(%(length)s)',
2482 'blob': 'BLOB',
2483 'upload': 'VARCHAR(%(length)s)',
2484 'integer': 'INT',
2485 'double': 'DOUBLE',
2486 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2487 'date': 'DATE',
2488 'time': 'TIME',
2489 'datetime': 'TIMESTAMP',
2490 'id': 'INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY NOT NULL',
2491 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2492 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2493 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2494 'list:integer': 'CLOB',
2495 'list:string': 'CLOB',
2496 'list:reference': 'CLOB',
2497 }
2498
2499
2500 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2501 credential_decoder=lambda x:x, driver_args={},
2502 adapter_args={}):
2503 self.db = db
2504 self.dbengine = "teradata"
2505 self.uri = uri
2506 self.pool_size = pool_size
2507 self.folder = folder
2508 self.db_codec = db_codec
2509 self.find_or_make_work_folder()
2510 cnxn = uri.split('://', 1)[1]
2511 def connect(cnxn=cnxn,driver_args=driver_args):
2512 return self.driver.connect(cnxn,**driver_args)
2513 self.pool_connection(connect)
2514 self.cursor = self.connection.cursor()
2515
2516
2517 INGRES_SEQNAME='ii***lineitemsequence'
2518
2519
2520
2521 -class IngresAdapter(BaseAdapter):
2522
2523 driver = globals().get('ingresdbi',None)
2524
2525 types = {
2526 'boolean': 'CHAR(1)',
2527 'string': 'VARCHAR(%(length)s)',
2528 'text': 'CLOB',
2529 'password': 'VARCHAR(%(length)s)',
2530 'blob': 'BLOB',
2531 'upload': 'VARCHAR(%(length)s)',
2532 'integer': 'INTEGER4',
2533 'double': 'FLOAT8',
2534 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2535 'date': 'ANSIDATE',
2536 'time': 'TIME WITHOUT TIME ZONE',
2537 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2538 'id': 'integer4 not null unique with default next value for %s' % INGRES_SEQNAME,
2539 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2540 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2541 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2542 'list:integer': 'CLOB',
2543 'list:string': 'CLOB',
2544 'list:reference': 'CLOB',
2545 }
2546
2547 - def LEFT_JOIN(self):
2548 return 'LEFT OUTER JOIN'
2549
2550 - def RANDOM(self):
2551 return 'RANDOM()'
2552
2553 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2554 if limitby:
2555 (lmin, lmax) = limitby
2556 fetch_amt = lmax - lmin
2557 if fetch_amt:
2558 sql_s += ' FIRST %d ' % (fetch_amt, )
2559 if lmin:
2560
2561 sql_o += ' OFFSET %d' % (lmin, )
2562 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
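# Sketch: limitby=(20,30) renders as SELECT FIRST 10 ... OFFSET 20, pushing
# both bounds to the server (OFFSET assumes a reasonably recent Ingres).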
2563
2564 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2565 credential_decoder=lambda x:x, driver_args={},
2566 adapter_args={}):
2567 self.db = db
2568 self.dbengine = "ingres"
2569 self.uri = uri
2570 self.pool_size = pool_size
2571 self.folder = folder
2572 self.db_codec = db_codec
2573 self.find_or_make_work_folder()
2574 connstr = self.uri.split(':', 1)[1]
2575
2576 connstr = connstr.lstrip()
2577 while connstr.startswith('/'):
2578 connstr = connstr[1:]
2579 database_name=connstr
2580 vnode = '(local)'
2581 servertype = 'ingres'
2582 trace = (0, None)
2583 driver_args.update(dict(database=database_name,
2584 vnode=vnode,
2585 servertype=servertype,
2586 trace=trace))
2587 def connect(driver_args=driver_args):
2588 return self.driver.connect(**driver_args)
2589 self.pool_connection(connect)
2590 self.cursor = self.connection.cursor()
2591
2592 - def create_sequence_and_triggers(self, query, table, **args):
2593
2594
2595
2596 if hasattr(table,'_primarykey'):
2597 modify_tbl_sql = 'modify %s to btree unique on %s' % \
2598 (table._tablename,
2599 ', '.join(["'%s'" % x for x in table.primarykey]))
2600 self.execute(modify_tbl_sql)
2601 else:
2602 tmp_seqname='%s_iisq' % table._tablename
2603 query=query.replace(INGRES_SEQNAME, tmp_seqname)
2604 self.execute('create sequence %s' % tmp_seqname)
2605 self.execute(query)
2606 self.execute('modify %s to btree unique on %s' % (table._tablename, 'id'))
2607
2608
2609 - def lastrowid(self,table):
2610 tmp_seqname='%s_iisq' % table
2611 self.execute('select current value for %s' % tmp_seqname)
2612 return int(self.cursor.fetchone()[0])
2613
2614 - def integrity_error_class(self):
2615 return ingresdbi.IntegrityError
2616
2617
2618 -class IngresUnicodeAdapter(IngresAdapter):
2619 types = {
2620 'boolean': 'CHAR(1)',
2621 'string': 'NVARCHAR(%(length)s)',
2622 'text': 'NCLOB',
2623 'password': 'NVARCHAR(%(length)s)',
2624 'blob': 'BLOB',
2625 'upload': 'VARCHAR(%(length)s)',
2626 'integer': 'INTEGER4',
2627 'double': 'FLOAT8',
2628 'decimal': 'NUMERIC(%(precision)s,%(scale)s)',
2629 'date': 'ANSIDATE',
2630 'time': 'TIME WITHOUT TIME ZONE',
2631 'datetime': 'TIMESTAMP WITHOUT TIME ZONE',
2632 'id': 'integer4 not null unique with default next value for %s'% INGRES_SEQNAME,
2633 'reference': 'integer4, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2634 'reference FK': ', CONSTRAINT FK_%(constraint_name)s FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2635 'reference TFK': ' CONSTRAINT FK_%(foreign_table)s_PK FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_table)s (%(foreign_key)s) ON DELETE %(on_delete_action)s',
2636 'list:integer': 'NCLOB',
2637 'list:string': 'NCLOB',
2638 'list:reference': 'NCLOB',
2639 }
2640
2641 -class SAPDBAdapter(BaseAdapter):
2642
2643 driver = globals().get('sapdb',None)
2644 support_distributed_transaction = False
2645 types = {
2646 'boolean': 'CHAR(1)',
2647 'string': 'VARCHAR(%(length)s)',
2648 'text': 'LONG',
2649 'password': 'VARCHAR(%(length)s)',
2650 'blob': 'LONG',
2651 'upload': 'VARCHAR(%(length)s)',
2652 'integer': 'INT',
2653 'double': 'FLOAT',
2654 'decimal': 'FIXED(%(precision)s,%(scale)s)',
2655 'date': 'DATE',
2656 'time': 'TIME',
2657 'datetime': 'TIMESTAMP',
2658 'id': 'INT PRIMARY KEY',
2659 'reference': 'INT, FOREIGN KEY (%(field_name)s) REFERENCES %(foreign_key)s ON DELETE %(on_delete_action)s',
2660 'list:integer': 'LONG',
2661 'list:string': 'LONG',
2662 'list:reference': 'LONG',
2663 }
2664
2665 - def sequence_name(self,table):
2666 return '%s_id_Seq' % table
2667
2668 - def select_limitby(self, sql_s, sql_f, sql_t, sql_w, sql_o, limitby):
2669 if limitby:
2670 (lmin, lmax) = limitby
2671 if len(sql_w) > 1:
2672 sql_w_row = sql_w + ' AND w_row > %i' % lmin
2673 else:
2674 sql_w_row = 'WHERE w_row > %i' % lmin
2675 return '%s %s FROM (SELECT w_tmp.*, ROWNO w_row FROM (SELECT %s FROM %s%s%s) w_tmp WHERE ROWNO=%i) %s %s %s;' % (sql_s, sql_f, sql_f, sql_t, sql_w, sql_o, lmax, sql_t, sql_w_row, sql_o)
2676 return 'SELECT %s %s FROM %s%s%s;' % (sql_s, sql_f, sql_t, sql_w, sql_o)
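# Sketch: SAP DB has no LIMIT/OFFSET, so the query is rewritten as a nested
# select over the ROWNO pseudo-column, roughly
#   SELECT <f> FROM (SELECT w_tmp.*, ROWNO w_row FROM (<inner>) w_tmp
#       WHERE ROWNO=<lmax>) <t> WHERE w_row > <lmin>;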
2677
2678 - def create_sequence_and_triggers(self, query, table, **args):
2679
2680 self.execute('CREATE SEQUENCE %s;' % table._sequence_name)
2681 self.execute("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT NEXTVAL('%s');" \
2682 % (table._tablename, table._id.name, table._sequence_name))
2683 self.execute(query)
2684
2685 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2686 credential_decoder=lambda x:x, driver_args={},
2687 adapter_args={}):
2688 self.db = db
2689 self.dbengine = "sapdb"
2690 self.uri = uri
2691 self.pool_size = pool_size
2692 self.folder = folder
2693 self.db_codec = db_codec
2694 self.find_or_make_work_folder()
2695 uri = uri.split('://')[1]
2696 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:@/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^\?]+)(\?sslmode=(?P<sslmode>.+))?$').match(uri)
2697 if not m:
2698 raise SyntaxError, "Invalid URI string in DAL"
2699 user = credential_decoder(m.group('user'))
2700 if not user:
2701 raise SyntaxError, 'User required'
2702 password = credential_decoder(m.group('password'))
2703 if not password:
2704 password = ''
2705 host = m.group('host')
2706 if not host:
2707 raise SyntaxError, 'Host name required'
2708 db = m.group('db')
2709 if not db:
2710 raise SyntaxError, 'Database name required'
2711 def connect(user=user,password=password,database=db,
2712 host=host,driver_args=driver_args):
2713 return self.driver.Connection(user,password,database,
2714 host,**driver_args)
2715 self.pool_connection(connect)
2716
2717 self.cursor = self.connection.cursor()
2718
2719 - def lastrowid(self,table):
2720 self.execute("select %s.NEXTVAL from dual" % table._sequence_name)
2721 return int(self.cursor.fetchone()[0])
2722
2723 -class CubridAdapter(MySQLAdapter):
2724
2725 driver = globals().get('cubriddb',None)
2726
2727 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
2728 credential_decoder=lambda x:x, driver_args={},
2729 adapter_args={}):
2730 self.db = db
2731 self.dbengine = "cubrid"
2732 self.uri = uri
2733 self.pool_size = pool_size
2734 self.folder = folder
2735 self.db_codec = db_codec
2736 self.find_or_make_work_folder()
2737 uri = uri.split('://')[1]
2738 m = re.compile('^(?P<user>[^:@]+)(\:(?P<password>[^@]*))?@(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>[^?]+)(\?set_encoding=(?P<charset>\w+))?$').match(uri)
2739 if not m:
2740 raise SyntaxError, \
2741 "Invalid URI string in DAL: %s" % self.uri
2742 user = credential_decoder(m.group('user'))
2743 if not user:
2744 raise SyntaxError, 'User required'
2745 password = credential_decoder(m.group('password'))
2746 if not password:
2747 password = ''
2748 host = m.group('host')
2749 if not host:
2750 raise SyntaxError, 'Host name required'
2751 db = m.group('db')
2752 if not db:
2753 raise SyntaxError, 'Database name required'
2754 port = int(m.group('port') or '30000')
2755 charset = m.group('charset') or 'utf8'
2756 user = credential_decoder(user)
2757 passwd = credential_decoder(password)
2758 def connect(host=host,port=port,db=db,user=user,passwd=passwd,driver_args=driver_args):
2759 return self.driver.connect(host,port,db,user,passwd,**driver_args)
2760 self.pool_connection(connect)
2761 self.cursor = self.connection.cursor()
2762 self.execute('SET FOREIGN_KEY_CHECKS=1;')
2763 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2764
2765
2766
2767
2768 -class DatabaseStoredFile:
2769
2770 web2py_filesystem = False
2771
2773 if db._adapter.dbengine != 'mysql':
2774 raise RuntimeError, "only MySQL can store metadata .table files in database for now"
2775 self.db = db
2776 self.filename = filename
2777 self.mode = mode
2778 if not self.web2py_filesystem:
2779 self.db.executesql("CREATE TABLE IF NOT EXISTS web2py_filesystem (path VARCHAR(512), content LONGTEXT, PRIMARY KEY(path) ) ENGINE=InnoDB;")
2780 DatabaseStoredFile.web2py_filesystem = True
2781 self.p=0
2782 self.data = ''
2783 if mode in ('r','rw','a'):
2784 query = "SELECT content FROM web2py_filesystem WHERE path='%s'" % filename
2785 rows = self.db.executesql(query)
2786 if rows:
2787 self.data = rows[0][0]
2788 elif os.path.exists(filename):
2789 datafile = open(filename, 'r')
2790 try:
2791 self.data = datafile.read()
2792 finally:
2793 datafile.close()
2794 elif mode in ('r','rw'):
2795 raise RuntimeError, "File %s does not exist" % filename
2796
2797 - def read(self, bytes):
2798 data = self.data[self.p:self.p+bytes]
2799 self.p += len(data)
2800 return data
2801
2802 - def readline(self):
2803 i = self.data.find('\n',self.p)+1
2804 if i>0:
2805 data, self.p = self.data[self.p:i], i
2806 else:
2807 data, self.p = self.data[self.p:], len(self.data)
2808 return data
2809
2810 - def write(self,data):
2811 self.data += data
2812
2813 - def close(self):
2814 self.db.executesql("DELETE FROM web2py_filesystem WHERE path='%s'" % self.filename)
2815 query = "INSERT INTO web2py_filesystem(path,content) VALUES ('%s','%s')" % \
2816 (self.filename, self.data.replace("'","''"))
2817 self.db.executesql(query)
2818 self.db.commit()
2819
2820 @staticmethod
2821 - def exists(db,filename):
2822 if os.path.exists(filename):
2823 return True
2824 query = "SELECT path FROM web2py_filesystem WHERE path='%s'" % filename
2825 if db.executesql(query):
2826 return True
2827 return False
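# DatabaseStoredFile keeps web2py's migration metadata (.table files) in a
# web2py_filesystem table for deployments without a writable disk, e.g.
# Google App Engine talking to Cloud SQL. Illustrative use (hypothetical
# filename):
#   f = DatabaseStoredFile(db, 'storage_test.table', 'w')
#   f.write(data)
#   f.close()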
2828
2829
2830 -class UseDatabaseStoredFile:
2831
2832 - def file_exists(self, filename):
2833 return DatabaseStoredFile.exists(self.db,filename)
2834
2835 - def file_open(self, filename, mode='rb', lock=True):
2836 return DatabaseStoredFile(self.db,filename,mode)
2837
2838 - def file_close(self, fileobj, unlock=True):
2839 fileobj.close()
2840
2841 - def file_delete(self,filename):
2842 query = "DELETE FROM web2py_filesystem WHERE path='%s'" % filename
2843 self.db.executesql(query)
2844 self.db.commit()
2845
2846 -class GoogleSQLAdapter(UseDatabaseStoredFile,MySQLAdapter):
2847
2848 - def __init__(self, db, uri='google:sql://realm:domain/database', pool_size=0,
2849 folder=None, db_codec='UTF-8', check_reserved=None,
2850 migrate=True, fake_migrate=False,
2851 credential_decoder = lambda x:x, driver_args={},
2852 adapter_args={}):
2853
2854 self.db = db
2855 self.dbengine = "mysql"
2856 self.uri = uri
2857 self.pool_size = pool_size
2858 self.folder = folder
2859 self.db_codec = db_codec
2860 self.folder = folder or '$HOME/'+thread.folder.split('/applications/',1)[1]
2861
2862 m = re.compile('^(?P<instance>.*)/(?P<db>.*)$').match(self.uri[len('google:sql://'):])
2863 if not m:
2864 raise SyntaxError, "Invalid URI string in SQLDB: %s" % self.uri
2865 instance = credential_decoder(m.group('instance'))
2866 db = credential_decoder(m.group('db'))
2867 driver_args['instance'] = instance
2868 if not migrate:
2869 driver_args['database'] = db
2870 def connect(driver_args=driver_args):
2871 return rdbms.connect(**driver_args)
2872 self.pool_connection(connect)
2873 self.cursor = self.connection.cursor()
2874 if migrate:
2875
2876 self.execute('CREATE DATABASE IF NOT EXISTS %s' % db)
2877 self.execute('USE %s' % db)
2878 self.execute("SET FOREIGN_KEY_CHECKS=1;")
2879 self.execute("SET sql_mode='NO_BACKSLASH_ESCAPES';")
2880
2881 -class NoSQLAdapter(BaseAdapter):
2882
2883 @staticmethod
2884 - def to_unicode(obj):
2885 if isinstance(obj, str):
2886 return obj.decode('utf8')
2887 elif not isinstance(obj, unicode):
2888 return unicode(obj)
2889 return obj
2890
2891 - def represent(self, obj, fieldtype):
2892 if isinstance(obj,CALLABLETYPES):
2893 obj = obj()
2894 if isinstance(fieldtype, SQLCustomType):
2895 return fieldtype.encoder(obj)
2896 if isinstance(obj, (Expression, Field)):
2897 raise SyntaxError, "non supported on GAE"
2898 if self.dbengine == 'google:datastore':
2899 if isinstance(fieldtype, gae.Property):
2900 return obj
2901 if fieldtype.startswith('list:'):
2902 if not obj:
2903 obj = []
2904 if not isinstance(obj, (list, tuple)):
2905 obj = [obj]
2906 if obj == '' and not fieldtype[:2] in ['st','te','pa','up']:
2907 return None
2908 if obj != None:
2909 if isinstance(obj, list) and not fieldtype.startswith('list'):
2910 obj = [self.represent(o, fieldtype) for o in obj]
2911 elif fieldtype in ('integer','id'):
2912 obj = long(obj)
2913 elif fieldtype == 'double':
2914 obj = float(obj)
2915 elif fieldtype.startswith('reference'):
2916 if isinstance(obj, (Row, Reference)):
2917 obj = obj['id']
2918 obj = long(obj)
2919 elif fieldtype == 'boolean':
2920 if obj and not str(obj)[0].upper() == 'F':
2921 obj = True
2922 else:
2923 obj = False
2924 elif fieldtype == 'date':
2925 if not isinstance(obj, datetime.date):
2926 (y, m, d) = map(int,str(obj).strip().split('-'))
2927 obj = datetime.date(y, m, d)
2928 elif isinstance(obj,datetime.datetime):
2929 (y, m, d) = (obj.year, obj.month, obj.day)
2930 obj = datetime.date(y, m, d)
2931 elif fieldtype == 'time':
2932 if not isinstance(obj, datetime.time):
2933 time_items = map(int,str(obj).strip().split(':')[:3])
2934 if len(time_items) == 3:
2935 (h, mi, s) = time_items
2936 else:
2937 (h, mi, s) = time_items + [0]
2938 obj = datetime.time(h, mi, s)
2939 elif fieldtype == 'datetime':
2940 if not isinstance(obj, datetime.datetime):
2941 (y, m, d) = map(int,str(obj)[:10].strip().split('-'))
2942 time_items = map(int,str(obj)[11:].strip().split(':')[:3])
2943 while len(time_items)<3:
2944 time_items.append(0)
2945 (h, mi, s) = time_items
2946 obj = datetime.datetime(y, m, d, h, mi, s)
2947 elif fieldtype == 'blob':
2948 pass
2949 elif fieldtype.startswith('list:string'):
2950 return map(self.to_unicode,obj)
2951 elif fieldtype.startswith('list:'):
2952 return map(int,obj)
2953 else:
2954 obj = self.to_unicode(obj)
2955 return obj
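# Illustrative conversions (a sketch, not exhaustive):
#   represent('123','integer')        -> 123L
#   represent('2011-12-31','date')    -> datetime.date(2011, 12, 31)
#   represent(row,'reference person') -> the row's id as a long
#   plain strings are decoded to unicode via to_unicode() above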
2956
2957 - def _insert(self,table,fields):
2958 return 'insert %s in %s' % (fields, table)
2959
2960 - def _count(self,query,distinct=None):
2961 return 'count %s' % repr(query)
2962
2963 - def _select(self,query,fields,attributes):
2964 return 'select %s where %s' % (repr(fields), repr(query))
2965
2966 - def _delete(self,tablename, query):
2967 return 'delete %s where %s' % (repr(tablename),repr(query))
2968
2969 - def _update(self,tablename,query,fields):
2970 return 'update %s (%s) where %s' % (repr(tablename),
2971 repr(fields),repr(query))
2972
2974 """
2975 remember: no transactions on many NoSQL
2976 """
2977 pass
2978
2979 - def rollback(self):
2980 """
2981 remember: no transactions on many NoSQL
2982 """
2983 pass
2984
2985 - def close(self):
2986 """
2987 remember: no transactions on many NoSQL
2988 """
2989 pass
2990
2991
2992
2993 - def OR(self,first,second): raise SyntaxError, "Not supported"
2994 - def AND(self,first,second): raise SyntaxError, "Not supported"
2995 - def AS(self,first,second): raise SyntaxError, "Not supported"
2996 - def ON(self,first,second): raise SyntaxError, "Not supported"
2997 - def STARTSWITH(self,first,second=None): raise SyntaxError, "Not supported"
2998 - def ENDSWITH(self,first,second=None): raise SyntaxError, "Not supported"
2999 - def ADD(self,first,second): raise SyntaxError, "Not supported"
3000 - def SUB(self,first,second): raise SyntaxError, "Not supported"
3001 - def MUL(self,first,second): raise SyntaxError, "Not supported"
3002 - def DIV(self,first,second): raise SyntaxError, "Not supported"
3003 - def LOWER(self,first): raise SyntaxError, "Not supported"
3004 - def UPPER(self,first): raise SyntaxError, "Not supported"
3005 - def EXTRACT(self,first,what): raise SyntaxError, "Not supported"
3006 - def AGGREGATE(self,first,what): raise SyntaxError, "Not supported"
3007 - def LEFT_JOIN(self): raise SyntaxError, "Not supported"
3008 - def RANDOM(self): raise SyntaxError, "Not supported"
3009 - def SUBSTRING(self,field,parameters): raise SyntaxError, "Not supported"
3010 - def PRIMARY_KEY(self,key): raise SyntaxError, "Not supported"
3011 - def LIKE(self,first,second): raise SyntaxError, "Not supported"
3012 - def drop(self,table,mode): raise SyntaxError, "Not supported"
3013 - def alias(self,table,alias): raise SyntaxError, "Not supported"
3014 - def migrate_table(self,*a,**b): raise SyntaxError, "Not supported"
3015 - def distributed_transaction_begin(self,key): raise SyntaxError, "Not supported"
3016 - def prepare(self,key): raise SyntaxError, "Not supported"
3017 - def commit_prepared(self,key): raise SyntaxError, "Not supported"
3018 - def rollback_prepared(self,key): raise SyntaxError, "Not supported"
3019 - def concat_add(self,table): raise SyntaxError, "Not supported"
3020 - def constraint_name(self, table, fieldname): raise SyntaxError, "Not supported"
3021 - def create_sequence_and_triggers(self, query, table, **args): pass
3022 - def log_execute(self,*a,**b): raise SyntaxError, "Not supported"
3023 - def execute(self,*a,**b): raise SyntaxError, "Not supported"
3024 - def represent_exceptions(self, obj, fieldtype): raise SyntaxError, "Not supported"
3025 - def lastrowid(self,table): raise SyntaxError, "Not supported"
3026 - def integrity_error_class(self): raise SyntaxError, "Not supported"
3027 - def rowslice(self,rows,minimum=0,maximum=None): raise SyntaxError, "Not supported"
3028
3029
3030 -class GAEF(object):
3031 - def __init__(self,name,op,value,apply):
3032 self.name=name=='id' and '__key__' or name
3033 self.op=op
3034 self.value=value
3035 self.apply=apply
3036 - def __repr__(self):
3037 return '(%s %s %s:%s)' % (self.name, self.op, repr(self.value), type(self.value))
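# Each GAEF bundles one datastore filter plus an in-Python fallback, e.g.
# (illustrative) db.person.id > 3 expands via GT below to
#   [GAEF('__key__', '>', Key.from_path('person', 3L), lambda a,b: a>b)]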
3038
3039 -class GoogleDatastoreAdapter(NoSQLAdapter):
3040 uploads_in_blob = True
3041 types = {}
3042
3043 - def file_exists(self, filename): pass
3044 - def file_open(self, filename, mode='rb', lock=True): pass
3045 - def file_close(self, fileobj, unlock=True): pass
3046
3047 - def __init__(self,db,uri,pool_size=0,folder=None,db_codec ='UTF-8',
3048 credential_decoder=lambda x:x, driver_args={},
3049 adapter_args={}):
3050 self.types.update({
3051 'boolean': gae.BooleanProperty,
3052 'string': (lambda: gae.StringProperty(multiline=True)),
3053 'text': gae.TextProperty,
3054 'password': gae.StringProperty,
3055 'blob': gae.BlobProperty,
3056 'upload': gae.StringProperty,
3057 'integer': gae.IntegerProperty,
3058 'double': gae.FloatProperty,
3059 'decimal': GAEDecimalProperty,
3060 'date': gae.DateProperty,
3061 'time': gae.TimeProperty,
3062 'datetime': gae.DateTimeProperty,
3063 'id': None,
3064 'reference': gae.IntegerProperty,
3065 'list:string': (lambda: gae.StringListProperty(default=None)),
3066 'list:integer': (lambda: gae.ListProperty(int,default=None)),
3067 'list:reference': (lambda: gae.ListProperty(int,default=None)),
3068 })
3069 self.db = db
3070 self.uri = uri
3071 self.dbengine = 'google:datastore'
3072 self.folder = folder
3073 db['_lastsql'] = ''
3074 self.db_codec = 'UTF-8'
3075 self.pool_size = 0
3076 match = re.compile('.*://(?P<namespace>.+)').match(uri)
3077 if match:
3078 namespace_manager.set_namespace(match.group('namespace'))
3079
3080 - def create_table(self,table,migrate=True,fake_migrate=False, polymodel=None):
3081 myfields = {}
3082 for k in table.fields:
3083 if isinstance(polymodel,Table) and k in polymodel.fields():
3084 continue
3085 field = table[k]
3086 attr = {}
3087 if isinstance(field.type, SQLCustomType):
3088 ftype = self.types[field.type.native or field.type.type](**attr)
3089 elif isinstance(field.type, gae.Property):
3090 ftype = field.type
3091 elif field.type.startswith('id'):
3092 continue
3093 elif field.type.startswith('decimal'):
3094 precision, scale = field.type[7:].strip('()').split(',')
3095 precision = int(precision)
3096 scale = int(scale)
3097 ftype = GAEDecimalProperty(precision, scale, **attr)
3098 elif field.type.startswith('reference'):
3099 if field.notnull:
3100 attr = dict(required=True)
3101 referenced = field.type[10:].strip()
3102 ftype = self.types[field.type[:9]](table._db[referenced])
3103 elif field.type.startswith('list:reference'):
3104 if field.notnull:
3105 attr = dict(required=True)
3106 referenced = field.type[15:].strip()
3107 ftype = self.types[field.type[:14]](**attr)
3108 elif field.type.startswith('list:'):
3109 ftype = self.types[field.type](**attr)
3110 elif not field.type in self.types\
3111 or not self.types[field.type]:
3112 raise SyntaxError, 'Field: unknown field type: %s' % field.type
3113 else:
3114 ftype = self.types[field.type](**attr)
3115 myfields[field.name] = ftype
3116 if not polymodel:
3117 table._tableobj = classobj(table._tablename, (gae.Model, ), myfields)
3118 elif polymodel==True:
3119 table._tableobj = classobj(table._tablename, (PolyModel, ), myfields)
3120 elif isinstance(polymodel,Table):
3121 table._tableobj = classobj(table._tablename, (polymodel._tableobj, ), myfields)
3122 else:
3123 raise SyntaxError, "polymodel must be None, True, a table or a tablename"
3124 return None
3125
3126 - def expand(self,expression,field_type=None):
3127 if isinstance(expression,Field):
3128 if expression.type in ('text','blob'):
3129 raise SyntaxError, 'AppEngine does not index by: %s' % expression.type
3130 return expression.name
3131 elif isinstance(expression, (Expression, Query)):
3132 if not expression.second is None:
3133 return expression.op(expression.first, expression.second)
3134 elif not expression.first is None:
3135 return expression.op(expression.first)
3136 else:
3137 return expression.op()
3138 elif field_type:
3139 return self.represent(expression,field_type)
3140 elif isinstance(expression,(list,tuple)):
3141 return ','.join([self.represent(item,field_type) for item in expression])
3142 else:
3143 return str(expression)
3144
3145
3146 - def AND(self,first,second):
3147 a = self.expand(first)
3148 b = self.expand(second)
3149 if b[0].name=='__key__' and a[0].name!='__key__':
3150 return b+a
3151 return a+b
3152
3153 - def EQ(self,first,second=None):
3154 if isinstance(second, Key):
3155 return [GAEF(first.name,'=',second,lambda a,b:a==b)]
3156 return [GAEF(first.name,'=',self.represent(second,first.type),lambda a,b:a==b)]
3157
3158 - def NE(self,first,second=None):
3159 if first.type != 'id':
3160 return [GAEF(first.name,'!=',self.represent(second,first.type),lambda a,b:a!=b)]
3161 else:
3162 second = Key.from_path(first._tablename, long(second))
3163 return [GAEF(first.name,'!=',second,lambda a,b:a!=b)]
3164
3165 - def LT(self,first,second=None):
3166 if first.type != 'id':
3167 return [GAEF(first.name,'<',self.represent(second,first.type),lambda a,b:a<b)]
3168 else:
3169 second = Key.from_path(first._tablename, long(second))
3170 return [GAEF(first.name,'<',second,lambda a,b:a<b)]
3171
3172 - def LE(self,first,second=None):
3173 if first.type != 'id':
3174 return [GAEF(first.name,'<=',self.represent(second,first.type),lambda a,b:a<=b)]
3175 else:
3176 second = Key.from_path(first._tablename, long(second))
3177 return [GAEF(first.name,'<=',second,lambda a,b:a<=b)]
3178
3179 - def GT(self,first,second=None):
3180 if first.type != 'id' or second==0 or second == '0':
3181 return [GAEF(first.name,'>',self.represent(second,first.type),lambda a,b:a>b)]
3182 else:
3183 second = Key.from_path(first._tablename, long(second))
3184 return [GAEF(first.name,'>',second,lambda a,b:a>b)]
3185
3186 - def GE(self,first,second=None):
3187 if first.type != 'id':
3188 return [GAEF(first.name,'>=',self.represent(second,first.type),lambda a,b:a>=b)]
3189 else:
3190 second = Key.from_path(first._tablename, long(second))
3191 return [GAEF(first.name,'>=',second,lambda a,b:a>=b)]
3192
3193 - def INVERT(self,first):
3194 return '-%s' % first.name
3195
3196 - def COMMA(self,first,second):
3197 return self.expand(first)+self.expand(second)
3198
3199 - def BELONGS(self,first,second=None):
3200 if not isinstance(second,(list, tuple)):
3201 raise SyntaxError, "Not supported"
3202 if first.type != 'id':
3203 return [GAEF(first.name,'in',self.represent(second,first.type),lambda a,b:a in b)]
3204 else:
3205 second = [Key.from_path(first._tablename, i) for i in second]
3206 return [GAEF(first.name,'in',second,lambda a,b:a in b)]
3207
3208 - def CONTAINS(self,first,second):
3209 if not first.type.startswith('list:'):
3210 raise SyntaxError, "Not supported"
3211 return [GAEF(first.name,'=',self.expand(second,'string'),lambda a,b:b in a)]
3212
3213 - def NOT(self,first):
3214 nops = { self.EQ: self.NE,
3215 self.NE: self.EQ,
3216 self.LT: self.GE,
3217 self.GT: self.LE,
3218 self.LE: self.GT,
3219 self.GE: self.LT}
3220 if not isinstance(first,Query):
3221 raise SyntaxError, "Not supported"
3222 nop = nops.get(first.op,None)
3223 if not nop:
3224 raise SyntaxError, "Not supported %s" % first.op.__name__
3225 first.op = nop
3226 return self.expand(first)
3227
3229 self.db(table._id > 0).delete()
3230
3231 - def select_raw(self,query,fields=[],attributes={}):
3232 new_fields = []
3233 for item in fields:
3234 if isinstance(item,SQLALL):
3235 new_fields += item.table
3236 else:
3237 new_fields.append(item)
3238 fields = new_fields
3239 if query:
3240 tablename = self.get_table(query)
3241 elif fields:
3242 tablename = fields[0].tablename
3243 query = fields[0].table._id>0
3244 else:
3245 raise SyntaxError, "Unable to determine a tablename"
3246 query = self.filter_tenant(query,[tablename])
3247 tableobj = self.db[tablename]._tableobj
3248 items = tableobj.all()
3249 filters = self.expand(query)
3250 for filter in filters:
3251 if filter.name=='__key__' and filter.op=='>' and filter.value==0:
3252 continue
3253 elif filter.name=='__key__' and filter.op=='=':
3254 if filter.value==0:
3255 items = []
3256 elif isinstance(filter.value, Key):
3257 item = tableobj.get(filter.value)
3258 items = (item and [item]) or []
3259 else:
3260 item = tableobj.get_by_id(filter.value)
3261 items = (item and [item]) or []
3262 elif isinstance(items,list):
3263 items = [i for i in items if filter.apply(getattr(i,filter.name),
3264 filter.value)]
3265 else:
3266 if filter.name=='__key__': items.order('__key__')
3267 items = items.filter('%s %s' % (filter.name,filter.op),filter.value)
3268 if not isinstance(items,list):
3269 if attributes.get('left', None):
3270 raise SyntaxError, 'Set: no left join in appengine'
3271 if attributes.get('groupby', None):
3272 raise SyntaxError, 'Set: no groupby in appengine'
3273 orderby = attributes.get('orderby', False)
3274 if orderby:
3275
3276 if isinstance(orderby, (list, tuple)):
3277 orderby = xorify(orderby)
3278 if isinstance(orderby,Expression):
3279 orderby = self.expand(orderby)
3280 orders = orderby.split(', ')
3281 for order in orders:
3282 order={'-id':'-__key__','id':'__key__'}.get(order,order)
3283 items = items.order(order)
3284 if attributes.get('limitby', None):
3285 (lmin, lmax) = attributes['limitby']
3286 (limit, offset) = (lmax - lmin, lmin)
3287 items = items.fetch(limit, offset=offset)
3288 fields = self.db[tablename].fields
3289 return (items, tablename, fields)
3290
3291 - def select(self,query,fields,attributes):
3292 (items, tablename, fields) = self.select_raw(query,fields,attributes)
3293
3294 rows = [
3295 [t=='id' and int(item.key().id()) or getattr(item, t) for t in fields]
3296 for item in items]
3297 colnames = ['%s.%s' % (tablename, t) for t in fields]
3298 return self.parse(rows, colnames, False)
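# Sketch: for db(db.person.id>0).select() the items fetched above become
# positional rows with colnames like ['person.id','person.name'], and
# BaseAdapter.parse turns them into the usual Rows object.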
3299
3300
3301 - def count(self,query,distinct=None):
3302 if distinct:
3303 raise RuntimeError, "COUNT DISTINCT not supported"
3304 (items, tablename, fields) = self.select_raw(query)
3305
3306 try:
3307 return len(items)
3308 except TypeError:
3309 return items.count(limit=None)
3310
3311 - def delete(self,tablename, query):
3312 """
3313 This function was changed on 2010-05-04 because according to
3314 http://code.google.com/p/googleappengine/issues/detail?id=3119
3315 GAE no longer support deleting more than 1000 records.
3316 """
3317
3318 (items, tablename, fields) = self.select_raw(query)
3319
3320 if not isinstance(items,list):
3321 counter = items.count(limit=None)
3322 leftitems = items.fetch(1000)
3323 while len(leftitems):
3324 gae.delete(leftitems)
3325 leftitems = items.fetch(1000)
3326 else:
3327 counter = len(items)
3328 gae.delete(items)
3329 return counter
3330
3331 - def update(self,tablename,query,update_fields):
3332
3333 (items, tablename, fields) = self.select_raw(query)
3334 counter = 0
3335 for item in items:
3336 for field, value in update_fields:
3337 setattr(item, field.name, self.represent(value,field.type))
3338 item.put()
3339 counter += 1
3340 logger.info(str(counter))
3341 return counter
3342
3343 - def insert(self,table,fields):
3344 dfields=dict((f.name,self.represent(v,f.type)) for f,v in fields)
3345
3346 tmp = table._tableobj(**dfields)
3347 tmp.put()
3348 rid = Reference(tmp.key().id())
3349 (rid._table, rid._record) = (table, None)
3350 return rid
3351
3352 - def bulk_insert(self,table,items):
3353 parsed_items = []
3354 for item in items:
3355 dfields=dict((f.name,self.represent(v,f.type)) for f,v in item)
3356 parsed_items.append(table._tableobj(**dfields))
3357 gae.put(parsed_items)
3358 return True
3359
3360 - def uuid2int(uuidv):
3361 return uuid.UUID(uuidv).int
3362
3363 - def int2uuid(n):
3364 return str(uuid.UUID(int=n))
3365
3366 -class CouchDBAdapter(NoSQLAdapter):
3367 uploads_in_blob = True
3368 types = {
3369 'boolean': bool,
3370 'string': str,
3371 'text': str,
3372 'password': str,
3373 'blob': str,
3374 'upload': str,
3375 'integer': long,
3376 'double': float,
3377 'date': datetime.date,
3378 'time': datetime.time,
3379 'datetime': datetime.datetime,
3380 'id': long,
3381 'reference': long,
3382 'list:string': list,
3383 'list:integer': list,
3384 'list:reference': list,
3385 }
3386
3387 - def file_exists(self, filename): pass
3388 - def file_open(self, filename, mode='rb', lock=True): pass
3389 - def file_close(self, fileobj, unlock=True): pass
3390
3391 - def expand(self,expression,field_type=None):
3392 if isinstance(expression,Field):
3393 if expression.type=='id':
3394 return "%s._id" % expression.tablename
3395 return BaseAdapter.expand(self,expression,field_type)
3396
3397 - def AND(self,first,second):
3398 return '(%s && %s)' % (self.expand(first),self.expand(second))
3399
3400 - def OR(self,first,second):
3401 return '(%s || %s)' % (self.expand(first),self.expand(second))
3402
3403 - def EQ(self,first,second):
3404 if second is None:
3405 return '(%s==null)' % self.expand(first)
3406 return '(%s==%s)' % (self.expand(first),self.expand(second,first.type))
3407
3408 - def NE(self,first,second):
3409 if second is None:
3410 return '(%s!=null)' % self.expand(first)
3411 return '(%s!=%s)' % (self.expand(first),self.expand(second,first.type))
3412
3413 - def COMMA(self,first,second):
3414 return '%s + %s' % (self.expand(first),self.expand(second))
3415
3416 - def represent(self, obj, fieldtype):
3417 value = NoSQLAdapter.represent(self, obj, fieldtype)
3418 if fieldtype=='id':
3419 return repr(str(int(value)))
3420 return repr(not isinstance(value,unicode) and value or value.encode('utf8'))
3421
3422 - def __init__(self,db,uri='couchdb://127.0.0.1:5984',
3423 pool_size=0,folder=None,db_codec ='UTF-8',
3424 credential_decoder=lambda x:x, driver_args={},
3425 adapter_args={}):
3426 self.db = db
3427 self.uri = uri
3428 self.dbengine = 'couchdb'
3429 self.folder = folder
3430 db['_lastsql'] = ''
3431 self.db_codec = 'UTF-8'
3432 self.pool_size = pool_size
3433
3434 url='http://'+uri[10:]
3435 def connect(url=url,driver_args=driver_args):
3436 return couchdb.Server(url,**driver_args)
3437 self.pool_connection(connect)
3438
3439 - def create_table(self, table, migrate=True, fake_migrate=False, polymodel=None):
3440 if migrate:
3441 try:
3442 self.connection.create(table._tablename)
3443 except:
3444 pass
3445
3446 - def insert(self,table,fields):
3453
3454 - def _select(self,query,fields,attributes):
3455 if not isinstance(query,Query):
3456 raise SyntaxError, "Not Supported"
3457 for key in set(attributes.keys())-set(('orderby','groupby','limitby',
3458 'required','cache','left',
3459 'distinct','having')):
3460 raise SyntaxError, 'invalid select attribute: %s' % key
3461 new_fields=[]
3462 for item in fields:
3463 if isinstance(item,SQLALL):
3464 new_fields += item.table
3465 else:
3466 new_fields.append(item)
3467 def uid(fd):
3468 return fd=='id' and '_id' or fd
3469 def get(row,fd):
3470 return fd=='id' and int(row['_id']) or row.get(fd,None)
3471 fields = new_fields
3472 tablename = self.get_table(query)
3473 fieldnames = [f.name for f in (fields or self.db[tablename])]
3474 colnames = ['%s.%s' % (tablename,k) for k in fieldnames]
3475 fields = ','.join(['%s.%s' % (tablename,uid(f)) for f in fieldnames])
3476 fn="function(%(t)s){if(%(query)s)emit(%(order)s,[%(fields)s]);}" %\
3477 dict(t=tablename,
3478 query=self.expand(query),
3479 order='%s._id' % tablename,
3480 fields=fields)
3481 return fn, colnames
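# Illustrative output of _select (a sketch): db(db.person.name=='James')
# yields a CouchDB map function roughly like
#   function(person){if((person.name=='James'))
#       emit(person._id,[person._id,person.name]);}
# which select() below executes with ctable.query(fn).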
3482
3483 - def select(self,query,fields,attributes):
3484 if not isinstance(query,Query):
3485 raise SyntaxError, "Not Supported"
3486 fn, colnames = self._select(query,fields,attributes)
3487 tablename = colnames[0].split('.')[0]
3488 ctable = self.connection[tablename]
3489 rows = [cols['value'] for cols in ctable.query(fn)]
3490 return self.parse(rows, colnames, False)
3491
3492 - def delete(self,tablename,query):
3493 if not isinstance(query,Query):
3494 raise SyntaxError, "Not Supported"
3495 if query.first.type=='id' and query.op==self.EQ:
3496 id = query.second
3497 tablename = query.first.tablename
3498 assert(tablename == query.first.tablename)
3499 ctable = self.connection[tablename]
3500 try:
3501 del ctable[str(id)]
3502 return 1
3503 except couchdb.http.ResourceNotFound:
3504 return 0
3505 else:
3506 tablename = self.get_table(query)
3507 rows = self.select(query,[self.db[tablename]._id],{})
3508 ctable = self.connection[tablename]
3509 for row in rows:
3510 del ctable[str(row.id)]
3511 return len(rows)
3512
3513 - def update(self,tablename,query,fields):
3514 if not isinstance(query,Query):
3515 raise SyntaxError, "Not Supported"
3516 if query.first.type=='id' and query.op==self.EQ:
3517 id = query.second
3518 tablename = query.first.tablename
3519 ctable = self.connection[tablename]
3520 try:
3521 doc = ctable[str(id)]
3522 for key,value in fields:
3523 doc[key.name] = NoSQLAdapter.represent(self,value,self.db[tablename][key.name].type)
3524 ctable.save(doc)
3525 return 1
3526 except couchdb.http.ResourceNotFound:
3527 return 0
3528 else:
3529 tablename = self.get_table(query)
3530 rows = self.select(query,[self.db[tablename]._id],{})
3531 ctable = self.connection[tablename]
3532 table = self.db[tablename]
3533 for row in rows:
3534 doc = ctable[str(row.id)]
3535 for key,value in fields:
3536 doc[key.name] = NoSQLAdapter.represent(self,value,table[key.name].type)
3537 ctable.save(doc)
3538 return len(rows)
3539
3540 - def count(self,query,distinct=None):
3541 if distinct:
3542 raise RuntimeError, "COUNT DISTINCT not supported"
3543 if not isinstance(query,Query):
3544 raise SyntaxError, "Not Supported"
3545 tablename = self.get_table(query)
3546 rows = self.select(query,[self.db[tablename]._id],{})
3547 return len(rows)
3548
3549 - def cleanup(text):
3550 """
3551 validates that the given text is clean: only contains [0-9a-zA-Z_]
3552 """
3553
3554 if re.compile('[^0-9a-zA-Z_]').findall(text):
3555 raise SyntaxError, \
3556 'only [0-9a-zA-Z_] allowed in table and field names, received %s' \
3557 % text
3558 return text
3559
3560
3561 -class MongoDBAdapter(NoSQLAdapter):
3562 uploads_in_blob = True
3563 types = {
3564 'boolean': bool,
3565 'string': str,
3566 'text': str,
3567 'password': str,
3568 'blob': str,
3569 'upload': str,
3570 'integer': long,
3571 'double': float,
3572 'date': datetime.date,
3573 'time': datetime.time,
3574 'datetime': datetime.datetime,
3575 'id': long,
3576 'reference': long,
3577 'list:string': list,
3578 'list:integer': list,
3579 'list:reference': list,
3580 }
3581
3582 - def __init__(self,db,uri='mongodb://127.0.0.1:5984/db',
3583 pool_size=0,folder=None,db_codec ='UTF-8',
3584 credential_decoder=lambda x:x, driver_args={},
3585 adapter_args={}):
3586 self.db = db
3587 self.uri = uri
3588 self.dbengine = 'mongodb'
3589 self.folder = folder
3590 db['_lastsql'] = ''
3591 self.db_codec = 'UTF-8'
3592 self.pool_size = pool_size
3593
3594 m = re.compile('^(?P<host>[^\:/]+)(\:(?P<port>[0-9]+))?/(?P<db>.+)$').match(self.uri[10:])
3595 if not m:
3596 raise SyntaxError, "Invalid URI string in DAL: %s" % self.uri
3597 host = m.group('host')
3598 if not host:
3599 raise SyntaxError, 'mongodb: host name required'
3600 dbname = m.group('db')
3601 if not dbname:
3602 raise SyntaxError, 'mongodb: db name required'
3603 port = int(m.group('port') or 27017)
3604 driver_args.update(dict(host=host,port=port))
3605 def connect(dbname=dbname,driver_args=driver_args):
3606 return pymongo.Connection(**driver_args)[dbname]
3607 self.pool_connection(connect)
3608
3609 - def insert(self,table,fields):
3614
3615
3616 - def count(self,query):
3617 raise RuntimeError, "Not implemented"
3618
3619 - def select(self,query,fields,attributes):
3620 raise RuntimeError, "Not implemented"
3621
3622 - def delete(self,tablename, query):
3623 raise RuntimeError, "Not implemented"
3624
3625 - def update(self,tablename,query,fields):
3626 raise RuntimeError, "Not implemented"
3627
3628
3629
3630
3631
3632
3633 ADAPTERS = {
3634 'sqlite': SQLiteAdapter,
3635 'sqlite:memory': SQLiteAdapter,
3636 'mysql': MySQLAdapter,
3637 'postgres': PostgreSQLAdapter,
3638 'oracle': OracleAdapter,
3639 'mssql': MSSQLAdapter,
3640 'mssql2': MSSQL2Adapter,
3641 'db2': DB2Adapter,
3642 'teradata': TeradataAdapter,
3643 'informix': InformixAdapter,
3644 'firebird': FireBirdAdapter,
3645 'firebird_embedded': FireBirdEmbeddedAdapter,
3646 'ingres': IngresAdapter,
3647 'ingresu': IngresUnicodeAdapter,
3648 'sapdb': SAPDBAdapter,
3649 'cubrid': CubridAdapter,
3650 'jdbc:sqlite': JDBCSQLiteAdapter,
3651 'jdbc:sqlite:memory': JDBCSQLiteAdapter,
3652 'jdbc:postgres': JDBCPostgreSQLAdapter,
3653 'gae': GoogleDatastoreAdapter,
3654 'google:datastore': GoogleDatastoreAdapter,
3655 'google:sql': GoogleSQLAdapter,
3656 'couchdb': CouchDBAdapter,
3657 'mongodb': MongoDBAdapter,
3658 }
3659
3660
3662 """
3663 Field type validation, using web2py's validators mechanism.
3664
3665 makes sure the content of a field is in line with the declared
3666 fieldtype
3667 """
3668 if not have_validators:
3669 return []
3670 field_type, field_length = field.type, field.length
3671 if isinstance(field_type, SQLCustomType):
3672 if hasattr(field_type, 'validator'):
3673 return field_type.validator
3674 else:
3675 field_type = field_type.type
3676 elif not isinstance(field_type,str):
3677 return []
3678 requires=[]
3679 def ff(r,id):
3680 row=r(id)
3681 if not row:
3682 return id
3683 elif hasattr(r, '_format') and isinstance(r._format,str):
3684 return r._format % row
3685 elif hasattr(r, '_format') and callable(r._format):
3686 return r._format(row)
3687 else:
3688 return id
3689 if field_type == 'string':
3690 requires.append(validators.IS_LENGTH(field_length))
3691 elif field_type == 'text':
3692 requires.append(validators.IS_LENGTH(field_length))
3693 elif field_type == 'password':
3694 requires.append(validators.IS_LENGTH(field_length))
3695 elif field_type == 'double':
3696 requires.append(validators.IS_FLOAT_IN_RANGE(-1e100, 1e100))
3697 elif field_type == 'integer':
3698 requires.append(validators.IS_INT_IN_RANGE(-1e100, 1e100))
3699 elif field_type.startswith('decimal'):
3700 requires.append(validators.IS_DECIMAL_IN_RANGE(-10**10, 10**10))
3701 elif field_type == 'date':
3702 requires.append(validators.IS_DATE())
3703 elif field_type == 'time':
3704 requires.append(validators.IS_TIME())
3705 elif field_type == 'datetime':
3706 requires.append(validators.IS_DATETIME())
3707 elif field.db and field_type.startswith('reference') and \
3708 field_type.find('.') < 0 and \
3709 field_type[10:] in field.db.tables:
3710 referenced = field.db[field_type[10:]]
3711 def repr_ref(id, r=referenced, f=ff): return f(r, id)
3712 field.represent = field.represent or repr_ref
3713 if hasattr(referenced, '_format') and referenced._format:
3714 requires = validators.IS_IN_DB(field.db,referenced._id,
3715 referenced._format)
3716 if field.unique:
3717 requires._and = validators.IS_NOT_IN_DB(field.db,field)
3718 if field.tablename == field_type[10:]:
3719 return validators.IS_EMPTY_OR(requires)
3720 return requires
3721 elif field.db and field_type.startswith('list:reference') and \
3722 field_type.find('.') < 0 and \
3723 field_type[15:] in field.db.tables:
3724 referenced = field.db[field_type[15:]]
3725 def list_ref_repr(ids, r=referenced, f=ff):
3726 if not ids:
3727 return None
3728 refs = r._db(r._id.belongs(ids)).select(r._id)
3729 return (refs and ', '.join(str(f(r,ref.id)) for ref in refs) or '')
3730 field.represent = field.represent or list_ref_repr
3731 if hasattr(referenced, '_format') and referenced._format:
3732 requires = validators.IS_IN_DB(field.db,referenced._id,
3733 referenced._format,multiple=True)
3734 else:
3735 requires = validators.IS_IN_DB(field.db,referenced._id,
3736 multiple=True)
3737 if field.unique:
3738 requires._and = validators.IS_NOT_IN_DB(field.db,field)
3739 return requires
3740 elif field_type.startswith('list:'):
3741 def repr_list(values): return ', '.join(str(v) for v in (values or []))
3742 field.represent = field.represent or repr_list
3743 if field.unique:
3744 requires.insert(0,validators.IS_NOT_IN_DB(field.db,field))
3745 sff = ['in', 'do', 'da', 'ti', 'de', 'bo']
3746 if field.notnull and not field_type[:2] in sff:
3747 requires.insert(0, validators.IS_NOT_EMPTY())
3748 elif not field.notnull and field_type[:2] in sff and requires:
3749 requires[-1] = validators.IS_EMPTY_OR(requires[-1])
3750 return requires
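# Illustrative: Field('name','string',length=32,notnull=True) gets
# [IS_NOT_EMPTY(), IS_LENGTH(32)]; a 'reference person' field gets
# IS_IN_DB(db, db.person.id, db.person._format) when person defines _format.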
3751
3752
3753 - def bar_escape(item):
3754 return str(item).replace('|', '||')
3755
3756 - def bar_encode(items):
3757 return '|%s|' % '|'.join(bar_escape(item) for item in items if str(item).strip())
3758
3759 - def bar_decode_integer(value):
3760 return [int(x) for x in value.split('|') if x.strip()]
3761
3762 - def bar_decode_string(value):
3763 return [x.replace('||', '|') for x in string_unpack.split(value[1:-1]) if x.strip()]
3764
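# The bar format stores list: types in plain text columns (illustrative):
#   [1, 2, 3]      <-> '|1|2|3|'
#   ['a|b', 'c']   <-> '|a||b|c|'   (embedded '|' doubled by bar_escape)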
3765
3766 -class Row(dict):
3767
3768 """
3769 a dictionary that lets you do d['a'] as well as d.a
3770 this is only used to store a Row
3771 """
3772
3773 - def __getitem__(self, key):
3774 key=str(key)
3775 if key in self.get('_extra',{}):
3776 return self._extra[key]
3777 return dict.__getitem__(self, key)
3778
3779 - def __call__(self,key):
3780 return self.__getitem__(key)
3781
3782 - def __setitem__(self, key, value):
3783 dict.__setitem__(self, str(key), value)
3784
3785 - def __getattr__(self, key):
3786 return self[key]
3787
3788 - def __setattr__(self, key, value):
3789 self[key] = value
3790
3791 - def __repr__(self):
3792 return '<Row ' + dict.__repr__(self) + '>'
3793
3794 - def __int__(self):
3795 return dict.__getitem__(self,'id')
3796
3797 - def __eq__(self,other):
3798 try:
3799 return self.as_dict() == other.as_dict()
3800 except AttributeError:
3801 return False
3802
3803 - def __ne__(self,other):
3804 return not (self == other)
3805
3806 - def __copy__(self):
3807 return Row(dict(self))
3808
3809 - def as_dict(self,datetime_to_str=False):
3810 SERIALIZABLE_TYPES = (str,unicode,int,long,float,bool,list)
3811 d = dict(self)
3812 for k in copy.copy(d.keys()):
3813 v=d[k]
3814 if d[k] is None:
3815 continue
3816 elif isinstance(v,Row):
3817 d[k]=v.as_dict()
3818 elif isinstance(v,Reference):
3819 d[k]=int(v)
3820 elif isinstance(v,decimal.Decimal):
3821 d[k]=float(v)
3822 elif isinstance(v, (datetime.date, datetime.datetime, datetime.time)):
3823 if datetime_to_str:
3824 d[k] = v.isoformat().replace('T',' ')[:19]
3825 elif not isinstance(v,SERIALIZABLE_TYPES):
3826 del d[k]
3827 return d
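# Illustrative: Row(id=1, name='James').as_dict() -> {'id': 1, 'name':
# 'James'}; with datetime_to_str=True a datetime becomes 'YYYY-MM-DD
# hh:mm:ss', and values outside SERIALIZABLE_TYPES are silently dropped.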
3828
3829
3830 - def Row_unpickler(data):
3831 return Row(cPickle.loads(data))
3832
3833 - def Row_pickler(data):
3834 return Row_unpickler, (cPickle.dumps(data.as_dict(datetime_to_str=False)),)
3835
3836 copy_reg.pickle(Row, Row_pickler, Row_unpickler)
3837
3838
3839
3840
3841
3842
3843
3844 -class SQLCallableList(list):
3845 - def __call__(self):
3846 return copy.copy(self)
3847
3848
3849 -class DAL(dict):
3850
3851 """
3852 an instance of this class represents a database connection
3853
3854 Example::
3855
3856 db = DAL('sqlite://test.db')
3857 db.define_table('tablename', Field('fieldname1'),
3858 Field('fieldname2'))
3859 """
3860
3861 @staticmethod
3862 - def set_folder(folder):
3863 """
3864 # ## this allows gluon to set a folder for this thread
3865 # ## <<<<<<<<< Should go away as new DAL replaces old sql.py
3866 """
3867 BaseAdapter.set_folder(folder)
3868
3869 @staticmethod
3882
3883 @staticmethod
3905
3906
3907 - def __init__(self, uri='sqlite://dummy.db', pool_size=0, folder=None,
3908 db_codec='UTF-8', check_reserved=None,
3909 migrate=True, fake_migrate=False,
3910 migrate_enabled=True, fake_migrate_all=False,
3911 decode_credentials=False, driver_args=None,
3912 adapter_args={}, attempts=5, auto_import=False):
3913 """
3914 Creates a new Database Abstraction Layer instance.
3915
3916 Keyword arguments:
3917
3918 :uri: string that contains information for connecting to a database.
3919 (default: 'sqlite://dummy.db')
3920 :pool_size: How many open connections to make to the database object.
3921 :folder: where .table migration metadata files are stored (default: None)
3922 :db_codec: string encoding of the database (default: 'UTF-8')
3923 :check_reserved: list of adapters to check tablenames and column names
3924 against sql reserved keywords. (Default None)
3925
3926 * 'common' List of sql keywords that are common to all database types
3927 such as "SELECT, INSERT". (recommended)
3928 * 'all' Checks against all known SQL keywords. (not recommended)
3929 * '<adaptername>' Checks against the specific adapter's list of keywords
3930 (recommended)
3931 * '<adaptername>_nonreserved' Checks against the specific adapter's
3932 list of nonreserved keywords. (if available)
3933 :migrate (defaults to True) sets default migrate behavior for all tables
3934 :fake_migrate (defaults to False) sets default fake_migrate behavior for all tables
3935 :migrate_enabled (defaults to True). If set to False disables ALL migrations
3936 :fake_migrate_all (defaults to False). If set to True fake migrates ALL tables
3937 :attempts (defaults to 5). Number of times to attempt connecting
3938 """
3939 if not decode_credentials:
3940 credential_decoder = lambda cred: cred
3941 else:
3942 credential_decoder = lambda cred: urllib.unquote(cred)
3943 if folder:
3944 self.set_folder(folder)
3945 self._uri = uri
3946 self._pool_size = pool_size
3947 self._db_codec = db_codec
3948 self._lastsql = ''
3949 self._timings = []
3950 self._pending_references = {}
3951 self._request_tenant = 'request_tenant'
3952 self._common_fields = []
3953 if not str(attempts).isdigit() or attempts < 0:
3954 attempts = 5
3955 if uri:
3956 uris = isinstance(uri,(list,tuple)) and uri or [uri]
3957 error = ''
3958 connected = False
3959 for k in range(attempts):
3960 for uri in uris:
3961 try:
3962 if is_jdbc and not uri.startswith('jdbc:'):
3963 uri = 'jdbc:'+uri
3964 self._dbname = regex_dbname.match(uri).group()
3965 if not self._dbname in ADAPTERS:
3966 raise SyntaxError, "Error in URI '%s' or database not supported" % self._dbname
3967
3968 args = (self,uri,pool_size,folder,db_codec,credential_decoder,driver_args or {}, adapter_args)
3969 self._adapter = ADAPTERS[self._dbname](*args)
3970 connected = True
3971 break
3972 except SyntaxError:
3973 raise
3974 except Exception, error:
3975 sys.stderr.write('DEBUG_c: Exception %r' % ((Exception, error,),))
3976 if connected:
3977 break
3978 else:
3979 time.sleep(1)
3980 if not connected:
3981 raise RuntimeError, "Failure to connect, tried %d times:\n%s" % (attempts, error)
3982 else:
3983 args = (self,'None',0,folder,db_codec)
3984 self._adapter = BaseAdapter(*args)
3985 migrate = fake_migrate = False
3986 adapter = self._adapter
3987 self._uri_hash = hashlib.md5(adapter.uri).hexdigest()
3988 self.tables = SQLCallableList()
3989 self.check_reserved = check_reserved
3990 if self.check_reserved:
3991 from reserved_sql_keywords import ADAPTERS as RSK
3992 self.RSK = RSK
3993 self._migrate = migrate
3994 self._fake_migrate = fake_migrate
3995 self._migrate_enabled = migrate_enabled
3996 self._fake_migrate_all = fake_migrate_all
3997 if auto_import:
3998 self.import_table_definitions(adapter.folder)
3999
4000 - def import_table_definitions(self, path, migrate=False, fake_migrate=False):
4001 pattern = os.path.join(path,self._uri_hash+'_*.table')
4002 for filename in glob.glob(pattern):
4003 tfile = self._adapter.file_open(filename, 'r')
4004 try:
4005 sql_fields = cPickle.load(tfile)
4006 name = filename[len(pattern)-7:-6]
4007 mf = [(value['sortable'],Field(key,type=value['type'])) \
4008 for key, value in sql_fields.items()]
4009 mf.sort(lambda a,b: cmp(a[0],b[0]))
4010 self.define_table(name,*[item[1] for item in mf],
4011 **dict(migrate=migrate,fake_migrate=fake_migrate))
4012 finally:
4013 self._adapter.file_close(tfile)
4014
4015 - def check_reserved_keyword(self, name):
4016 """
4017 Validates ``name`` against SQL keywords
4018 Uses self.check_reserve which is a list of
4019 operators to use.
4020 self.check_reserved
4021 ['common', 'postgres', 'mysql']
4022 self.check_reserved
4023 ['all']
4024 """
4025 for backend in self.check_reserved:
4026 if name.upper() in self.RSK[backend]:
4027 raise SyntaxError, 'invalid table/column name "%s" is a "%s" reserved SQL keyword' % (name, backend.upper())
4028
4029 - def __contains__(self, tablename):
4030 if self.has_key(tablename):
4031 return True
4032 else:
4033 return False
4034
4035 - def parse_as_rest(self,patterns,args,vars,query=None,nested_select=True):
4036 """
4037 EXAMPLE:
4038
4039 db.define_table('person',Field('name'),Field('info'))
4040 db.define_table('pet',Field('person',db.person),Field('name'),Field('info'))
4041
4042 @request.restful()
4043 def index():
4044 def GET(*kargs,**kvars):
4045 patterns = [
4046 "/persons[person]",
4047 "/{person.name.startswith}",
4048 "/{person.name}/:field",
4049 "/{person.name}/pets[pet.person]",
4050 "/{person.name}/pet[pet.person]/{pet.name}",
4051 "/{person.name}/pet[pet.person]/{pet.name}/:field"
4052 ]
4053 parser = db.parse_as_rest(patterns,kargs,kvars)
4054 if parser.status == 200:
4055 return dict(content=parser.response)
4056 else:
4057 raise HTTP(parser.status,parser.error)
4058 def POST(table_name,**kvars):
4059 if table_name == 'person':
4060 return db.person.validate_and_insert(**kvars)
4061 elif table_name == 'pet':
4062 return db.pet.validate_and_insert(**kvars)
4063 else:
4064 raise HTTP(400)
4065 return locals()
4066 """
4067
4068 db = self
4069 re1 = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$')
4070 re2 = re.compile('^.+\[.+\]$')
4071
4072 def auto_table(table,base='',depth=0):
4073 patterns = []
4074 for field in db[table].fields:
4075 if base:
4076 tag = '%s/%s' % (base,field.replace('_','-'))
4077 else:
4078 tag = '/%s/%s' % (table.replace('_','-'),field.replace('_','-'))
4079 f = db[table][field]
4080 if not f.readable: continue
4081 if f.type=='id' or 'slug' in field or f.type.startswith('reference'):
4082 tag += '/{%s.%s}' % (table,field)
4083 patterns.append(tag)
4084 patterns.append(tag+'/:field')
4085 elif f.type.startswith('boolean'):
4086 tag += '/{%s.%s}' % (table,field)
4087 patterns.append(tag)
4088 patterns.append(tag+'/:field')
4089 elif f.type.startswith('double') or f.type.startswith('integer'):
4090 tag += '/{%s.%s.ge}/{%s.%s.lt}' % (table,field,table,field)
4091 patterns.append(tag)
4092 patterns.append(tag+'/:field')
4093 elif f.type.startswith('list:'):
4094 tag += '/{%s.%s.contains}' % (table,field)
4095 patterns.append(tag)
4096 patterns.append(tag+'/:field')
4097 elif f.type in ('date','datetime'):
4098 tag+= '/{%s.%s.year}' % (table,field)
4099 patterns.append(tag)
4100 patterns.append(tag+'/:field')
4101 tag+='/{%s.%s.month}' % (table,field)
4102 patterns.append(tag)
4103 patterns.append(tag+'/:field')
4104 tag+='/{%s.%s.day}' % (table,field)
4105 patterns.append(tag)
4106 patterns.append(tag+'/:field')
4107 if f.type in ('datetime','time'):
4108 tag+= '/{%s.%s.hour}' % (table,field)
4109 patterns.append(tag)
4110 patterns.append(tag+'/:field')
4111 tag+='/{%s.%s.minute}' % (table,field)
4112 patterns.append(tag)
4113 patterns.append(tag+'/:field')
4114 tag+='/{%s.%s.second}' % (table,field)
4115 patterns.append(tag)
4116 patterns.append(tag+'/:field')
4117 if depth>0:
4118 for rtable,rfield in db[table]._referenced_by:
4119 tag+='/%s[%s.%s]' % (rtable,rtable,rfield)
4120 patterns.append(tag)
4121 patterns += auto_table(rtable,base=tag,depth=depth-1)
4122 return patterns
4123
4124 if patterns=='auto':
4125 patterns=[]
4126 for table in db.tables:
4127 if not table.startswith('auth_'):
4128 patterns += auto_table(table,base='',depth=1)
4129 else:
4130 i = 0
4131 while i<len(patterns):
4132 pattern = patterns[i]
4133 tokens = pattern.split('/')
4134 if tokens[-1].startswith(':auto') and re2.match(tokens[-1]):
4135 new_patterns = auto_table(tokens[-1][tokens[-1].find('[')+1:-1],'/'.join(tokens[:-1]))
4136 patterns = patterns[:i]+new_patterns+patterns[i+1:]
4137 i += len(new_patterns)
4138 else:
4139 i += 1
4140 if '/'.join(args) == 'patterns':
4141 return Row({'status':200,'pattern':'list',
4142 'error':None,'response':patterns})
4143 for pattern in patterns:
4144 otable=table=None
4145 dbset=db(query)
4146 i=0
4147 tags = pattern[1:].split('/')
4148
4149 if len(tags)!=len(args):
4150 continue
4151 for tag in tags:
4152
4153 if re1.match(tag):
4154
4155 tokens = tag[1:-1].split('.')
4156 table, field = tokens[0], tokens[1]
4157 if not otable or table == otable:
4158 if len(tokens)==2 or tokens[2]=='eq':
4159 query = db[table][field]==args[i]
4160 elif tokens[2]=='ne':
4161 query = db[table][field]!=args[i]
4162 elif tokens[2]=='lt':
4163 query = db[table][field]<args[i]
4164 elif tokens[2]=='gt':
4165 query = db[table][field]>args[i]
4166 elif tokens[2]=='ge':
4167 query = db[table][field]>=args[i]
4168 elif tokens[2]=='le':
4169 query = db[table][field]<=args[i]
4170 elif tokens[2]=='year':
4171 query = db[table][field].year()==args[i]
4172 elif tokens[2]=='month':
4173 query = db[table][field].month()==args[i]
4174 elif tokens[2]=='day':
4175 query = db[table][field].day()==args[i]
4176 elif tokens[2]=='hour':
4177 query = db[table][field].hour()==args[i]
4178 elif tokens[2]=='minute':
4179 query = db[table][field].minutes()==args[i]
4180 elif tokens[2]=='second':
4181 query = db[table][field].seconds()==args[i]
4182 elif tokens[2]=='startswith':
4183 query = db[table][field].startswith(args[i])
4184 elif tokens[2]=='contains':
4185 query = db[table][field].contains(args[i])
4186 else:
4187 raise RuntimeError, "invalid pattern: %s" % pattern
4188 if len(tokens)==4 and tokens[3]=='not':
4189 query = ~query
4190 elif len(tokens)>=4:
4191 raise RuntimeError, "invalid pattern: %s" % pattern
4192 dbset=dbset(query)
4193 else:
4194 raise RuntimeError, "missing relation in pattern: %s" % pattern
4195 elif otable and re2.match(tag) and args[i]==tag[:tag.find('[')]:
4196
4197 ref = tag[tag.find('[')+1:-1]
4198 if '.' in ref:
4199 table,field = ref.split('.')
4200
4201 if nested_select:
4202 try:
4203 dbset=db(db[table][field].belongs(dbset._select(db[otable]._id)))
4204 except ValueError:
4205 return Row({'status':400,'pattern':pattern,
4206 'error':'invalid path','response':None})
4207 else:
4208 items = [item.id for item in dbset.select(db[otable]._id)]
4209 dbset=db(db[table][field].belongs(items))
4210 else:
4211 dbset=dbset(db[ref])
4212 elif tag==':field' and table:
4213
4214 field = args[i]
4215 if not field in db[table]: break
4216 try:
4217 item = dbset.select(db[table][field],limitby=(0,1)).first()
4218 except ValueError:
4219 return Row({'status':400,'pattern':pattern,
4220 'error':'invalid path','response':None})
4221 if not item:
4222 return Row({'status':404,'pattern':pattern,
4223 'error':'record not found','response':None})
4224 else:
4225 return Row({'status':200,'response':item[field],
4226 'pattern':pattern})
4227 elif tag != args[i]:
4228 break
4229 otable = table
4230 i += 1
4231 if i==len(tags) and table:
4232 otable,ofield = vars.get('order','%s.%s' % (table,field)).split('.',1)
4233 try:
4234 if otable[:1]=='~': orderby = ~db[otable[1:]][ofield]
4235 else: orderby = db[otable][ofield]
4236 except KeyError:
4237 return Row({'status':400,'error':'invalid orderby','response':None})
4238 fields = [field for field in db[table] if field.readable]
4239 count = dbset.count()
4240 try:
4241 limits = (int(vars.get('min',0)),int(vars.get('max',1000)))
4242 if limits[0]<0 or limits[1]<limits[0]: raise ValueError
4243 except ValueError:
4244 return Row({'status':400,'error':'invalid limits','response':None})
4245 if count > limits[1]-limits[0]:
4246 return Row({'status':400,'error':'too many records','response':None})
4247 try:
4248 response = dbset.select(limitby=limits,orderby=orderby,*fields)
4249 except ValueError:
4250 return Row({'status':400,'pattern':pattern,
4251 'error':'invalid path','response':None})
4252 return Row({'status':200,'response':response,'pattern':pattern})
4253 return Row({'status':400,'error':'no matching pattern','response':None})
4254
4255
4256 - def define_table(
4257 self,
4258 tablename,
4259 *fields,
4260 **args
4261 ):
4262
4263 for key in args:
4264 if key not in [
4265 'migrate',
4266 'primarykey',
4267 'fake_migrate',
4268 'format',
4269 'trigger_name',
4270 'sequence_name',
4271 'polymodel']:
4272 raise SyntaxError, 'invalid table "%s" attribute: %s' % (tablename, key)
4273 migrate = self._migrate_enabled and args.get('migrate',self._migrate)
4274 fake_migrate = self._fake_migrate_all or args.get('fake_migrate',self._fake_migrate)
4275 format = args.get('format',None)
4276 trigger_name = args.get('trigger_name', None)
4277 sequence_name = args.get('sequence_name', None)
4278 primarykey=args.get('primarykey',None)
4279 polymodel=args.get('polymodel',None)
4280 if not isinstance(tablename,str):
4281 raise SyntaxError, "missing table name"
4282 tablename = cleanup(tablename)
4283 lowertablename = tablename.lower()
4284
4285 if tablename.startswith('_') or hasattr(self,lowertablename) or \
4286 regex_python_keywords.match(tablename):
4287 raise SyntaxError, 'invalid table name: %s' % tablename
4288 elif lowertablename in self.tables:
4289 raise SyntaxError, 'table already defined: %s' % tablename
4290 elif self.check_reserved:
4291 self.check_reserved_keyword(tablename)
4292
4293 if self._common_fields:
4294 fields = [f for f in fields] + [f for f in self._common_fields]
4295
4296 t = self[tablename] = Table(self, tablename, *fields,
4297 **dict(primarykey=primarykey,
4298 trigger_name=trigger_name,
4299 sequence_name=sequence_name))
4300
4301 if self._uri in (None,'None'):
4302 return t
4303
4304 t._create_references()
4305
4306 if migrate or self._adapter.dbengine=='google:datastore':
4307 try:
4308 sql_locker.acquire()
4309 self._adapter.create_table(t,migrate=migrate,
4310 fake_migrate=fake_migrate,
4311 polymodel=polymodel)
4312 finally:
4313 sql_locker.release()
4314 else:
4315 t._dbt = None
4316 self.tables.append(tablename)
4317 t._format = format
4318 return t
4319
4320 - def __iter__(self):
4321 for tablename in self.tables:
4322 yield self[tablename]
4323
4324 - def __getitem__(self, key):
4325 return dict.__getitem__(self, str(key))
4326
4327 - def __setitem__(self, key, value):
4328 dict.__setitem__(self, str(key), value)
4329
4330 - def __getattr__(self, key):
4331 return self[key]
4332
4333 - def __setattr__(self, key, value):
4334 if key[:1]!='_' and key in self:
4335 raise SyntaxError, \
4336 'Object %s exists and cannot be redefined' % key
4337 self[key] = value
4338
4339 - def __repr__(self):
4340 return '<DAL ' + dict.__repr__(self) + '>'
4341
4342 - def __call__(self, query=None):
4343 if isinstance(query,Table):
4344 query = query._id>0
4345 elif isinstance(query,Field):
4346 query = query!=None
4347 return Set(self, query)
4348
4349 - def commit(self):
4350 self._adapter.commit()
4351
4352 - def rollback(self):
4353 self._adapter.rollback()
4354
4355 - def executesql(self, query, placeholders=None, as_dict=False):
4356 """
4357 placeholders is optional; the DAL itself never passes it.
4358 if using raw SQL with placeholders, placeholders may be
4359 a sequence of values to be substituted in,
4360 or, *if supported by the DB driver*, a dictionary with keys
4361 matching named placeholders in your SQL.
4362
4363 Added 2009-12-05 "as_dict" optional argument. It defaults to
4364 False; when using raw SQL it can be set to True, and the
4365 results cursor returned by the DB driver will be
4366 converted to a sequence of dictionaries keyed with the db
4367 field names. Tested with SQLite but should work with any database
4368 since the cursor.description used to get field names is part of the
4369 Python DB API 2.0 spec. Results returned with as_dict = True are
4370 the same as those returned when applying .as_list() to a DAL query.
4371
4372 [{field1: value1, field2: value2}, {field1: value1b, field2: value2b}]
4373
4374 --bmeredyk
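
Example (a sketch; the 'person' table and the driver's qmark
paramstyle are assumptions)::

    rows = db.executesql('SELECT * FROM person WHERE name = ?;',
                         placeholders=('Max',), as_dict=True)
    # rows is then a list like [{'id': 1, 'name': 'Max'}, ...]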
4375 """
4376 if placeholders:
4377 self._adapter.execute(query, placeholders)
4378 else:
4379 self._adapter.execute(query)
4380 if as_dict:
4381 if not hasattr(self._adapter.cursor,'description'):
4382 raise RuntimeError, "database does not support executesql(...,as_dict=True)"
4383
4384
4385
4386 columns = self._adapter.cursor.description
4387
4388 fields = [f[0] for f in columns]
4389
4390 data = self._adapter.cursor.fetchall()
4391
4392
4393 return [dict(zip(fields,row)) for row in data]
4394
4395 try:
4396 return self._adapter.cursor.fetchall()
4397 except:
4398 return None
4399
4400 - def _update_referenced_by(self, other):
4401 for tablename in self.tables:
4402 by = self[tablename]._referenced_by
4403 by[:] = [item for item in by if not item[0] == other]
4404
4405 - def export_to_csv_file(self, ofile, *args, **kwargs):
4406 for table in self.tables:
4407 ofile.write('TABLE %s\r\n' % table)
4408 self[table].export_to_csv_file(ofile, *args, **kwargs)
4409 ofile.write('\r\n\r\n')
4410 ofile.write('END')
4411
4412 - def import_from_csv_file(self, ifile, id_map={}, null='<NULL>',
4413 unique='uuid', *args, **kwargs):
4414 for line in ifile:
4415 line = line.strip()
4416 if not line:
4417 continue
4418 elif line == 'END':
4419 return
4420 elif not line.startswith('TABLE ') or not line[6:] in self.tables:
4421 raise SyntaxError, 'invalid file format'
4422 else:
4423 tablename = line[6:]
4424 self[tablename].import_from_csv_file(ifile, id_map, null,
4425 unique, *args, **kwargs)
4426
4427
4429 """
4430 Helper class providing a comma-separated string having all the field names
4431 (prefixed by table name and '.')
4432
4433 normally only called from within gluon.sql
4434 """
4435
4436 - def __init__(self, table):
4437 self.table = table
4438
4439 - def __str__(self):
4440 return ', '.join([str(field) for field in self.table])
4441
4442
4443 -class Reference(int):
4444
4445 - def __allocate(self):
4446 if not self._record:
4447 self._record = self._table[int(self)]
4448 if not self._record:
4449 raise RuntimeError, "Using a recursive select but encountered a broken reference: %s %d"%(self._table, int(self))
4450
4451 - def __getattr__(self, key):
4452 if key == 'id':
4453 return int(self)
4454 self.__allocate()
4455 return self._record.get(key, None)
4456
4457 - def __setattr__(self, key, value):
4458 if key.startswith('_'):
4459 int.__setattr__(self, key, value)
4460 return
4461 self.__allocate()
4462 self._record[key] = value
4463
4464 - def __getitem__(self, key):
4465 if key == 'id':
4466 return int(self)
4467 self.__allocate()
4468 return self._record.get(key, None)
4469
4470 - def __setitem__(self, key, value):
4471 self.__allocate()
4472 self._record[key] = value
4473
4474
4475 def Reference_unpickler(data):
4476 return marshal.loads(data)
4477
4478 def Reference_pickler(data):
4479 try:
4480 marshal_dump = marshal.dumps(int(data))
4481 except AttributeError:
4482 marshal_dump = 'i%s' % struct.pack('<i', int(data))
4483 return (Reference_unpickler, (marshal_dump,))
4484
4485 copy_reg.pickle(Reference, Reference_pickler, Reference_unpickler)
4486
4487
4488 -class Table(dict):
4489
4490 """
4491 an instance of this class represents a database table
4492
4493 Example::
4494
4495 db = DAL(...)
4496 db.define_table('users', Field('name'))
4497 db.users.insert(name='me') # print db.users._insert(...) to see SQL
4498 db.users.drop()
4499 """
4500
4501 - def __init__(
4502 self,
4503 db,
4504 tablename,
4505 *fields,
4506 **args
4507 ):
4508 """
4509 Initializes the table and performs checking on the provided fields.
4510
4511 Each table will automatically have an 'id' field.
4512
4513 If a field is of type Table, the fields (excluding 'id') from that table
4514 will be used instead.
4515
4516 :raises SyntaxError: when a supplied field is of incorrect type.
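
Example (a sketch; the table and field names are assumptions)::

    db.define_table('person', Field('name'))
    # reuse the fields of person (except 'id') in a new table:
    db.define_table('person_archive', db.person, Field('deleted', 'boolean'))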
4517 """
4518 self._tablename = tablename
4519 self._sequence_name = args.get('sequence_name',None) or \
4520 db and db._adapter.sequence_name(tablename)
4521 self._trigger_name = args.get('trigger_name',None) or \
4522 db and db._adapter.trigger_name(tablename)
4523
4524 primarykey = args.get('primarykey', None)
4525 fieldnames,newfields=set(),[]
4526 if primarykey:
4527 if not isinstance(primarykey,list):
4528 raise SyntaxError, \
4529 "primarykey must be a list of fields from table '%s'" \
4530 % tablename
4531 self._primarykey = primarykey
4532 elif not [f for f in fields if isinstance(f,Field) and f.type=='id']:
4533 field = Field('id', 'id')
4534 newfields.append(field)
4535 fieldnames.add('id')
4536 self._id = field
4537 for field in fields:
4538 if not isinstance(field, (Field, Table)):
4539 raise SyntaxError, \
4540 'define_table argument is not a Field or Table: %s' % field
4541 elif isinstance(field, Field) and not field.name in fieldnames:
4542 if hasattr(field, '_db'):
4543 field = copy.copy(field)
4544 newfields.append(field)
4545 fieldnames.add(field.name)
4546 if field.type=='id':
4547 self._id = field
4548 elif isinstance(field, Table):
4549 table = field
4550 for field in table:
4551 if not field.name in fieldnames and not field.type=='id':
4552 newfields.append(copy.copy(field))
4553 fieldnames.add(field.name)
4554 else:
4555
4556 pass
4557 fields = newfields
4558 self._db = db
4559 tablename = tablename
4560 self.fields = SQLCallableList()
4561 self.virtualfields = []
4562 fields = list(fields)
4563
4564 if db and self._db._adapter.uploads_in_blob==True:
4565 for field in fields:
4566 if isinstance(field, Field) and field.type == 'upload'\
4567 and field.uploadfield is True:
4568 tmp = field.uploadfield = '%s_blob' % field.name
4569 fields.append(self._db.Field(tmp, 'blob', default=''))
4570
4571 lower_fieldnames = set()
4572 reserved = dir(Table) + ['fields']
4573 for field in fields:
4574 if db and db.check_reserved:
4575 db.check_reserved_keyword(field.name)
4576 elif field.name in reserved:
4577 raise SyntaxError, "field name %s not allowed" % field.name
4578
4579 if field.name.lower() in lower_fieldnames:
4580 raise SyntaxError, "duplicate field %s in table %s" \
4581 % (field.name, tablename)
4582 else:
4583 lower_fieldnames.add(field.name.lower())
4584
4585 self.fields.append(field.name)
4586 self[field.name] = field
4587 if field.type == 'id':
4588 self['id'] = field
4589 field.tablename = field._tablename = tablename
4590 field.table = field._table = self
4591 field.db = field._db = self._db
4592 if self._db and field.type!='text' and \
4593 self._db._adapter.maxcharlength < field.length:
4594 field.length = self._db._adapter.maxcharlength
4595 if field.requires == DEFAULT:
4596 field.requires = sqlhtml_validators(field)
4597 self.ALL = SQLALL(self)
4598
4599 if hasattr(self,'_primarykey'):
4600 for k in self._primarykey:
4601 if k not in self.fields:
4602 raise SyntaxError, \
4603 "primarykey must be a list of fields from table '%s " % tablename
4604 else:
4605 self[k].notnull = True
4606
4607 - def _validate(self, **vars):
4608 errors = Row()
4609 for key,value in vars.items():
4610 value,error = self[key].validate(value)
4611 if error:
4612 errors[key] = error
4613 return errors
4614
4615 - def _create_references(self):
4616 pr = self._db._pending_references
4617 self._referenced_by = []
4618 for fieldname in self.fields:
4619 field=self[fieldname]
4620 if isinstance(field.type,str) and field.type[:10] == 'reference ':
4621 ref = field.type[10:].strip()
4622 if not ref.split():
4623 raise SyntaxError, 'Table: reference to nothing: %s' % ref
4624 refs = ref.split('.')
4625 rtablename = refs[0]
4626 if not rtablename in self._db:
4627 pr[rtablename] = pr.get(rtablename,[]) + [field]
4628 continue
4629 rtable = self._db[rtablename]
4630 if len(refs)==2:
4631 rfieldname = refs[1]
4632 if not hasattr(rtable,'_primarykey'):
4633 raise SyntaxError,\
4634 'keyed tables can only reference other keyed tables (for now)'
4635 if rfieldname not in rtable.fields:
4636 raise SyntaxError,\
4637 "invalid field '%s' for referenced table '%s' in table '%s'" \
4638 % (rfieldname, rtablename, self._tablename)
4639 rtable._referenced_by.append((self._tablename, field.name))
4640 for referee in pr.get(self._tablename,[]):
4641 self._referenced_by.append((referee._tablename,referee.name))
4642
4643 - def _filter_fields(self, record, id=False):
4644 return dict([(k, v) for (k, v) in record.items() if k
4645 in self.fields and (self[k].type!='id' or id)])
4646
4648 """ for keyed table only """
4649 query = None
4650 for k,v in key.iteritems():
4651 if k in self._primarykey:
4652 if query:
4653 query = query & (self[k] == v)
4654 else:
4655 query = (self[k] == v)
4656 else:
4657 raise SyntaxError, \
4658 'Field %s is not part of the primary key of %s' % \
4659 (k,self._tablename)
4660 return query
4661
4662 - def __getitem__(self, key):
4663 if not key:
4664 return None
4665 elif isinstance(key, dict):
4666 """ for keyed table """
4667 query = self._build_query(key)
4668 rows = self._db(query).select()
4669 if rows:
4670 return rows[0]
4671 return None
4672 elif str(key).isdigit():
4673 return self._db(self._id == key).select(limitby=(0,1)).first()
4674 elif key:
4675 return dict.__getitem__(self, str(key))
4676
4677 - def __call__(self, key=DEFAULT, **kwargs):
4678 if key!=DEFAULT:
4679 if isinstance(key, Query):
4680 record = self._db(key).select(limitby=(0,1)).first()
4681 elif not str(key).isdigit():
4682 record = None
4683 else:
4684 record = self._db(self._id == key).select(limitby=(0,1)).first()
4685 if record:
4686 for k,v in kwargs.items():
4687 if record[k]!=v: return None
4688 return record
4689 elif kwargs:
4690 query = reduce(lambda a,b:a&b,[self[k]==v for k,v in kwargs.items()])
4691 return self._db(query).select(limitby=(0,1)).first()
4692 else:
4693 return None
4694
4695 - def __setitem__(self, key, value):
4696 if isinstance(key, dict) and isinstance(value, dict):
4697 """ option for keyed table """
4698 if set(key.keys()) == set(self._primarykey):
4699 value = self._filter_fields(value)
4700 kv = {}
4701 kv.update(value)
4702 kv.update(key)
4703 if not self.insert(**kv):
4704 query = self._build_query(key)
4705 self._db(query).update(**self._filter_fields(value))
4706 else:
4707 raise SyntaxError,\
4708 'key must have all fields from primary key: %s'%\
4709 (self._primarykey)
4710 elif str(key).isdigit():
4711 if key == 0:
4712 self.insert(**self._filter_fields(value))
4713 elif not self._db(self._id == key)\
4714 .update(**self._filter_fields(value)):
4715 raise SyntaxError, 'No such record: %s' % key
4716 else:
4717 if isinstance(key, dict):
4718 raise SyntaxError,\
4719 'value must be a dictionary: %s' % value
4720 dict.__setitem__(self, str(key), value)
4721
4722 - def __delitem__(self, key):
4723 if isinstance(key, dict):
4724 query = self._build_query(key)
4725 if not self._db(query).delete():
4726 raise SyntaxError, 'No such record: %s' % key
4727 elif not str(key).isdigit() or not self._db(self._id == key).delete():
4728 raise SyntaxError, 'No such record: %s' % key
4729
4730 - def __getattr__(self, key):
4731 return self[key]
4732
4733 - def __setattr__(self, key, value):
4734 if key in self:
4735 raise SyntaxError, 'Object exists and cannot be redefined: %s' % key
4736 self[key] = value
4737
4738 - def __iter__(self):
4739 for fieldname in self.fields:
4740 yield self[fieldname]
4741
4742 - def __repr__(self):
4743 return '<Table ' + dict.__repr__(self) + '>'
4744
4745 - def __str__(self):
4746 if self.get('_ot', None):
4747 return '%s AS %s' % (self._ot, self._tablename)
4748 return self._tablename
4749
4750 - def _drop(self, mode = ''):
4751 return self._db._adapter._drop(self, mode)
4752
4753 - def drop(self, mode = ''):
4754 return self._db._adapter.drop(self,mode)
4755
4756 - def _listify(self,fields,update=False):
4757 new_fields = []
4758 new_fields_names = []
4759 for name in fields:
4760 if not name in self.fields:
4761 if name != 'id':
4762 raise SyntaxError, 'Field %s does not belong to the table' % name
4763 else:
4764 new_fields.append((self[name],fields[name]))
4765 new_fields_names.append(name)
4766 for ofield in self:
4767 if not ofield.name in new_fields_names:
4768 if not update and ofield.default!=None:
4769 new_fields.append((ofield,ofield.default))
4770 elif update and ofield.update!=None:
4771 new_fields.append((ofield,ofield.update))
4772 for ofield in self:
4773 if not ofield.name in new_fields_names and ofield.compute:
4774 try:
4775 new_fields.append((ofield,ofield.compute(Row(fields))))
4776 except KeyError:
4777 pass
4778 if not update and ofield.required and not ofield.name in new_fields_names:
4779 raise SyntaxError,'Table: missing required field: %s' % ofield.name
4780 return new_fields
4781
4782 - def _insert(self, **fields):
4783 return self._db._adapter._insert(self,self._listify(fields))
4784
4785 - def insert(self, **fields):
4786 return self._db._adapter.insert(self,self._listify(fields))
4787
4788 - def validate_and_insert(self, **fields):
4789 response = Row()
4790 response.errors = self._validate(**fields)
4791 if not response.errors:
4792 response.id = self.insert(**fields)
4793 else:
4794 response.id = None
4795 return response
4796
4797 - def update_or_insert(self, key=DEFAULT, **values):
4798 if key==DEFAULT:
4799 record = self(**values)
4800 else:
4801 record = self(key)
4802 if record:
4803 record.update_record(**values)
4804 newid = None
4805 else:
4806 newid = self.insert(**values)
4807 return newid
4808
4809 - def bulk_insert(self, items):
4810 """
4811 here items is a list of dictionaries
4812 """
4813 items = [self._listify(item) for item in items]
4814 return self._db._adapter.bulk_insert(self,items)
4815
4816 - def _truncate(self, mode = ''):
4817 return self._db._adapter._truncate(self, mode)
4818
4819 - def truncate(self, mode = ''):
4820 return self._db._adapter.truncate(self, mode)
4821
4822 - def import_from_csv_file(
4823 self,
4824 csvfile,
4825 id_map=None,
4826 null='<NULL>',
4827 unique='uuid',
4828 *args, **kwargs
4829 ):
4830 """
4831 import records from csv file. Column headers must have same names as
4832 table fields. field 'id' is ignored. If column names read 'table.field'
4833 the 'table.' prefix is ignored.
4834 'unique' argument is a field which must be unique
4835 (typically a uuid field)
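
Example (a sketch; the file name and table are assumptions)::

    ifile = open('person.csv', 'rb')
    db.person.import_from_csv_file(ifile, delimiter=',', unique='uuid')
    ifile.close()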
4836 """
4837
4838 delimiter = kwargs.get('delimiter', ',')
4839 quotechar = kwargs.get('quotechar', '"')
4840 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
4841
4842 reader = csv.reader(csvfile, delimiter=delimiter, quotechar=quotechar, quoting=quoting)
4843 colnames = None
4844 if isinstance(id_map, dict):
4845 if not self._tablename in id_map:
4846 id_map[self._tablename] = {}
4847 id_map_self = id_map[self._tablename]
4848
4849 def fix(field, value, id_map):
4850 if value == null:
4851 value = None
4852 elif field.type=='blob':
4853 value = base64.b64decode(value)
4854 elif field.type=='double':
4855 if not value.strip():
4856 value = None
4857 else:
4858 value = float(value)
4859 elif field.type=='integer':
4860 if not value.strip():
4861 value = None
4862 else:
4863 value = int(value)
4864 elif field.type.startswith('list:string'):
4865 value = bar_decode_string(value)
4866 elif field.type.startswith('list:reference'):
4867 ref_table = field.type[10:].strip()
4868 value = [id_map[ref_table][int(v)] \
4869 for v in bar_decode_string(value)]
4870 elif field.type.startswith('list:'):
4871 value = bar_decode_integer(value)
4872 elif id_map and field.type.startswith('reference'):
4873 try:
4874 value = id_map[field.type[9:].strip()][value]
4875 except KeyError:
4876 pass
4877 return (field.name, value)
4878
4879 def is_id(colname):
4880 if colname in self:
4881 return self[colname].type == 'id'
4882 else:
4883 return False
4884
4885 for line in reader:
4886 if not line:
4887 break
4888 if not colnames:
4889 colnames = [x.split('.',1)[-1] for x in line][:len(line)]
4890 cols, cid = [], []
4891 for i,colname in enumerate(colnames):
4892 if is_id(colname):
4893 cid = i
4894 else:
4895 cols.append(i)
4896 if colname == unique:
4897 unique_idx = i
4898 else:
4899 items = [fix(self[colnames[i]], line[i], id_map) \
4900 for i in cols if colnames[i] in self.fields]
4901
4902
4903 if not unique or unique not in colnames:
4904 new_id = self.insert(**dict(items))
4905 else:
4906 unique_value = line[unique_idx]
4907 query = self._db[self][unique] == unique_value
4908 record = self._db(query).select().first()
4909 if record:
4910 record.update_record(**dict(items))
4911 new_id = record[self._id.name]
4912 else:
4913 new_id = self.insert(**dict(items))
4914 if id_map and cid != []:
4915 id_map_self[line[cid]] = new_id
4916
4917 - def with_alias(self, alias):
4918 return self._db._adapter.alias(self,alias)
4919
4920 - def on(self, query):
4921 return Expression(self._db,self._db._adapter.ON,self,query)
4922
4923
4924
4925 -class Expression(object):
4926
4927 - def __init__(
4928 self,
4929 db,
4930 op,
4931 first=None,
4932 second=None,
4933 type=None,
4934 ):
4935
4936 self.db = db
4937 self.op = op
4938 self.first = first
4939 self.second = second
4940
4941 if not type and first and hasattr(first,'type'):
4942 self.type = first.type
4943 else:
4944 self.type = type
4945
4946 - def sum(self):
4947 return Expression(self.db, self.db._adapter.AGGREGATE, self, 'SUM', self.type)
4948
4949 - def max(self):
4950 return Expression(self.db, self.db._adapter.AGGREGATE, self, 'MAX', self.type)
4951
4952 - def min(self):
4953 return Expression(self.db, self.db._adapter.AGGREGATE, self, 'MIN', self.type)
4954
4955 - def len(self):
4956 return Expression(self.db, self.db._adapter.AGGREGATE, self, 'LENGTH', 'integer')
4957
4958 - def lower(self):
4959 return Expression(self.db, self.db._adapter.LOWER, self, None, self.type)
4960
4961 - def upper(self):
4962 return Expression(self.db, self.db._adapter.UPPER, self, None, self.type)
4963
4964 - def year(self):
4965 return Expression(self.db, self.db._adapter.EXTRACT, self, 'year', 'integer')
4966
4967 - def month(self):
4968 return Expression(self.db, self.db._adapter.EXTRACT, self, 'month', 'integer')
4969
4970 - def day(self):
4971 return Expression(self.db, self.db._adapter.EXTRACT, self, 'day', 'integer')
4972
4973 - def hour(self):
4974 return Expression(self.db, self.db._adapter.EXTRACT, self, 'hour', 'integer')
4975
4976 - def minutes(self):
4977 return Expression(self.db, self.db._adapter.EXTRACT, self, 'minute', 'integer')
4978
4979 - def seconds(self):
4980 return Expression(self.db, self.db._adapter.EXTRACT, self, 'second', 'integer')
4981
4984
4985 - def __getslice__(self, start, stop):
4986 if start < 0:
4987 pos0 = '(%s - %d)' % (self.len(), abs(start) - 1)
4988 else:
4989 pos0 = start + 1
4990
4991 if stop < 0:
4992 length = '(%s - %d - %s)' % (self.len(), abs(stop) - 1, pos0)
4993 elif stop == sys.maxint:
4994 length = self.len()
4995 else:
4996 length = '(%s - %s)' % (stop + 1, pos0)
4997 return Expression(self.db,self.db._adapter.SUBSTRING,
4998 self, (pos0, length), self.type)
4999
5000 - def __getitem__(self, i):
5001 return self[i:i + 1]
5002
5003 - def __str__(self):
5004 return self.db._adapter.expand(self,self.type)
5005
5006 - def __or__(self, other): # for use in sortby
5007 return Expression(self.db,self.db._adapter.COMMA,self,other,self.type)
5008
5009 - def __invert__(self):
5010 if hasattr(self,'_op') and self.op == self.db._adapter.INVERT:
5011 return self.first
5012 return Expression(self.db,self.db._adapter.INVERT,self,type=self.type)
5013
5014 - def __add__(self, other):
5015 return Expression(self.db,self.db._adapter.ADD,self,other,self.type)
5016
5017 - def __sub__(self, other):
5018 if self.type == 'integer':
5019 result_type = 'integer'
5020 elif self.type in ['date','time','datetime','double']:
5021 result_type = 'double'
5022 else:
5023 raise SyntaxError, "subtraction operation not supported for type"
5024 return Expression(self.db,self.db._adapter.SUB,self,other,
5025 result_type)
5026 - def __mul__(self, other):
5027 return Expression(self.db,self.db._adapter.MUL,self,other,self.type)
5028
5029 - def __div__(self, other):
5030 return Expression(self.db,self.db._adapter.DIV,self,other,self.type)
5031
5032 - def __mod__(self, other):
5033 return Expression(self.db,self.db._adapter.MOD,self,other,self.type)
5034
5035 - def __eq__(self, value):
5036 return Query(self.db, self.db._adapter.EQ, self, value)
5037
5038 - def __ne__(self, value):
5039 return Query(self.db, self.db._adapter.NE, self, value)
5040
5041 - def __lt__(self, value):
5042 return Query(self.db, self.db._adapter.LT, self, value)
5043
5044 - def __le__(self, value):
5045 return Query(self.db, self.db._adapter.LE, self, value)
5046
5047 - def __gt__(self, value):
5048 return Query(self.db, self.db._adapter.GT, self, value)
5049
5050 - def __ge__(self, value):
5051 return Query(self.db, self.db._adapter.GE, self, value)
5052
5053 - def like(self, value):
5054 return Query(self.db, self.db._adapter.LIKE, self, value)
5055
5056 - def belongs(self, value):
5057 return Query(self.db, self.db._adapter.BELONGS, self, value)
5058
5059 - def startswith(self, value):
5060 if not self.type in ('string', 'text'):
5061 raise SyntaxError, "startswith used with incompatible field type"
5062 return Query(self.db, self.db._adapter.STARTSWITH, self, value)
5063
5064 - def endswith(self, value):
5065 if not self.type in ('string', 'text'):
5066 raise SyntaxError, "endswith used with incompatible field type"
5067 return Query(self.db, self.db._adapter.ENDSWITH, self, value)
5068
5069 - def contains(self, value):
5070 if not self.type in ('string', 'text') and not self.type.startswith('list:'):
5071 raise SyntaxError, "contains used with incompatible field type"
5072 return Query(self.db, self.db._adapter.CONTAINS, self, value)
5073
5074 - def with_alias(self, alias):
5075 return Expression(self.db,self.db._adapter.AS,self,alias,self.type)
5076
5077
5078
5079
5081 """
5082 allows defining of custom SQL types
5083
5084 Example::
5085
5086 decimal = SQLCustomType(
5087 type ='double',
5088 native ='integer',
5089 encoder =(lambda x: int(float(x) * 100)),
5090 decoder = (lambda x: Decimal("0.00") + Decimal(str(float(x)/100)) )
5091 )
5092
5093 db.define_table(
5094 'example',
5095 Field('value', type=decimal)
5096 )
5097
5098 :param type: the web2py type (default = 'string')
5099 :param native: the backend type
5100 :param encoder: how to encode the value to store it in the backend
5101 :param decoder: how to decode the value retrieved from the backend
5102 :param validator: what validators to use ( default = None, will use the
5103 default validator for type)
5104 """
5105
5106 - def __init__(
5107 self,
5108 type='string',
5109 native=None,
5110 encoder=None,
5111 decoder=None,
5112 validator=None,
5113 _class=None,
5114 ):
5115
5116 self.type = type
5117 self.native = native
5118 self.encoder = encoder or (lambda x: x)
5119 self.decoder = decoder or (lambda x: x)
5120 self.validator = validator
5121 self._class = _class or type
5122
5122
5123 - def startswith(self, dummy=None):
5124 return False
5125
5126 - def __getslice__(self, a=0, b=100):
5127 return None
5128
5129 - def __getitem__(self, i):
5130 return None
5131
5132 - def __str__(self):
5133 return self._class
5134
5135
5136 -class Field(Expression):
5137
5138 """
5139 an instance of this class represents a database field
5140
5141 example::
5142
5143 a = Field(name, 'string', length=32, default=None, required=False,
5144 requires=IS_NOT_EMPTY(), ondelete='CASCADE',
5145 notnull=False, unique=False,
5146 widget=None, label=None, comment=None,
5147 uploadfield=True, # True means store on disk,
5148 # 'a_field_name' means store in this field in db
5149 # False means file content will be discarded.
5150 writable=True, readable=True, update=None, authorize=None,
5151 autodelete=False, represent=None, uploadfolder=None,
5152 uploadseparate=False # upload to separate directories by uuid_keys
5153 # first 2 character and tablename.fieldname
5154 # False - old behavior
5155 # True - put uploaded file in
5156 # <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
5157 # directory)
5158
5159 to be used as argument of DAL.define_table
5160
5161 allowed field types:
5162 string, boolean, integer, double, text, blob,
5163 date, time, datetime, upload, password
5164
5165 string fields default to a length of Adapter.maxcharlength (512, or 255 for mysql)
5166 fields should have a default or they will be required in SQLFORMs
5167 the requires argument is used to validate the field input in SQLFORMs
5168
5169 """
5170
5171 - def __init__(
5172 self,
5173 fieldname,
5174 type='string',
5175 length=None,
5176 default=DEFAULT,
5177 required=False,
5178 requires=DEFAULT,
5179 ondelete='CASCADE',
5180 notnull=False,
5181 unique=False,
5182 uploadfield=True,
5183 widget=None,
5184 label=None,
5185 comment=None,
5186 writable=True,
5187 readable=True,
5188 update=None,
5189 authorize=None,
5190 autodelete=False,
5191 represent=None,
5192 uploadfolder=None,
5193 uploadseparate=False,
5194 compute=None,
5195 custom_store=None,
5196 custom_retrieve=None,
5197 custom_delete=None,
5198 ):
5199 self.db = None
5200 self.op = None
5201 self.first = None
5202 self.second = None
5203 if not isinstance(fieldname,str):
5204 raise SyntaxError, "missing field name"
5205 if fieldname.startswith(':'):
5206 fieldname,readable,writable=fieldname[1:],False,False
5207 elif fieldname.startswith('.'):
5208 fieldname,readable,writable=fieldname[1:],False,False
5209 if '=' in fieldname:
5210 fieldname,default = fieldname.split('=',1)
5211 self.name = fieldname = cleanup(fieldname)
5212 if hasattr(Table,fieldname) or fieldname[0] == '_' or \
5213 regex_python_keywords.match(fieldname):
5214 raise SyntaxError, 'Field: invalid field name: %s' % fieldname
5215 if isinstance(type, Table):
5216 type = 'reference ' + type._tablename
5217 self.type = type
5218 self.length = (length is None) and MAXCHARLENGTH or length
5219 if default==DEFAULT:
5220 self.default = update or None
5221 else:
5222 self.default = default
5223 self.required = required
5224 self.ondelete = ondelete.upper()
5225 self.notnull = notnull
5226 self.unique = unique
5227 self.uploadfield = uploadfield
5228 self.uploadfolder = uploadfolder
5229 self.uploadseparate = uploadseparate
5230 self.widget = widget
5231 self.label = label or ' '.join(item.capitalize() for item in fieldname.split('_'))
5232 self.comment = comment
5233 self.writable = writable
5234 self.readable = readable
5235 self.update = update
5236 self.authorize = authorize
5237 self.autodelete = autodelete
5238 if not represent and type in ('list:integer','list:string'):
5239 represent=lambda x: ', '.join(str(y) for y in x or [])
5240 self.represent = represent
5241 self.compute = compute
5242 self.isattachment = True
5243 self.custom_store = custom_store
5244 self.custom_retrieve = custom_retrieve
5245 self.custom_delete = custom_delete
5246 if self.label is None:
5247 self.label = ' '.join([x.capitalize() for x in
5248 fieldname.split('_')])
5249 if requires is None:
5250 self.requires = []
5251 else:
5252 self.requires = requires
5253
5254 - def store(self, file, filename=None, path=None):
5255 if self.custom_store:
5256 return self.custom_store(file,filename,path)
5257 if not filename:
5258 filename = file.name
5259 filename = os.path.basename(filename.replace('/', os.sep)\
5260 .replace('\\', os.sep))
5261 m = re.compile('\.(?P<e>\w{1,5})$').search(filename)
5262 extension = m and m.group('e') or 'txt'
5263 uuid_key = web2py_uuid().replace('-', '')[-16:]
5264 encoded_filename = base64.b16encode(filename).lower()
5265 newfilename = '%s.%s.%s.%s' % \
5266 (self._tablename, self.name, uuid_key, encoded_filename)
5267 newfilename = newfilename[:200] + '.' + extension
5268 if isinstance(self.uploadfield,Field):
5269 blob_uploadfield_name = self.uploadfield.uploadfield
5270 keys={self.uploadfield.name: newfilename,
5271 blob_uploadfield_name: file.read()}
5272 self.uploadfield.table.insert(**keys)
5273 elif self.uploadfield == True:
5274 if path:
5275 pass
5276 elif self.uploadfolder:
5277 path = self.uploadfolder
5278 elif self.db._adapter.folder:
5279 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5280 else:
5281 raise RuntimeError, "you must specify a Field(...,uploadfolder=...)"
5282 if self.uploadseparate:
5283 path = os.path.join(path,"%s.%s" % (self._tablename, self.name),uuid_key[:2])
5284 if not os.path.exists(path):
5285 os.makedirs(path)
5286 pathfilename = os.path.join(path, newfilename)
5287 dest_file = open(pathfilename, 'wb')
5288 try:
5289 shutil.copyfileobj(file, dest_file)
5290 finally:
5291 dest_file.close()
5292 return newfilename
5293
5294 - def retrieve(self, name, path=None):
5295 if self.custom_retrieve:
5296 return self.custom_retrieve(name, path)
5297 import http
5298 if self.authorize or isinstance(self.uploadfield, str):
5299 row = self.db(self == name).select().first()
5300 if not row:
5301 raise http.HTTP(404)
5302 if self.authorize and not self.authorize(row):
5303 raise http.HTTP(403)
5304 try:
5305 m = regex_content.match(name)
5306 if not m or not self.isattachment:
5307 raise TypeError, 'Can\'t retrieve %s' % name
5308 filename = base64.b16decode(m.group('name'), True)
5309 filename = regex_cleanup_fn.sub('_', filename)
5310 except (TypeError, AttributeError):
5311 filename = name
5312 if isinstance(self.uploadfield, str):
5313 return (filename, cStringIO.StringIO(row[self.uploadfield] or ''))
5314 elif isinstance(self.uploadfield,Field):
5315 blob_uploadfield_name = self.uploadfield.uploadfield
5316 query = self.uploadfield == name
5317 data = self.uploadfield.table(query)[blob_uploadfield_name]
5318 return (filename, cStringIO.StringIO(data))
5319 else:
5320
5321 if path:
5322 pass
5323 elif self.uploadfolder:
5324 path = self.uploadfolder
5325 else:
5326 path = os.path.join(self.db._adapter.folder, '..', 'uploads')
5327 if self.uploadseparate:
5328 t = m.group('table')
5329 f = m.group('field')
5330 u = m.group('uuidkey')
5331 path = os.path.join(path,"%s.%s" % (t,f),u[:2])
5332 return (filename, open(os.path.join(path, name), 'rb'))
5333
5348
5360
5361 - def count(self):
5362 return Expression(self.db, self.db._adapter.AGGREGATE, self, 'COUNT', 'integer')
5363
5364 - def __nonzero__(self):
5365 return True
5366
5367 - def __str__(self):
5368 try:
5369 return '%s.%s' % (self.tablename, self.name)
5370 except:
5371 return '<no table>.%s' % self.name
5372
5373
5374 -class Query(object):
5375
5376 """
5377 a query object necessary to define a set.
5378 it can be stored or can be passed to DAL.__call__() to obtain a Set
5379
5380 Example::
5381
5382 query = db.users.name=='Max'
5383 set = db(query)
5384 records = set.select()
5385
5386 """
5387
5388 - def __init__(
5389 self,
5390 db,
5391 op,
5392 first=None,
5393 second=None,
5394 ):
5395 self.db = db
5396 self.op = op
5397 self.first = first
5398 self.second = second
5399
5400 - def __str__(self):
5401 return self.db._adapter.expand(self)
5402
5403 - def __and__(self, other):
5404 return Query(self.db,self.db._adapter.AND,self,other)
5405
5406 - def __or__(self, other):
5407 return Query(self.db,self.db._adapter.OR,self,other)
5408
5409 - def __invert__(self):
5410 if self.op==self.db._adapter.NOT:
5411 return self.first
5412 return Query(self.db,self.db._adapter.NOT,self)
5413
5414
5415 regex_quotes = re.compile("'[^']*'")
5416
5417
5418 def xorify(orderby):
5419 if not orderby:
5420 return None
5421 orderby2 = orderby[0]
5422 for item in orderby[1:]:
5423 orderby2 = orderby2 | item
5424 return orderby2
5425
5426
5427 -class Set(object):
5428
5429 """
5430 a Set represents a set of records in the database,
5431 the records are identified by the query=Query(...) object.
5432 normally the Set is generated by DAL.__call__(Query(...))
5433
5434 given a set, for example
5435 set = db(db.users.name=='Max')
5436 you can:
5437 set.update(name='Massimo')
5438 set.delete() # all elements in the set
5439 set.select(orderby=db.users.id, groupby=db.users.name, limitby=(0,10))
5440 and take subsets:
5441 subset = set(db.users.id<5)
5442 """
5443
5444 - def __init__(self, db, query):
5445 self.db = db
5446 self._db = db
5447 self.query = query
5448
5449 - def __call__(self, query):
5450 if isinstance(query,Table):
5451 query = query._id>0
5452 elif isinstance(query,Field):
5453 query = query!=None
5454 if self.query:
5455 return Set(self.db, self.query & query)
5456 else:
5457 return Set(self.db, query)
5458
5459 - def _count(self,distinct=None):
5460 return self.db._adapter._count(self.query,distinct)
5461
5462 - def _select(self, *fields, **attributes):
5463 return self.db._adapter._select(self.query,fields,attributes)
5464
5465 - def _delete(self):
5466 tablename=self.db._adapter.get_table(self.query)
5467 return self.db._adapter._delete(tablename,self.query)
5468
5469 - def _update(self, **update_fields):
5470 tablename = self.db._adapter.get_table(self.query)
5471 fields = self.db[tablename]._listify(update_fields,update=True)
5472 return self.db._adapter._update(tablename,self.query,fields)
5473
5474 - def isempty(self):
5475 return not self.select(limitby=(0,1))
5476
5477 - def count(self,distinct=None):
5478 return self.db._adapter.count(self.query,distinct)
5479
5480 - def select(self, *fields, **attributes):
5481 return self.db._adapter.select(self.query,fields,attributes)
5482
5483 - def delete(self):
5484 tablename=self.db._adapter.get_table(self.query)
5485 self.delete_uploaded_files()
5486 return self.db._adapter.delete(tablename,self.query)
5487
5488 - def update(self, **update_fields):
5489 tablename = self.db._adapter.get_table(self.query)
5490 fields = self.db[tablename]._listify(update_fields,update=True)
5491 if not fields:
5492 raise SyntaxError, "No fields to update"
5493 self.delete_uploaded_files(update_fields)
5494 return self.db._adapter.update(tablename,self.query,fields)
5495
5496 - def validate_and_update(self, **update_fields):
5497 tablename = self.db._adapter.get_table(self.query)
5498 response = Row()
5499 response.errors = self.db[tablename]._validate(**update_fields)
5500 fields = self.db[tablename]._listify(update_fields,update=True)
5501 if not fields:
5502 raise SyntaxError, "No fields to update"
5503 self.delete_uploaded_files(update_fields)
5504 if not response.errors:
5505 response.updated = self.db._adapter.update(tablename,self.query,fields)
5506 else:
5507 response.updated = None
5508 return response
5509
5510 - def delete_uploaded_files(self, upload_fields=None):
5511 table = self.db[self.db._adapter.tables(self.query)[0]]
5512
5513 if upload_fields:
5514 fields = upload_fields.keys()
5515 else:
5516 fields = table.fields
5517 fields = [f for f in fields if table[f].type == 'upload'
5518 and table[f].uploadfield == True
5519 and table[f].autodelete]
5520 if not fields:
5521 return
5522 for record in self.select(*[table[f] for f in fields]):
5523 for fieldname in fields:
5524 field = table[fieldname]
5525 oldname = record.get(fieldname, None)
5526 if not oldname:
5527 continue
5528 if upload_fields and oldname == upload_fields[fieldname]:
5529 continue
5530 if field.custom_delete:
5531 field.custom_delete(oldname)
5532 else:
5533 uploadfolder = field.uploadfolder
5534 if not uploadfolder:
5535 uploadfolder = os.path.join(self.db._adapter.folder, '..', 'uploads')
5536 if field.uploadseparate:
5537 items = oldname.split('.')
5538 uploadfolder = os.path.join(uploadfolder,
5539 "%s.%s" % (items[0], items[1]),
5540 items[2][:2])
5541 oldpath = os.path.join(uploadfolder, oldname)
5542 if os.path.exists(oldpath):
5543 os.unlink(oldpath)
5544
5545 def update_record(pack, a=None):
5546 (colset, table, id) = pack
5547 b = a or dict(colset)
5548 c = dict([(k,v) for (k,v) in b.items() if k in table.fields and table[k].type!='id'])
5549 table._db(table._id==id).update(**c)
5550 for (k, v) in c.items():
5551 colset[k] = v
5552
5553
5554 -class Rows(object):
5555
5556 """
5557 A wrapper for the return value of a select. It basically represents a table.
5558 It has an iterator and each row is represented as a dictionary.
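
Example (a sketch; the 'person' table and its fields are assumptions)::

    rows = db(db.person.id > 0).select()
    for row in rows:
        print row.name
    print len(rows), rows.first(), rows.last()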
5559 """
5560
5561
5562
5563 - def __init__(
5564 self,
5565 db=None,
5566 records=[],
5567 colnames=[],
5568 compact=True,
5569 rawrows=None
5570 ):
5571 self.db = db
5572 self.records = records
5573 self.colnames = colnames
5574 self.compact = compact
5575 self.response = rawrows
5576
5577 - def setvirtualfields(self, **keyed_virtualfields):
5578 if not keyed_virtualfields:
5579 return self
5580 for row in self.records:
5581 for (tablename,virtualfields) in keyed_virtualfields.items():
5582 attributes = dir(virtualfields)
5583 virtualfields.__dict__.update(row)
5584 if not tablename in row:
5585 box = row[tablename] = Row()
5586 else:
5587 box = row[tablename]
5588 for attribute in attributes:
5589 if attribute[0] != '_':
5590 method = getattr(virtualfields,attribute)
5591 if hasattr(method,'im_func') and method.im_func.func_code.co_argcount:
5592 box[attribute]=method()
5593 return self
5594
5595 - def __and__(self, other):
5596 if self.colnames!=other.colnames: raise Exception, 'Cannot & incompatible Rows objects'
5597 records = self.records+other.records
5598 return Rows(self.db,records,self.colnames)
5599
5600 - def __or__(self, other):
5601 if self.colnames!=other.colnames: raise Exception, 'Cannot | incompatible Rows objects'
5602 records = self.records
5603 records += [record for record in other.records \
5604 if not record in records]
5605 return Rows(self.db,records,self.colnames)
5606
5607 - def __nonzero__(self):
5608 if len(self.records):
5609 return 1
5610 return 0
5611
5612 - def __len__(self):
5613 return len(self.records)
5614
5615 - def __getslice__(self, a, b):
5616 return Rows(self.db,self.records[a:b],self.colnames)
5617
5618 - def __getitem__(self, i):
5619 row = self.records[i]
5620 keys = row.keys()
5621 if self.compact and len(keys) == 1 and keys[0] != '_extra':
5622 return row[row.keys()[0]]
5623 return row
5624
5626 """
5627 iterator over records
5628 """
5629
5630 for i in xrange(len(self)):
5631 yield self[i]
5632
5634 """
5635 serializes the table into a csv file
5636 """
5637
5638 s = cStringIO.StringIO()
5639 self.export_to_csv_file(s)
5640 return s.getvalue()
5641
5642 - def first(self):
5643 if not self.records:
5644 return None
5645 return self[0]
5646
5647 - def last(self):
5648 if not self.records:
5649 return None
5650 return self[-1]
5651
5653 """
5654 returns a new Rows object, a subset of the original object,
5655 filtered by the function f
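
Example (a sketch; assumes rows was returned by a select and has
an 'age' column)::

    adults = rows.find(lambda row: row.age >= 18)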
5656 """
5657 if not self.records:
5658 return Rows(self.db, [], self.colnames)
5659 records = []
5660 for i in range(0,len(self)):
5661 row = self[i]
5662 if f(row):
5663 records.append(self.records[i])
5664 return Rows(self.db, records, self.colnames)
5665
5667 """
5668 removes elements from the calling Rows object, filtered by the function f,
5669 and returns a new Rows object containing the removed elements
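
Example (a sketch; assumes an 'age' column; note that the matching
records are removed from the calling object as well)::

    minors = rows.exclude(lambda row: row.age < 18)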
5670 """
5671 if not self.records:
5672 return Rows(self.db, [], self.colnames)
5673 removed = []
5674 i=0
5675 while i<len(self):
5676 row = self[i]
5677 if f(row):
5678 removed.append(self.records[i])
5679 del self.records[i]
5680 else:
5681 i += 1
5682 return Rows(self.db, removed, self.colnames)
5683
5684 - def sort(self, f, reverse=False):
5685 """
5686 returns a list of sorted elements (not sorted in place)
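
Example (a sketch; assumes a 'name' column)::

    for row in rows.sort(lambda row: row.name, reverse=True):
        print row.name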
5687 """
5688 return Rows(self.db,sorted(self,key=f,reverse=reverse),self.colnames)
5689
5690 - def as_list(self,
5691 compact=True,
5692 storage_to_dict=True,
5693 datetime_to_str=True):
5694 """
5695 returns the data as a list or dictionary.
5696 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5697 :param datetime_to_str: convert datetime fields as strings (default True)
5698 """
5699 (oc, self.compact) = (self.compact, compact)
5700 if storage_to_dict:
5701 items = [item.as_dict(datetime_to_str) for item in self]
5702 else:
5703 items = [item for item in self]
5704 self.compact = compact
5705 return items
5706
5707
5708 - def as_dict(self,
5709 key='id',
5710 compact=True,
5711 storage_to_dict=True,
5712 datetime_to_str=True):
5713 """
5714 returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)
5715
5716 :param key: the name of the field to be used as dict key, normally the id
5717 :param compact: if True (default), records from a single table are not keyed by table name
5718 :param storage_to_dict: when True returns a dict, otherwise a list(default True)
5719 :param datetime_to_str: convert datetime fields as strings (default True)
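
Example (a sketch; the 'person' table is an assumption)::

    people = db(db.person).select().as_dict(key='name')
    # e.g. {'Max': {'id': 1, 'name': 'Max'}, ...}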
5720 """
5721 rows = self.as_list(compact, storage_to_dict, datetime_to_str)
5722 if isinstance(key,str) and key.count('.')==1:
5723 (table, field) = key.split('.')
5724 return dict([(r[table][field],r) for r in rows])
5725 elif isinstance(key,str):
5726 return dict([(r[key],r) for r in rows])
5727 else:
5728 return dict([(key(r),r) for r in rows])
5729
5731 """
5732 export data to csv, the first line contains the column names
5733
5734 :param ofile: where the csv must be exported to
5735 :param null: how null values must be represented (default '<NULL>')
5736 :param delimiter: delimiter to separate values (default ',')
5737 :param quotechar: character to use to quote string values (default '"')
5738 :param quoting: quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
5739 :param represent: use the fields .represent value (default False)
5740 :param colnames: list of column names to use (default self.colnames)
5741 This only works when exporting Rows objects;
5742 do not use it with db.export_to_csv_file()
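
Example (a sketch; the file name and table are assumptions)::

    ofile = open('people.csv', 'wb')
    db(db.person).select().export_to_csv_file(ofile, represent=True)
    ofile.close()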
5743 """
5744 delimiter = kwargs.get('delimiter', ',')
5745 quotechar = kwargs.get('quotechar', '"')
5746 quoting = kwargs.get('quoting', csv.QUOTE_MINIMAL)
5747 represent = kwargs.get('represent', False)
5748 writer = csv.writer(ofile, delimiter=delimiter,
5749 quotechar=quotechar, quoting=quoting)
5750 colnames = kwargs.get('colnames', self.colnames)
5751
5752 writer.writerow(colnames)
5753
5754 def none_exception(value):
5755 """
5756 returns a cleaned up value that can be used for csv export:
5757 - unicode text is encoded as such
5758 - None values are replaced with the given representation (default <NULL>)
5759 """
5760 if value is None:
5761 return null
5762 elif isinstance(value, unicode):
5763 return value.encode('utf8')
5764 elif isinstance(value,Reference):
5765 return int(value)
5766 elif hasattr(value, 'isoformat'):
5767 return value.isoformat()[:19].replace('T', ' ')
5768 elif isinstance(value, (list,tuple)):
5769 return bar_encode(value)
5770 return value
5771
5772 for record in self:
5773 row = []
5774 for col in colnames:
5775 if not table_field.match(col):
5776 row.append(record._extra[col])
5777 else:
5778 (t, f) = col.split('.')
5779 field = self.db[t][f]
5780 if isinstance(record.get(t, None), (Row,dict)):
5781 value = record[t][f]
5782 else:
5783 value = record[f]
5784 if field.type=='blob' and value!=None:
5785 value = base64.b64encode(value)
5786 elif represent and field.represent:
5787 value = field.represent(value)
5788 row.append(none_exception(value))
5789 writer.writerow(row)
5790
5792 """
5793 serializes the table using sqlhtml.SQLTABLE (if present)
5794 """
5795
5796 import sqlhtml
5797 return sqlhtml.SQLTABLE(self).xml()
5798
5799 - def json(self, mode='object', default=None):
5800 """
5801 serializes the table to a JSON list of objects
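
Example (a sketch; the 'person' table is an assumption)::

    print db(db.person).select().json()
    # e.g. '[{"id": 1, "name": "Max"}]'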
5802 """
5803 mode = mode.lower()
5804 if not mode in ['object', 'array']:
5805 raise SyntaxError, 'Invalid JSON serialization mode: %s' % mode
5806
5807 def inner_loop(record, col):
5808 (t, f) = col.split('.')
5809 res = None
5810 if not table_field.match(col):
5811 res = record._extra[col]
5812 else:
5813 if isinstance(record.get(t, None), Row):
5814 res = record[t][f]
5815 else:
5816 res = record[f]
5817 if mode == 'object':
5818 return (f, res)
5819 else:
5820 return res
5821
5822 if mode == 'object':
5823 items = [dict([inner_loop(record, col) for col in
5824 self.colnames]) for record in self]
5825 else:
5826 items = [[inner_loop(record, col) for col in self.colnames]
5827 for record in self]
5828 if have_serializers:
5829 return serializers.json(items,default=default or serializers.custom_json)
5830 else:
5831 import simplejson
5832 return simplejson.dumps(items)
5833
5834 def Rows_unpickler(data):
5835 return cPickle.loads(data)
5836
5837 def Rows_pickler(data):
5838 return Rows_unpickler, \
5839 (cPickle.dumps(data.as_list(storage_to_dict=True,
5840 datetime_to_str=False)),)
5841
5842 copy_reg.pickle(Rows, Rows_pickler, Rows_unpickler)
5843
5844
5845
5846
5847
5848
5850 """
5851
5852 >>> if len(sys.argv)<2: db = DAL(\"sqlite://test.db\")
5853 >>> if len(sys.argv)>1: db = DAL(sys.argv[1])
5854 >>> tmp = db.define_table('users',\
5855 Field('stringf', 'string', length=32, required=True),\
5856 Field('booleanf', 'boolean', default=False),\
5857 Field('passwordf', 'password', notnull=True),\
5858 Field('uploadf', 'upload'),\
5859 Field('blobf', 'blob'),\
5860 Field('integerf', 'integer', unique=True),\
5861 Field('doublef', 'double', unique=True,notnull=True),\
5862 Field('datef', 'date', default=datetime.date.today()),\
5863 Field('timef', 'time'),\
5864 Field('datetimef', 'datetime'),\
5865 migrate='test_user.table')
5866
5867 Insert a record
5868
5869 >>> db.users.insert(stringf='a', booleanf=True, passwordf='p', blobf='0A',\
5870 uploadf=None, integerf=5, doublef=3.14,\
5871 datef=datetime.date(2001, 1, 1),\
5872 timef=datetime.time(12, 30, 15),\
5873 datetimef=datetime.datetime(2002, 2, 2, 12, 30, 15))
5874 1
5875
5876 Drop the table
5877
5878 >>> db.users.drop()
5879
5880 Examples of insert, select, update, delete
5881
5882 >>> tmp = db.define_table('person',\
5883 Field('name'),\
5884 Field('birth','date'),\
5885 migrate='test_person.table')
5886 >>> person_id = db.person.insert(name=\"Marco\",birth='2005-06-22')
5887 >>> person_id = db.person.insert(name=\"Massimo\",birth='1971-12-21')
5888
5889 commented len(db().select(db.person.ALL))
5890 commented 2
5891
5892 >>> me = db(db.person.id==person_id).select()[0] # test select
5893 >>> me.name
5894 'Massimo'
5895 >>> db(db.person.name=='Massimo').update(name='massimo') # test update
5896 1
5897 >>> db(db.person.name=='Marco').select().first().delete_record() # test delete
5898 1
5899
5900 Update a single record
5901
5902 >>> me.update_record(name=\"Max\")
5903 >>> me.name
5904 'Max'
5905
5906 Examples of complex search conditions
5907
5908 >>> len(db((db.person.name=='Max')&(db.person.birth<'2003-01-01')).select())
5909 1
5910 >>> len(db((db.person.name=='Max')&(db.person.birth<datetime.date(2003,01,01))).select())
5911 1
5912 >>> len(db((db.person.name=='Max')|(db.person.birth<'2003-01-01')).select())
5913 1
5914 >>> me = db(db.person.id==person_id).select(db.person.name)[0]
5915 >>> me.name
5916 'Max'
5917
5918 Examples of search conditions using extract from date/datetime/time
5919
5920 >>> len(db(db.person.birth.month()==12).select())
5921 1
5922 >>> len(db(db.person.birth.year()>1900).select())
5923 1
5924
5925 Example of usage of NULL
5926
5927 >>> len(db(db.person.birth==None).select()) ### test NULL
5928 0
5929 >>> len(db(db.person.birth!=None).select()) ### test NULL
5930 1
5931
5932 Examples of search conditions using lower, upper, and like
5933
5934 >>> len(db(db.person.name.upper()=='MAX').select())
5935 1
5936 >>> len(db(db.person.name.like('%ax')).select())
5937 1
5938 >>> len(db(db.person.name.upper().like('%AX')).select())
5939 1
5940 >>> len(db(~db.person.name.upper().like('%AX')).select())
5941 0
5942
5943 orderby, groupby and limitby
5944
5945 >>> people = db().select(db.person.name, orderby=db.person.name)
5946 >>> order = db.person.name|~db.person.birth
5947 >>> people = db().select(db.person.name, orderby=order)
5948
5949 >>> people = db().select(db.person.name, orderby=db.person.name, groupby=db.person.name)
5950
5951 >>> people = db().select(db.person.name, orderby=order, limitby=(0,100))
5952
5953 Example of one 2 many relation
5954
5955 >>> tmp = db.define_table('dog',\
5956 Field('name'),\
5957 Field('birth','date'),\
5958 Field('owner',db.person),\
5959 migrate='test_dog.table')
5960 >>> db.dog.insert(name='Snoopy', birth=None, owner=person_id)
5961 1
5962
5963 A simple JOIN
5964
5965 >>> len(db(db.dog.owner==db.person.id).select())
5966 1
5967
5968 >>> len(db().select(db.person.ALL, db.dog.name,left=db.dog.on(db.dog.owner==db.person.id)))
5969 1
5970
5971 Drop tables
5972
5973 >>> db.dog.drop()
5974 >>> db.person.drop()
5975
5976 Example of many 2 many relation and Set
5977
5978 >>> tmp = db.define_table('author', Field('name'),\
5979 migrate='test_author.table')
5980 >>> tmp = db.define_table('paper', Field('title'),\
5981 migrate='test_paper.table')
5982 >>> tmp = db.define_table('authorship',\
5983 Field('author_id', db.author),\
5984 Field('paper_id', db.paper),\
5985 migrate='test_authorship.table')
5986 >>> aid = db.author.insert(name='Massimo')
5987 >>> pid = db.paper.insert(title='QCD')
5988 >>> tmp = db.authorship.insert(author_id=aid, paper_id=pid)
5989
5990 Define a Set
5991
5992 >>> authored_papers = db((db.author.id==db.authorship.author_id)&(db.paper.id==db.authorship.paper_id))
5993 >>> rows = authored_papers.select(db.author.name, db.paper.title)
5994 >>> for row in rows: print row.author.name, row.paper.title
5995 Massimo QCD
5996
5997 Example of search condition using belongs
5998
5999 >>> set = (1, 2, 3)
6000 >>> rows = db(db.paper.id.belongs(set)).select(db.paper.ALL)
6001 >>> print rows[0].title
6002 QCD
6003
6004 Example of search condition using nested select
6005
6006 >>> nested_select = db()._select(db.authorship.paper_id)
6007 >>> rows = db(db.paper.id.belongs(nested_select)).select(db.paper.ALL)
6008 >>> print rows[0].title
6009 QCD
6010
6011 Example of expressions
6012
6013 >>> mynumber = db.define_table('mynumber', Field('x', 'integer'))
6014 >>> db(mynumber.id>0).delete()
6015 0
6016 >>> for i in range(10): tmp = mynumber.insert(x=i)
6017 >>> db(mynumber.id>0).select(mynumber.x.sum())[0](mynumber.x.sum())
6018 45
6019
6020 >>> db(mynumber.x+2==5).select(mynumber.x + 2)[0](mynumber.x + 2)
6021 5
6022
6023 Output in csv
6024
6025 >>> print str(authored_papers.select(db.author.name, db.paper.title)).strip()
6026 author.name,paper.title\r
6027 Massimo,QCD
6028
6029 Delete all leftover tables
6030
6031 >>> DAL.distributed_transaction_commit(db)
6032
6033 >>> db.mynumber.drop()
6034 >>> db.authorship.drop()
6035 >>> db.author.drop()
6036 >>> db.paper.drop()
6037 """
6038
6039
6040
6041
6042 SQLField = Field
6043 SQLTable = Table
6044 SQLXorable = Expression
6045 SQLQuery = Query
6046 SQLSet = Set
6047 SQLRows = Rows
6048 SQLStorage = Row
6049 SQLDB = DAL
6050 GQLDB = DAL
6051 DAL.Field = Field
6052 DAL.Table = Table
6053
6054
6055
6056
6057
6058 if __name__ == '__main__':
6059 import doctest
6060 doctest.testmod()
6061