Welcome to pyDAL’s API documentation!

Contents:

Subpackages

pydal.adapters package

Submodules

pydal.adapters.base module

class pydal.adapters.base.BaseAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.connection.ConnectionPool

adapt(value)[source]
close_connection(**kwargs)
common_filter(query, tablist)[source]
connector()[source]
dbengine = 'None'
drivers = ()
drop_table(table, mode='')[source]
expand_all(fields, tabledict)[source]
find_driver()[source]
get_table(*queries)[source]
iterparse(sql, fields, colnames, blob_decode=True, cacheable=False)[source]

Iterator to parse one row at a time. It doesn’t support the old style virtual fields

parse(rows, fields, colnames, blob_decode=True, cacheable=False)[source]
parse_value(value, field_itype, field_type, blob_decode=True)[source]
represent(obj, field_type)[source]
rowslice(rows, minimum=0, maximum=None)[source]
sqlsafe_field(fieldname)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
support_distributed_transaction = False
tables(*queries)[source]
test_connection()[source]
types
uploads_in_blob = False
class pydal.adapters.base.DebugHandler(adapter)[source]

Bases: pydal.helpers.classes.ExecutionHandler

before_execute(command)[source]
class pydal.adapters.base.NoSQLAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.BaseAdapter

can_select_for_update = False
commit()[source]
commit_prepared(key)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
drop(**kwargs)
drop_table(table, mode='')[source]
id_query(table)[source]
nested_select(*args, **kwargs)[source]
prepare()[source]
rollback()[source]
rollback_prepared(key)[source]
class pydal.adapters.base.NullAdapter(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.BaseAdapter

connector()[source]
find_driver()[source]
class pydal.adapters.base.SQLAdapter(*args, **kwargs)[source]

Bases: pydal.adapters.base.BaseAdapter

adapt(obj)[source]
bulk_insert(table, items)[source]
can_select_for_update = True
commit(**kwargs)
commit_on_alter_table = False
commit_prepared(**kwargs)
count(query, distinct=None)[source]
create_index(table, index_name, *fields, **kwargs)[source]
create_sequence_and_triggers(query, table, **args)[source]
create_table(*args, **kwargs)[source]
delete(table, query)[source]
distributed_transaction_begin(key)[source]
drop(**kwargs)
drop_index(table, index_name)[source]
drop_table(table, mode='')[source]
execute(**kwargs)
execution_handlers = []
fetchall()[source]
fetchone()[source]
filter_sql_command(command)[source]
id_query(table)[source]
index_expander(**kwds)[source]
insert(table, fields)[source]
iterselect(query, fields, attributes)[source]
lastrowid(table)[source]
migrator_cls

alias of pydal.migrator.Migrator

nested_select(query, fields, attributes)[source]
prepare(**kwargs)
represent(obj, field_type)[source]
rollback(**kwargs)
rollback_prepared(**kwargs)
select(query, fields, attributes)[source]
smart_adapt(obj)[source]
sqlsafe_field(fieldname)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
table_alias(tbl, current_scope=[])[source]
test_connection()[source]
truncate(table, mode='')[source]
update(table, query, fields)[source]

pydal.adapters.couchdb module

class pydal.adapters.couchdb.CouchDB(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.adapters.base.NoSQLAdapter

connector()[source]
count(query, distinct=None)[source]
create_table(table, migrate=True, fake_migrate=False, polymodel=None)[source]
dbengine = 'couchdb'
delete(table, query)[source]
drivers = ('couchdb',)
insert(table, fields)[source]
select(query, fields, attributes)[source]
update(table, query, fields)[source]
uploads_in_blob = True

pydal.adapters.cubrid module

pydal.adapters.db2 module

class pydal.adapters.db2.DB2(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

dbengine = 'db2'
execute(**kwargs)
lastrowid(table)[source]
rowslice(rows, minimum=0, maximum=None)[source]
class pydal.adapters.db2.DB2IBM(*args, **kwargs)[source]

Bases: pydal.adapters.db2.DB2

connector()[source]
drivers = ('ibm_db_dbi',)
class pydal.adapters.db2.DB2Pyodbc(*args, **kwargs)[source]

Bases: pydal.adapters.db2.DB2

connector()[source]
drivers = ('pyodbc',)

pydal.adapters.firebird module

class pydal.adapters.firebird.FireBird(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x2fcfd20>
commit_on_alter_table = True
connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'firebird'
drivers = ('kinterbasdb', 'firebirdsql', 'fdb', 'pyodbc')
lastrowid(table)[source]
support_distributed_transaction = True
class pydal.adapters.firebird.FireBirdEmbedded(*args, **kwargs)[source]

Bases: pydal.adapters.firebird.FireBird

REGEX_URI = <_sre.SRE_Pattern object at 0x2f42ed0>

pydal.adapters.google_adapters module

Adapter for GAE

pydal.adapters.imap module

pydal.adapters.informix module

class pydal.adapters.informix.Informix(*args, **kwargs)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.SQLAdapter

connector()[source]
dbengine = 'informix'
drivers = ('informixdb',)
execute(**kwargs)
lastrowid(table)[source]
test_connection()[source]
class pydal.adapters.informix.InformixSE(*args, **kwargs)[source]

Bases: pydal.adapters.informix.Informix

rowslice(rows, minimum=0, maximum=None)[source]

pydal.adapters.ingres module

class pydal.adapters.ingres.Ingres(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'ingres'
drivers = ('pyodbc',)
class pydal.adapters.ingres.IngresUnicode(*args, **kwargs)[source]

Bases: pydal.adapters.ingres.Ingres

pydal.adapters.mongo module

class pydal.adapters.mongo.Binary[source]

Bases: object

class pydal.adapters.mongo.Expansion(adapter, crud, query, fields=(), tablename=None, groupby=None, distinct=False, having=None)[source]

Bases: object

Class to encapsulate a pydal expression and track the parse expansion and its results.

Two different MongoDB mechanisms are targeted here. If the query is sufficiently simple, then simple queries are generated. The bulk of the complexity here is however to support more complex queries that are targeted to the MongoDB Aggregation Pipeline.

This class supports four operations: ‘count’, ‘select’, ‘update’ and ‘delete’.

Behavior varies somewhat for each operation type; however, the code that builds each pipeline stage is shared wherever the behavior is the same (or similar) across the different operations.

In general an attempt is made to build the query without using the pipeline, and if that fails then the query is rebuilt with the pipeline.

QUERY constructed in _build_pipeline_query():
    $project: used to calculate expressions if needed
    $match: filters out records
FIELDS constructed in _expand_fields():
    FIELDS:COUNT
        $group: filter for distinct if needed
        $group: count the records remaining
    FIELDS:SELECT
        $group: implement aggregations if needed
        $project: implement expressions (etc.) for select
    FIELDS:UPDATE
        $project: implement expressions (etc.) for update
HAVING constructed in _add_having():
    $project: used to calculate expressions
    $match: filters out records
    $project: used to filter out previous expression fields
annotate_expression(expression)[source]
dialect
get_collection(safe=None)[source]
class pydal.adapters.mongo.Mongo(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, after_connection=None, entity_quoting=False)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.NoSQLAdapter

bulk_insert(table, items)[source]
check_notnull(table, values)[source]
check_unique(table, values)[source]
connector()[source]
count(query, distinct=None, snapshot=True)[source]
dbengine = 'mongodb'
delete(table, query, safe=None)[source]
drivers = ('pymongo',)
find_driver()[source]
insert(table, fields, safe=None)[source]

The safe parameter determines whether an asynchronous request or a synchronous action is performed. For safety, synchronous requests are used by default.

object_id(arg=None)[source]

Convert input to a valid MongoDB ObjectId instance

self.object_id(“<random>”) -> ObjectId (not unique) instance
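For illustration only, a minimal sketch of calling object_id on a connected Mongo adapter; the URI and the hex string below are placeholders, not values taken from this documentation:

from pydal import DAL

db = DAL('mongodb://user:password@localhost:27017/test')
# convert a 24-character hex string into a bson ObjectId
oid = db._adapter.object_id('507f1f77bcf86cd799439011')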

represent(obj, field_type)[source]
select(query, fields, attributes, snapshot=False)[source]
truncate(table, mode, safe=None)[source]
update(table, query, fields, safe=None)[source]
class pydal.adapters.mongo.MongoBlob[source]

Bases: pydal.adapters.mongo.Binary

MONGO_BLOB_BYTES = 0
MONGO_BLOB_NON_UTF8_STR = 1
static decode(value)[source]

pydal.adapters.mssql module

class pydal.adapters.mssql.MSSQL(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_ARGPATTERN = <_sre.SRE_Pattern object>
REGEX_DSN = <_sre.SRE_Pattern object>
REGEX_URI = <_sre.SRE_Pattern object at 0x2fc7fc0>
connector()[source]
dbengine = 'mssql'
drivers = ('pyodbc',)
lastrowid(table)[source]
class pydal.adapters.mssql.MSSQL1(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL, pydal.adapters.mssql.Slicer

class pydal.adapters.mssql.MSSQL1N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN, pydal.adapters.mssql.Slicer

class pydal.adapters.mssql.MSSQL3(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

class pydal.adapters.mssql.MSSQL3N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN

class pydal.adapters.mssql.MSSQL4(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

class pydal.adapters.mssql.MSSQL4N(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQLN

class pydal.adapters.mssql.MSSQLN(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL

execute(**kwargs)
represent(obj, field_type)[source]
class pydal.adapters.mssql.Slicer[source]

Bases: object

rowslice(rows, minimum=0, maximum=None)[source]
class pydal.adapters.mssql.Sybase(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL1

connector()[source]
dbengine = 'sybase'
class pydal.adapters.mssql.Vertica(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.mssql.MSSQL1

lastrowid(table)[source]

pydal.adapters.mysql module

class pydal.adapters.mysql.Cubrid(*args, **kwargs)[source]

Bases: pydal.adapters.mysql.MySQL

dbengine = 'cubrid'
drivers = ('cubriddb',)
class pydal.adapters.mysql.MySQL(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x2fb1fb0>
after_connection()[source]
commit_on_alter_table = True
commit_prepared(**kwargs)
connector()[source]
dbengine = 'mysql'
distributed_transaction_begin(key)[source]
drivers = ('MySQLdb', 'pymysql', 'mysqlconnector')
prepare(**kwargs)
rollback_prepared(**kwargs)
support_distributed_transaction = True

pydal.adapters.oracle module

class pydal.adapters.oracle.Oracle(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

after_connection()[source]
cmd_fix = <_sre.SRE_Pattern object>
connector()[source]
create_sequence_and_triggers(query, table, **args)[source]
dbengine = 'oracle'
drivers = ('cx_Oracle',)
execute(**kwargs)
fetchall()[source]
insert(table, fields)[source]
lastrowid(table)[source]
sqlsafe_table(tablename, original_tablename=None)[source]
test_connection()[source]

pydal.adapters.postgres module

class pydal.adapters.postgres.JDBCPostgre(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

REGEX_URI = <_sre.SRE_Pattern object>
after_connection()[source]
connector()[source]
drivers = ('zxJDBC',)
class pydal.adapters.postgres.Postgre(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.helpers.classes.ConnectionConfigurationMixin, pydal.adapters.base.SQLAdapter

REGEX_URI = <_sre.SRE_Pattern object at 0x2fb9ee0>
after_connection()[source]
commit_prepared(**kwargs)
connector()[source]
dbengine = 'postgres'
drivers = ('psycopg2', 'pg8000')
lastrowid(table)[source]
prepare(**kwargs)
rollback_prepared(**kwargs)
support_distributed_transaction = True
class pydal.adapters.postgres.PostgreBoolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgreNew

class pydal.adapters.postgres.PostgreMeta[source]

Bases: pydal.adapters.AdapterMeta

class pydal.adapters.postgres.PostgreNew(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

class pydal.adapters.postgres.PostgrePG8000(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

adapt(obj)[source]
drivers = ('pg8000',)
execute(**kwargs)
class pydal.adapters.postgres.PostgrePG8000Boolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePG8000New, pydal.adapters.postgres.PostgreBoolean

class pydal.adapters.postgres.PostgrePG8000New(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePG8000, pydal.adapters.postgres.PostgreNew

class pydal.adapters.postgres.PostgrePsyco(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.Postgre

adapt(obj)[source]
drivers = ('psycopg2',)
class pydal.adapters.postgres.PostgrePsycoBoolean(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePsycoNew, pydal.adapters.postgres.PostgreBoolean

class pydal.adapters.postgres.PostgrePsycoNew(db, uri, pool_size=0, folder=None, db_codec='UTF-8', credential_decoder=<function IDENTITY>, driver_args={}, adapter_args={}, do_connect=True, srid=4326, after_connection=None)[source]

Bases: pydal.adapters.postgres.PostgrePsyco, pydal.adapters.postgres.PostgreNew

pydal.adapters.sapdb module

pydal.adapters.sqlite module

class pydal.adapters.sqlite.JDBCSQLite(*args, **kwargs)[source]

Bases: pydal.adapters.sqlite.SQLite

after_connection()[source]
connector()[source]
drivers = ('zxJDBC_sqlite',)
class pydal.adapters.sqlite.SQLite(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

after_connection()[source]
connector()[source]
dbengine = 'sqlite'
delete(table, query)[source]
drivers = ('sqlite2', 'sqlite3')
select(query, fields, attributes)[source]
static web2py_extract(lookup, s)[source]
static web2py_regexp(expression, item)[source]
class pydal.adapters.sqlite.Spatialite(*args, **kwargs)[source]

Bases: pydal.adapters.sqlite.SQLite

SPATIALLIBS = {'Darwin': 'libspatialite.dylib', 'Linux': 'libspatialite.so', 'Windows': 'mod_spatialite.dll'}
after_connections()[source]
dbengine = 'spatialite'

pydal.adapters.teradata module

class pydal.adapters.teradata.Teradata(*args, **kwargs)[source]

Bases: pydal.adapters.base.SQLAdapter

close()[source]
connector()[source]
dbengine = ''
drivers = ('pyodbc',)
lastrowid(table)[source]

Module contents

class pydal.adapters.AdapterMeta[source]

Bases: type

Metaclass to support manipulation of adapter classes.

At the moment it is used to intercept the entity_quoting argument passed to the DAL.

class pydal.adapters.Adapters(namespace=None)[source]

Bases: pydal.helpers._internals.Dispatcher

get_for(uri)[source]
register_for(*uris)[source]
pydal.adapters.with_connection(f)[source]
pydal.adapters.with_connection_or_raise(f)[source]
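The pieces above are what custom adapters hook into: Adapters dispatches a connection URI to an adapter class, and register_for is used as a class decorator to claim URI schemes. Below is a minimal sketch of registering a custom adapter for a hypothetical mydb:// scheme, assuming the module-level dispatcher instance is named adapters; the driver choice and connector body are placeholders, not part of the listing above.

from pydal.adapters import adapters
from pydal.adapters.base import SQLAdapter

@adapters.register_for('mydb')        # claim URIs of the form mydb://...
class MyDBAdapter(SQLAdapter):
    dbengine = 'mydb'
    drivers = ('pyodbc',)             # hypothetical driver

    def connector(self):
        # build and return a DBAPI connection from the parsed credentials
        return self.driver.connect(**self.driver_args)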

pydal.helpers package

Submodules

pydal.helpers.classes module

class pydal.helpers.classes.BasicStorage(*args, **kwargs)[source]

Bases: object

clear(*args, **kwargs)
copy(*args, **kwargs)
get(key, default=None)[source]
has_key(key)
items()[source]
iteritems()[source]
iterkeys()[source]
itervalues()[source]
keys()[source]
pop(*args, **kwargs)
update(*args, **kwargs)[source]
values()[source]
class pydal.helpers.classes.ConnectionConfigurationMixin[source]

Bases: object

class pydal.helpers.classes.DatabaseStoredFile(db, filename, mode)[source]
close()[source]
close_connection()[source]
escape(obj)[source]
static exists(db, filename)[source]
static is_operational_error(db, error)[source]
static is_programming_error(db, error)[source]
read(bytes=None)[source]
readline()[source]
static try_create_web2py_filesystem(db)[source]
web2py_filesystems = set([])
write(data)[source]
class pydal.helpers.classes.ExecutionHandler(adapter)[source]

Bases: object

after_execute(command)[source]
before_execute(command)[source]
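ExecutionHandler subclasses wrap every command an adapter executes; DebugHandler above and TimingHandler below are the built-in examples. A minimal sketch of a custom logging handler follows; the registration line assumes handlers are picked up from DAL.execution_handlers, the same list that holds TimingHandler.

import logging
from pydal import DAL
from pydal.helpers.classes import ExecutionHandler

class LoggingHandler(ExecutionHandler):
    def before_execute(self, command):
        logging.debug("executing: %s", command)

    def after_execute(self, command):
        logging.debug("done: %s", command)

db = DAL('sqlite://storage.sqlite')
db.execution_handlers.append(LoggingHandler)  # assumption: same hook TimingHandler uses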
class pydal.helpers.classes.FakeCursor[source]

Bases: object

The Python Database API Specification defines a cursor() method, which NoSQL drivers generally don't support. If the exception in this function is raised, it likely means that some piece of functionality has not yet been implemented in the driver, and something is trying to use the cursor.

https://www.python.org/dev/peps/pep-0249/

warn_bad_usage(attr)[source]
class pydal.helpers.classes.FakeDriver(*args, **kwargs)[source]

Bases: pydal.helpers.classes.BasicStorage

close()[source]
commit()[source]
cursor()[source]
class pydal.helpers.classes.MethodAdder(table)[source]

Bases: object

register(method_name=None)[source]
class pydal.helpers.classes.NullCursor[source]

Bases: pydal.helpers.classes.FakeCursor

lastrowid = 1
class pydal.helpers.classes.NullDriver(*args, **kwargs)[source]

Bases: pydal.helpers.classes.FakeDriver

class pydal.helpers.classes.OpRow(table)[source]

Bases: object

del_value(key)[source]
get(key, default=None)[source]
items()[source]
iteritems()[source]
iterkeys()[source]
itervalues()[source]
keys()[source]
op_values()[source]
set_value(key, value, field=None)[source]
values()[source]
class pydal.helpers.classes.RecordDeleter(colset, table, id)[source]

Bases: pydal.helpers.classes.RecordOperator

class pydal.helpers.classes.RecordOperator(colset, table, id)[source]

Bases: object

class pydal.helpers.classes.RecordUpdater(colset, table, id)[source]

Bases: pydal.helpers.classes.RecordOperator

class pydal.helpers.classes.Reference[source]

Bases: long

get(key, default=None)[source]
pydal.helpers.classes.Reference_pickler(data)[source]
pydal.helpers.classes.Reference_unpickler(data)[source]
class pydal.helpers.classes.SQLALL(table)[source]

Bases: object

Helper class providing a comma-separated string of all the field names (prefixed by the table name and ‘.’).

Normally only called from within gluon.dal.

class pydal.helpers.classes.SQLCallableList[source]

Bases: list

class pydal.helpers.classes.SQLCustomType(type='string', native=None, encoder=None, decoder=None, validator=None, _class=None, widget=None, represent=None)[source]

Bases: object

Allows defining of custom SQL types

Parameters:
  • type – the web2py type (default = ‘string’)
  • native – the backend type
  • encoder – how to encode the value to store it in the backend
  • decoder – how to decode the value retrieved from the backend
  • validator – what validators to use ( default = None, will use the default validator for type)
Example:

Define as:

decimal = SQLCustomType(
    type='double',
    native='integer',
    encoder=(lambda x: int(float(x) * 100)),
    decoder=(lambda x: Decimal("0.00") + Decimal(str(float(x) / 100)))
)

db.define_table(
    'example',
    Field('value', type=decimal)
)
endswith(text=None)[source]
startswith(text=None)[source]
class pydal.helpers.classes.Serializable[source]

Bases: object

as_dict(flat=False, sanitize=True)[source]
as_json(sanitize=True)[source]
as_xml(sanitize=True)[source]
as_yaml(sanitize=True)[source]
class pydal.helpers.classes.TimingHandler(adapter)[source]

Bases: pydal.helpers.classes.ExecutionHandler

MAXSTORAGE = 100
after_execute(command)[source]
before_execute(command)[source]
timings
class pydal.helpers.classes.cachedprop(fget, doc=None)[source]

Bases: object

pydal.helpers.classes.pickle_basicstorage(s)[source]

pydal.helpers.methods module

pydal.helpers.methods.archive_record(qset, fs, archive_table, current_record)[source]
pydal.helpers.methods.attempt_upload(table, fields)[source]
pydal.helpers.methods.attempt_upload_on_insert(table)[source]
pydal.helpers.methods.attempt_upload_on_update(table)[source]
pydal.helpers.methods.auto_represent(field)[source]
pydal.helpers.methods.auto_validators(field)[source]
pydal.helpers.methods.bar_decode_integer(value)[source]
pydal.helpers.methods.bar_decode_string(value)[source]
pydal.helpers.methods.bar_encode(items)[source]
pydal.helpers.methods.bar_escape(item)[source]
pydal.helpers.methods.bar_unescape(item)[source]
pydal.helpers.methods.cleanup(text)[source]

Validates that the given text is clean: only contains [0-9a-zA-Z_]

pydal.helpers.methods.delete_uploaded_files(dbset, upload_fields=None)[source]
pydal.helpers.methods.geoLine(*line)[source]
pydal.helpers.methods.geoPoint(x, y)[source]
pydal.helpers.methods.geoPolygon(*line)[source]
pydal.helpers.methods.hide_password(uri)[source]
pydal.helpers.methods.int2uuid(n)[source]
pydal.helpers.methods.list_represent(values, row=None)[source]
pydal.helpers.methods.merge_tablemaps(*maplist)[source]

Merge arguments into a single dict, check for name collisions.

pydal.helpers.methods.pluralize(singular, rules=[(<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'children'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'eet'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'eeth'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'l\\1aves'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'ses'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'men'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'ives'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'eaux'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'lves'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'es'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'es'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 'ies'), (<_sre.SRE_Pattern object>, <_sre.SRE_Pattern object>, 's')])[source]
pydal.helpers.methods.smart_query(fields, text)[source]
pydal.helpers.methods.use_common_filters(query)[source]
pydal.helpers.methods.uuid2int(uuidv)[source]
pydal.helpers.methods.varquote_aux(name, quotestr='%s')[source]
pydal.helpers.methods.xorify(orderby)[source]

pydal.helpers.regex module

Module contents

Submodules

pydal.base module

This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <mdipierro@cs.depaul.edu>

This file contains the DAL support for many relational databases, including:

  • SQLite & SpatiaLite
  • MySQL
  • Postgres
  • Firebird
  • Oracle
  • MS SQL
  • DB2
  • Interbase
  • Ingres
  • Informix (9+ and SE)
  • SapDB (experimental)
  • Cubrid (experimental)
  • CouchDB (experimental)
  • MongoDB (in progress)
  • Google:nosql
  • Google:sql
  • Teradata
  • IMAP (experimental)

Example of usage:

>>> # from dal import DAL, Field

### create DAL connection (and create DB if it doesn't exist)
>>> db = DAL(('sqlite://storage.sqlite','mysql://a:b@localhost/x'),
... folder=None)

### define a table 'person' (create/alter as necessary)
>>> person = db.define_table('person',Field('name','string'))

### insert a record
>>> id = person.insert(name='James')

### retrieve it by id
>>> james = person(id)

### retrieve it by name
>>> james = person(name='James')

### retrieve it by arbitrary query
>>> query = (person.name=='James') & (person.name.startswith('J'))
>>> james = db(query).select(person.ALL)[0]

### update one record
>>> james.update_record(name='Jim')
<Row {'id': 1, 'name': 'Jim'}>

### update multiple records by query
>>> db(person.name.like('J%')).update(name='James')
1

### delete records by query
>>> db(person.name.lower() == 'jim').delete()
0

### retrieve multiple records (rows)
>>> people = db(person).select(orderby=person.name,
... groupby=person.name, limitby=(0,100))

### further filter them
>>> james = people.find(lambda row: row.name == 'James').first()
>>> print james.id, james.name
1 James

### check aggregates
>>> counter = person.id.count()
>>> print db(person).select(counter).first()(counter)
1

### delete one record
>>> james.delete_record()
1

### delete (drop) entire database table
>>> person.drop()

Supported DAL URI strings:

'sqlite://test.db'
'spatialite://test.db'
'sqlite:memory'
'spatialite:memory'
'jdbc:sqlite://test.db'
'mysql://root:none@localhost/test'
'postgres://mdipierro:password@localhost/test'
'postgres:psycopg2://mdipierro:password@localhost/test'
'postgres:pg8000://mdipierro:password@localhost/test'
'jdbc:postgres://mdipierro:none@localhost/test'
'mssql://web2py:none@A64X2/web2py_test'
'mssql2://web2py:none@A64X2/web2py_test' # alternate mappings
'mssql3://web2py:none@A64X2/web2py_test' # better pagination (requires >= 2005)
'mssql4://web2py:none@A64X2/web2py_test' # best pagination (requires >= 2012)
'oracle://username:password@database'
'firebird://user:password@server:3050/database'
'db2:ibm_db_dbi://DSN=dsn;UID=user;PWD=pass'
'db2:pyodbc://driver=DB2;hostname=host;database=database;uid=user;pwd=password;port=port'
'firebird://username:password@hostname/database'
'firebird_embedded://username:password@c://path'
'informix://user:password@server:3050/database'
'informixu://user:password@server:3050/database' # unicode informix
'ingres://database'  # or use an ODBC connection string, e.g. 'ingres://dsn=dsn_name'
'google:datastore' # for google app engine datastore (uses ndb by default)
'google:sql' # for google app engine with sql (mysql compatible)
'teradata://DSN=dsn;UID=user;PWD=pass; DATABASE=database' # experimental
'imap://user:password@server:port' # experimental
'mongodb://user:password@server:port/database' # experimental

For more info:

help(DAL)
help(Field)
class pydal.base.DAL(uri='sqlite://dummy.db', pool_size=0, folder=None, db_codec='UTF-8', check_reserved=None, migrate=True, fake_migrate=False, migrate_enabled=True, fake_migrate_all=False, decode_credentials=False, driver_args=None, adapter_args=None, attempts=5, auto_import=False, bigint_id=False, debug=False, lazy_tables=False, db_uid=None, do_connect=True, after_connection=None, tables=None, ignore_field_case=True, entity_quoting=True, table_hash=None)[source]

Bases: pydal.helpers.classes.Serializable, pydal.helpers.classes.BasicStorage

An instance of this class represents a database connection

Parameters:
  • uri (str) –

    contains information for connecting to a database. Defaults to ‘sqlite://dummy.db’

    Note

    experimental: you can specify a dictionary as uri parameter i.e. with:

    db = DAL({"uri": "sqlite://storage.sqlite",
              "tables": {...}, ...})
    

    for an example of dict input you can check the output of the scaffolding db model with

    db.as_dict()

    Note that for compatibility with Python versions older than 2.6.5 you should cast your dict input keys to str due to a syntax limitation on kwarg names. For proper DAL dictionary input you can use one of:

    obj = serializers.cast_keys(dict, [encoding="utf-8"])
    #or else (for parsing json input)
    obj = serializers.loads_json(data, unicode_keys=False)
    
  • pool_size – How many open connections to make to the database object.
  • folder – where .table files will be created. Automatically set within web2py. Use an explicit path when using DAL outside web2py
  • db_codec – string encoding of the database (default: ‘UTF-8’)
  • table_hash – database identifier with .tables. If your connection hash changes you can still use the old .tables if they have db_hash as a prefix
  • check_reserved

    list of adapters to check tablenames and column names against sql/nosql reserved keywords. Defaults to None

    • ‘common’ List of sql keywords that are common to all database types such as “SELECT, INSERT”. (recommended)
    • ‘all’ Checks against all known SQL keywords
    • ‘<adaptername>’ Checks against the specific adapter’s list of keywords
    • ‘<adaptername>_nonreserved’ Checks against the specific adapter’s list of nonreserved keywords (if available)
  • migrate – sets default migrate behavior for all tables
  • fake_migrate – sets default fake_migrate behavior for all tables
  • migrate_enabled – If set to False disables ALL migrations
  • fake_migrate_all – If set to True fake migrates ALL tables
  • attempts – Number of times to attempt connecting
  • auto_import – If set to True, tries to import table definitions automatically from the databases folder (works only for simple models)
  • bigint_id – If set, turn on bigint instead of int for id and reference fields
  • lazy_tables – delays table definition until table access
  • after_connection – can be a callable that will be executed after the connection is established

Example

Use as:

db = DAL('sqlite://test.db')

or:

db = DAL(**{"uri": ..., "tables": [...]...}) # experimental

db.define_table('tablename', Field('fieldname1'),
                             Field('fieldname2'))
class Row(*args, **kwargs)

Bases: pydal.helpers.classes.BasicStorage

A dictionary that lets you do d[‘a’] as well as d.a. This is only used to store a Row.

as_dict(datetime_to_str=False, custom_types=None)
as_json(mode='object', default=None, colnames=None, serialize=True, **kwargs)

Serializes the row to a JSON object. kwargs are passed to the .as_dict method; only “object” mode is supported.

serialize=False is used by Rows.as_json.

TODO: return array mode with query column order.

mode and colnames are not implemented.

as_xml(row_name='row', colnames=None, indent=' ')
get(key, default=None)
class Rows(db=None, records=[], colnames=[], compact=True, rawrows=None, fields=[])

Bases: pydal.objects.BasicRows

A wrapper for the return value of a select. It basically represents a table. It has an iterator and each row is represented as a Row dictionary.

append(row)
column(column=None)
exclude(f)

Removes elements from the calling Rows object, filtered by the function f, and returns a new Rows object containing the removed elements

find(f, limitby=None)

Returns a new Rows object, a subset of the original object, filtered by the function f

first()
group_by_value(*fields, **args)

Regroups the rows, by one of the fields

insert(position, row)
join(field, name=None, constraint=None, fields=[], orderby=None)
last()
render(i=None, fields=None)

Takes an index and returns a copy of the indexed row with values transformed via the “represent” attributes of the associated fields.

Parameters:
  • i – index. If not specified, a generator is returned for iteration over all the rows.
  • fields – a list of fields to transform (if None, all fields with “represent” attributes will be transformed)
setvirtualfields(**keyed_virtualfields)

For reference:

db.define_table('x', Field('number', 'integer'))
if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]

from gluon.dal import lazy_virtualfield

class MyVirtualFields(object):
    # normal virtual field (backward compatible, discouraged)
    def normal_shift(self): return self.x.number+1
    # lazy virtual field (because of @staticmethod)
    @lazy_virtualfield
    def lazy_shift(instance, row, delta=4): return row.x.number+delta
db.x.virtualfields.append(MyVirtualFields())

for row in db(db.x).select():
    print row.number, row.normal_shift, row.lazy_shift(delta=7)
sort(f, reverse=False)

Returns a list of sorted elements (not sorted in place)

class Table(db, tablename, *fields, **args)

Bases: pydal.helpers.classes.Serializable, pydal.helpers.classes.BasicStorage

Represents a database table

Example:

You can create a table as:

db = DAL(...)
db.define_table('users', Field('name'))

And then:

db.users.insert(name='me') # print db.users._insert(...) to see SQL
db.users.drop()
as_dict(flat=False, sanitize=True)
bulk_insert(items)

here items is a list of dictionaries

create_index(name, *fields, **kwargs)
drop(mode='')
drop_index(name)
fields
import_from_csv_file(csvfile, id_map=None, null='<NULL>', unique='uuid', id_offset=None, transform=None, validate=False, **kwargs)

Import records from a csv file. Column headers must have the same names as the table fields. The ‘id’ field is ignored. If column names read ‘table.field’ the ‘table.’ prefix is ignored.

  • ‘unique’ argument is a field which must be unique (typically a uuid field)
  • ‘restore’ argument defaults to False; if set to True, old values in the table are removed first
  • ‘id_map’ if set to None will not map ids

The import will keep the id numbers in the restored table. This assumes that there is a field of type id that is an integer and in incrementing order.

insert(**fields)
on(query)
query_name(*args, **kwargs)
sql_fullref
sql_shortref
sqlsafe
sqlsafe_alias
truncate(mode='')
update(*args, **kwargs)
update_or_insert(_key=<function <lambda>>, **values)
validate_and_insert(**fields)
validate_and_update(_key=<function <lambda>>, **fields)
validate_and_update_or_insert(_key=<function <lambda>>, **fields)
with_alias(alias)
as_dict(flat=False, sanitize=True)[source]
can_join()[source]
check_reserved_keyword(name)[source]

Validates name against SQL keywords. Uses self._check_reserved, which is a list of operators to use.

close()[source]
commit()[source]
define_table(tablename, *fields, **kwargs)[source]
static distributed_transaction_begin(*instances)[source]
static distributed_transaction_commit(*instances)[source]
executesql(query, placeholders=None, as_dict=False, fields=None, colnames=None, as_ordered_dict=False)[source]

Executes an arbitrary query

Parameters:
  • query (str) – the query to submit to the backend
  • placeholders – is optional and will always be None. If using raw SQL with placeholders, placeholders may be a sequence of values to be substituted in or, (if supported by the DB driver), a dictionary with keys matching named placeholders in your SQL.
  • as_dict – will always be None when using DAL. If using raw SQL can be set to True and the results cursor returned by the DB driver will be converted to a sequence of dictionaries keyed with the db field names. Results returned with as_dict=True are the same as those returned when applying .to_list() to a DAL query. If “as_ordered_dict”=True the behaviour is the same as when “as_dict”=True with the keys (field names) guaranteed to be in the same order as returned by the select name executed on the database.
  • fields

    list of DAL Fields that match the fields returned from the DB. The Field objects should be part of one or more Table objects defined on the DAL object. The “fields” list can include one or more DAL Table objects in addition to or instead of including Field objects, or it can be just a single table (not in a list). In that case, the Field objects will be extracted from the table(s).

    Note

    if either fields or colnames is provided, the results will be converted to a DAL Rows object using the db._adapter.parse() method

  • colnames – list of field names in tablename.fieldname format

Note

It is also possible to specify both “fields” and the associated “colnames”. In that case, “fields” can also include DAL Expression objects in addition to Field objects. For Field objects in “fields”, the associated “colnames” must still be in tablename.fieldname format. For Expression objects in “fields”, the associated “colnames” can be any arbitrary labels.

DAL Table objects referred to by “fields” or “colnames” can be dummy tables and do not have to represent any real tables in the database. Also, note that the “fields” and “colnames” must be in the same order as the fields in the results cursor returned from the DB.
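A hedged sketch of executesql usage; the table and field names below are examples only, not part of the API description above:

from pydal import DAL, Field

db = DAL('sqlite://test.db')
db.define_table('person', Field('name'))
db.person.insert(name='James')
db.commit()

raw = db.executesql("SELECT id, name FROM person;")                    # list of tuples from the driver
dicts = db.executesql("SELECT id, name FROM person;", as_dict=True)    # list of dicts keyed by column name
rows = db.executesql("SELECT id, name FROM person;", fields=db.person) # parsed into a DAL Rows object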

execution_handlers = [<class 'pydal.helpers.classes.TimingHandler'>]
export_to_csv_file(ofile, *args, **kwargs)[source]
static get_instances()[source]

Returns a dictionary with uri as key with timings and defined tables:

{'sqlite://storage.sqlite': {
    'dbstats': [(select auth_user.email from auth_user, 0.02009)],
    'dbtables': {
        'defined': ['auth_cas', 'auth_event', 'auth_group',
            'auth_membership', 'auth_permission', 'auth_user'],
        'lazy': '[]'
        }
    }
}
has_representer(name)[source]
import_from_csv_file(ifile, id_map=None, null='<NULL>', unique='uuid', map_tablenames=None, ignore_missing_tables=False, *args, **kwargs)[source]
import_table_definitions(path, migrate=False, fake_migrate=False, tables=None)[source]
lazy_define_table(tablename, *fields, **kwargs)[source]
logger = <logging.Logger object>
parse_as_rest(patterns, args, vars, queries=None, nested_select=True)[source]
record_operators = {'delete_record': <class 'pydal.helpers.classes.RecordDeleter'>, 'update_record': <class 'pydal.helpers.classes.RecordUpdater'>}
represent(name, *args, **kwargs)[source]
representers = {}
rollback()[source]
serializers = None
static set_folder(folder)[source]
smart_query(fields, text)[source]
tables
uuid()
validators = None
validators_method = None
where(query=None, ignore_common_filters=None)[source]
pydal.base.DAL_pickler(db)[source]
pydal.base.DAL_unpickler(db_uid)[source]
class pydal.base.MetaDAL[source]

Bases: type

pydal.connection module

class pydal.connection.ConnectionPool[source]

Bases: object

POOLS = {}
after_connection()[source]
after_connection_hook()[source]

Hook for the after_connection parameter

check_active_connection = True
close(action='commit', really=True)[source]
static close_all_instances(action)[source]

To cleanly close databases in a multithreaded environment.

close_cursor(cursor)[source]
connection
cursor
cursors
lock_cursor(cursor)[source]
reconnect()[source]

Defines self.connection and self.cursor. If self.pool_size > 0 it will try to pull the connection from the pool; if the connection is not active (closed by the db server) it will loop. If self.pool_size is not set, or there are no active connections in the pool, it makes a new one.

release_cursor(cursor)[source]
static set_folder(folder)[source]

pydal.objects module

class pydal.objects.BasicRows[source]

Bases: object

Abstract class for Rows and IterRows

as_csv()

Serializes the table into a csv file

as_dict(key='id', compact=True, storage_to_dict=True, datetime_to_str=False, custom_types=None)[source]

Returns the data as a dictionary of dictionaries (storage_to_dict=True) or records (False)

Parameters:
  • key – the name of the field to be used as dict key, normally the id
  • compact – ? (default True)
  • storage_to_dict – when True returns a dict, otherwise a list(default True)
  • datetime_to_str – convert datetime fields as strings (default False)
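A short sketch contrasting as_dict and as_list on a selected Rows object; the table and field names are illustrative:

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('person', Field('name'))
db.person.insert(name='James')

rows = db(db.person).select()
print(rows.as_dict())   # keyed by the 'id' field: {1: {'id': 1, 'name': 'James'}}
print(rows.as_list())   # [{'id': 1, 'name': 'James'}]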
as_json(mode='object', default=None)[source]

Serializes the rows to a JSON list or object with objects. mode=’object’ is not implemented (should return a nested object structure).

as_list(compact=True, storage_to_dict=True, datetime_to_str=False, custom_types=None)[source]

Returns the data as a list or dictionary.

Parameters:
  • storage_to_dict – when True returns a dict, otherwise a list
  • datetime_to_str – convert datetime fields as strings
as_trees(parent_name='parent_id', children_name='children', render=False)[source]

Returns the data as a list of trees.

Parameters:
  • parent_name – the name of the field holding the reference to the parent (default parent_id).
  • children_name – the name where the children of each row will be stored as a list (default children).
  • render – whether we will render the fields using their represent (default False) can be a list of fields to render or True to render all.
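A minimal sketch of as_trees on a self-referencing table; the table and field names are illustrative, and parent_id matches the default parent_name:

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('category',
                Field('name'),
                Field('parent_id', 'reference category'))
root = db.category.insert(name='root', parent_id=None)
db.category.insert(name='child', parent_id=root)

trees = db(db.category).select().as_trees()
# each top-level row carries its descendants in its 'children' list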
as_xml(row_name='row', rows_name='rows')[source]
export_to_csv_file(ofile, null='<NULL>', *args, **kwargs)[source]

Exports data to csv; the first line contains the column names

Parameters:
  • ofile – where the csv must be exported to
  • null – how null values must be represented (default ‘<NULL>’)
  • delimiter – delimiter to separate values (default ‘,’)
  • quotechar – character to use to quote string values (default ‘”’)
  • quoting – quote system, use csv.QUOTE_*** (default csv.QUOTE_MINIMAL)
  • represent – use the fields .represent value (default False)
  • colnames – list of column names to use (default self.colnames)

This will only work when exporting rows objects!!!! DO NOT use this with db.export_to_csv()
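A short sketch of exporting a selection to CSV; the file, table, and field names are illustrative, and represent=True applies each field’s represent before writing:

from pydal import DAL, Field

db = DAL('sqlite://test.db')
db.define_table('person', Field('name'))
db.person.insert(name='James')

with open('people.csv', 'w') as ofile:
    db(db.person).select().export_to_csv_file(ofile, represent=True)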

json(mode='object', default=None)

Serializes the rows to a JSON list or object with objects. mode=’object’ is not implemented (should return a nested object structure).

xml(strict=False, row_name='row', rows_name='rows')[source]

Serializes the table using sqlhtml.SQLTABLE (if present)

class pydal.objects.Expression(db, op, first=None, second=None, type=None, **optional_args)[source]

Bases: object

abs()[source]
avg()[source]
belongs(*value, **kwattr)[source]

Accepts the following inputs:

field.belongs(1, 2)
field.belongs((1, 2))
field.belongs(query)

Does NOT accept:

field.belongs(1)

If the set you want back includes None values, you can do:

field.belongs((1, None), null=True)
cast(cast_as, **kwargs)[source]
coalesce(*others)[source]
coalesce_zero()[source]
contains(value, all=False, case_sensitive=False)[source]

For GAE contains() is always case sensitive

day()[source]
endswith(value)[source]
epoch()[source]
hour()[source]
ilike(value, escape=None)[source]
len()[source]
like(value, case_sensitive=True, escape=None)[source]
lower()[source]
max()[source]
min()[source]
minutes()[source]
month()[source]
regexp(value)[source]
replace(a, b)[source]
seconds()[source]
st_asgeojson(precision=15, options=0, version=1)[source]
st_astext()[source]
st_contains(value)[source]
st_distance(other)[source]
st_dwithin(value, distance)[source]
st_equals(value)[source]
st_intersects(value)[source]
st_overlaps(value)[source]
st_simplify(value)[source]
st_simplifypreservetopology(value)[source]
st_touches(value)[source]
st_within(value)[source]
st_x()[source]
st_y()[source]
startswith(value)[source]
sum()[source]
upper()[source]
with_alias(alias)[source]
year()[source]
class pydal.objects.Field(fieldname, type='string', length=None, default=<function <lambda>>, required=False, requires=<function <lambda>>, ondelete='CASCADE', notnull=False, unique=False, uploadfield=True, widget=None, label=None, comment=None, writable=True, readable=True, searchable=True, listable=True, regex=None, options=None, update=None, authorize=None, autodelete=False, represent=None, uploadfolder=None, uploadseparate=False, uploadfs=None, compute=None, custom_store=None, custom_retrieve=None, custom_retrieve_file_properties=None, custom_delete=None, filter_in=None, filter_out=None, custom_qualifier=None, map_none=None, rname=None, **others)[source]

Bases: pydal.objects.Expression, pydal.helpers.classes.Serializable

Represents a database field

Example

Usage:

a = Field(name, 'string', length=32, default=None, required=False,
    requires=IS_NOT_EMPTY(), ondelete='CASCADE',
    notnull=False, unique=False,
    regex=None, options=None,
    widget=None, label=None, comment=None,
    uploadfield=True, # True means store on disk,
                      # 'a_field_name' means store in this field in db
                      # False means file content will be discarded.
    writable=True, readable=True, searchable=True, listable=True,
    update=None, authorize=None,
    autodelete=False, represent=None, uploadfolder=None,
    uploadseparate=False # upload to separate directories by uuid_keys
                         # first 2 character and tablename.fieldname
                         # False - old behavior
                         # True - put uploaded file in
                         #   <uploaddir>/<tablename>.<fieldname>/uuid_key[:2]
                         #        directory)
    uploadfs=None        # a pyfilesystem where to store upload
    )

to be used as argument of DAL.define_table

Lazy

alias of FieldMethod

Method

alias of FieldMethod

Virtual

alias of FieldVirtual

as_dict(flat=False, sanitize=True)[source]
bind(table)[source]
clone(point_self_references_to=False, **args)[source]
count(distinct=None)[source]
formatter(value)[source]
longname
retrieve(name, path=None, nameonly=False)[source]

If nameonly==True return (filename, fullfilename) instead of (filename, stream)

retrieve_file_properties(name, path=None)[source]
set_attributes(*args, **attributes)[source]
sqlsafe
sqlsafe_name
store(file, filename=None, path=None)[source]
validate(value)[source]
class pydal.objects.FieldMethod(name, f=None, handler=None)[source]

Bases: object

class pydal.objects.FieldVirtual(name, f=None, ftype='string', label=None, table_name=None, readable=True, listable=True)[source]

Bases: object

class pydal.objects.IterRows(db, sql, fields, colnames, blob_decode, cacheable)[source]

Bases: pydal.objects.BasicRows

first()[source]
next()
class pydal.objects.LazyReferenceGetter(table, id)[source]

Bases: object

class pydal.objects.LazySet(field, id)[source]

Bases: object

count(distinct=None, cache=None)[source]
delete()[source]
isempty()[source]
nested_select(*fields, **attributes)[source]
select(*fields, **attributes)[source]
update(**update_fields)[source]
update_naive(**update_fields)[source]
validate_and_update(**update_fields)[source]
where(query, ignore_common_filters=False)[source]
class pydal.objects.Query(db, op, first=None, second=None, ignore_common_filters=False, **optional_args)[source]

Bases: pydal.helpers.classes.Serializable

Necessary to define a set. It can be stored or can be passed to DAL.__call__() to obtain a Set

Example

Use as:

query = db.users.name=='Max'
set = db(query)
records = set.select()
as_dict(flat=False, sanitize=True)[source]

Experimental stuff

This allows returning a plain dictionary with the basic query representation. It can be used with json/xml services for client-side db I/O

Example

Usage:

q = db.auth_user.id != 0
q.as_dict(flat=True)
{
    "op": "NE",
    "first": {
        "tablename": "auth_user",
        "fieldname": "id"
    },
    "second": 0
}
case(t=1, f=0)[source]
class pydal.objects.Row(*args, **kwargs)[source]

Bases: pydal.helpers.classes.BasicStorage

A dictionary that lets you do d[‘a’] as well as d.a. This is only used to store a Row.

as_dict(datetime_to_str=False, custom_types=None)[source]
as_json(mode='object', default=None, colnames=None, serialize=True, **kwargs)[source]

Serializes the row to a JSON object. kwargs are passed to the .as_dict method; only “object” mode is supported.

serialize=False is used by Rows.as_json.

TODO: return array mode with query column order.

mode and colnames are not implemented.

as_xml(row_name='row', colnames=None, indent=' ')[source]
get(key, default=None)[source]
class pydal.objects.Rows(db=None, records=[], colnames=[], compact=True, rawrows=None, fields=[])[source]

Bases: pydal.objects.BasicRows

A wrapper for the return value of a select. It basically represents a table. It has an iterator and each row is represented as a Row dictionary.

append(row)[source]
column(column=None)[source]
exclude(f)[source]

Removes elements from the calling Rows object, filtered by the function f, and returns a new Rows object containing the removed elements

find(f, limitby=None)[source]

Returns a new Rows object, a subset of the original object, filtered by the function f

first()[source]
group_by_value(*fields, **args)[source]

Regroups the rows, by one of the fields

insert(position, row)[source]
join(field, name=None, constraint=None, fields=[], orderby=None)[source]
last()[source]
render(i=None, fields=None)[source]

Takes an index and returns a copy of the indexed row with values transformed via the “represent” attributes of the associated fields.

Parameters:
  • i – index. If not specified, a generator is returned for iteration over all the rows.
  • fields – a list of fields to transform (if None, all fields with “represent” attributes will be transformed)
setvirtualfields(**keyed_virtualfields)[source]

For reference:

db.define_table('x', Field('number', 'integer'))
if db(db.x).isempty(): [db.x.insert(number=i) for i in range(10)]

from gluon.dal import lazy_virtualfield

class MyVirtualFields(object):
    # normal virtual field (backward compatible, discouraged)
    def normal_shift(self): return self.x.number+1
    # lazy virtual field (because of @staticmethod)
    @lazy_virtualfield
    def lazy_shift(instance, row, delta=4): return row.x.number+delta
db.x.virtualfields.append(MyVirtualFields())

for row in db(db.x).select():
    print row.number, row.normal_shift, row.lazy_shift(delta=7)
sort(f, reverse=False)[source]

Returns a list of sorted elements (not sorted in place)
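A short sketch showing find, exclude, and sort together; the table and field names are illustrative. Note that exclude mutates the original Rows object, while find and sort do not:

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('person', Field('name'), Field('age', 'integer'))
db.person.bulk_insert([{'name': 'Ann', 'age': 30},
                       {'name': 'Bob', 'age': 15},
                       {'name': 'Cid', 'age': 45}])

rows = db(db.person).select()
adults = rows.find(lambda row: row.age >= 18)    # new Rows with Ann and Cid
minors = rows.exclude(lambda row: row.age < 18)  # removed rows; 'rows' now holds adults only
by_age = rows.sort(lambda row: row.age)          # list of rows sorted by age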

class pydal.objects.Select(db, query, fields, attributes)[source]

Bases: pydal.helpers.classes.BasicStorage

fields
on(query)[source]
query_name(outer_scoped=[])[source]
sql_shortref
update(*args, **kwargs)[source]
with_alias(alias)[source]
class pydal.objects.Set(db, query, ignore_common_filters=None)[source]

Bases: pydal.helpers.classes.Serializable

Represents a set of records in the database. Records are identified by the query=Query(…) object. Normally the Set is generated by DAL.__call__(Query(…))

Given a set, for example:

myset = db(db.users.name=='Max')

you can:

myset.update(name='Massimo')
myset.delete() # all elements in the set
myset.select(orderby=db.users.id, groupby=db.users.name, limitby=(0, 10))

and take subsets:

subset = myset(db.users.id<5)
as_dict(flat=False, sanitize=True)[source]
build(d)[source]

Experimental: see .parse()

count(distinct=None, cache=None)[source]
delete()[source]
isempty()[source]
iterselect(*fields, **attributes)[source]
nested_select(*fields, **attributes)[source]
parse(dquery)[source]

Experimental: Turn a dictionary into a Query object

select(*fields, **attributes)[source]
update(**update_fields)[source]
update_naive(**update_fields)[source]

Same as update but does not call table._before_update and _after_update

validate_and_update(**update_fields)[source]
where(query, ignore_common_filters=False)[source]
class pydal.objects.Table(db, tablename, *fields, **args)[source]

Bases: pydal.helpers.classes.Serializable, pydal.helpers.classes.BasicStorage

Represents a database table

Example:

You can create a table as:

db = DAL(...)
db.define_table('users', Field('name'))

And then:

db.users.insert(name='me') # print db.users._insert(...) to see SQL
db.users.drop()
as_dict(flat=False, sanitize=True)[source]
bulk_insert(items)[source]

here items is a list of dictionaries
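A one-line sketch of bulk_insert; the table, field, and values are illustrative:

from pydal import DAL, Field

db = DAL('sqlite:memory')
db.define_table('person', Field('name'))
ids = db.person.bulk_insert([{'name': 'Ann'}, {'name': 'Bob'}, {'name': 'Cid'}])  # returns the new record ids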

create_index(name, *fields, **kwargs)[source]
drop(mode='')[source]
drop_index(name)[source]
fields
import_from_csv_file(csvfile, id_map=None, null='<NULL>', unique='uuid', id_offset=None, transform=None, validate=False, **kwargs)[source]

Import records from a csv file. Column headers must have the same names as the table fields. The ‘id’ field is ignored. If column names read ‘table.field’ the ‘table.’ prefix is ignored.

  • ‘unique’ argument is a field which must be unique (typically a uuid field)
  • ‘restore’ argument defaults to False; if set to True, old values in the table are removed first
  • ‘id_map’ if set to None will not map ids

The import will keep the id numbers in the restored table. This assumes that there is a field of type id that is an integer and in incrementing order.
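A hedged sketch of the Table-level CSV import; the file and table names are illustrative:

from pydal import DAL, Field

db = DAL('sqlite://test.db')
db.define_table('person', Field('name'))

# the header row must match the field names, e.g. "person.id,person.name" or "id,name"
with open('people.csv', 'r') as ifile:
    db.person.import_from_csv_file(ifile)
db.commit()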

insert(**fields)[source]
on(query)[source]
query_name(*args, **kwargs)[source]
sql_fullref
sql_shortref
sqlsafe
sqlsafe_alias
truncate(mode='')[source]
update(*args, **kwargs)[source]
update_or_insert(_key=<function <lambda>>, **values)[source]
validate_and_insert(**fields)[source]
validate_and_update(_key=<function <lambda>>, **fields)[source]
validate_and_update_or_insert(_key=<function <lambda>>, **fields)[source]
with_alias(alias)[source]
class pydal.objects.VirtualCommand(method, row)[source]

Bases: object

pydal.objects.pickle_row(s)[source]

Module contents