Diffstat (limited to 'lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy')
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/__init__.py | 122
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/__init__.py | 10
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mxodbc.py | 150
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mysqldb.py | 150
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/pyodbc.py | 124
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/zxJDBC.py | 58
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.py | 7
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.so | bin 37050 -> 0 bytes
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.py | 7
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.so | bin 45289 -> 0 bytes
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/databases/__init__.py | 37
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/__init__.py | 19
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/__init__.py | 0
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/base.py | 450
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/__init__.py | 18
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/base.py | 582
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/mysqldb.py | 68
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/__init__.py | 22
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/base.py | 700
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/kinterbasdb.py | 167
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/__init__.py | 9
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/base.py | 593
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/informixdb.py | 73
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/__init__.py | 9
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/base.py | 1116
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/sapdb.py | 23
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/__init__.py | 26
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/adodbapi.py | 69
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/base.py | 1456
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/information_schema.py | 96
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/mxodbc.py | 93
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pymssql.py | 109
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pyodbc.py | 221
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/zxjdbc.py | 75
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/__init__.py | 27
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/base.py | 2571
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqlconnector.py | 135
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqldb.py | 79
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/oursql.py | 266
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pymysql.py | 38
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pyodbc.py | 82
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/zxjdbc.py | 117
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/__init__.py | 23
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/base.py | 1139
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/cx_oracle.py | 718
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/zxjdbc.py | 215
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgres.py | 16
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/__init__.py | 20
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/base.py | 1449
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pg8000.py | 121
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/psycopg2.py | 334
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pypostgresql.py | 73
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/zxjdbc.py | 42
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/__init__.py | 20
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/base.py | 753
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/pysqlite.py | 247
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/__init__.py | 26
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/base.py | 434
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/mxodbc.py | 23
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pyodbc.py | 83
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pysybase.py | 100
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/__init__.py | 301
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/base.py | 2995
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/ddl.py | 172
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/default.py | 801
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/reflection.py | 477
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/strategies.py | 242
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/threadlocal.py | 126
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/url.py | 221
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/event.py | 347
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/events.py | 429
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/exc.py | 238
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/__init__.py | 6
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/associationproxy.py | 912
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/compiler.py | 355
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/declarative.py | 1425
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/horizontal_shard.py | 128
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/hybrid.py | 425
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/mutable.py | 554
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/orderinglist.py | 321
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/serializer.py | 161
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/sqlsoup.py | 797
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/interfaces.py | 305
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/log.py | 212
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/__init__.py | 1278
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/attributes.py | 1335
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/collections.py | 1473
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dependency.py | 1161
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/deprecated_interfaces.py | 583
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/descriptor_props.py | 405
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dynamic.py | 313
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/evaluator.py | 111
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/events.py | 1046
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/exc.py | 119
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/identity.py | 254
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/instrumentation.py | 691
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/interfaces.py | 754
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/mapper.py | 2825
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/properties.py | 1250
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/query.py | 2936
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/scoping.py | 133
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/session.py | 1725
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/shard.py | 15
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/state.py | 557
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/strategies.py | 1300
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/sync.py | 107
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/unitofwork.py | 583
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/util.py | 625
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/pool.py | 958
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/processors.py | 109
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/schema.py | 2950
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/__init__.py | 66
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/compiler.py | 1793
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/expression.py | 5127
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/functions.py | 134
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/operators.py | 154
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/util.py | 717
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/visitors.py | 266
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/types.py | 2140
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/__init__.py | 32
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/_collections.py | 897
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/compat.py | 211
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/deprecations.py | 118
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/langhelpers.py | 791
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/queue.py | 191
-rwxr-xr-x  lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/topological.py | 83
126 files changed, 0 insertions, 66076 deletions
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/__init__.py
deleted file mode 100755
index 6d8a0825..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/__init__.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# sqlalchemy/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import inspect
-import sys
-
-import sqlalchemy.exc as exceptions
-
-from sqlalchemy.sql import (
- alias,
- and_,
- asc,
- between,
- bindparam,
- case,
- cast,
- collate,
- delete,
- desc,
- distinct,
- except_,
- except_all,
- exists,
- extract,
- func,
- insert,
- intersect,
- intersect_all,
- join,
- literal,
- literal_column,
- modifier,
- not_,
- null,
- or_,
- outerjoin,
- outparam,
- over,
- select,
- subquery,
- text,
- tuple_,
- type_coerce,
- union,
- union_all,
- update,
- )
-
-from sqlalchemy.types import (
- BLOB,
- BOOLEAN,
- BigInteger,
- Binary,
- Boolean,
- CHAR,
- CLOB,
- DATE,
- DATETIME,
- DECIMAL,
- Date,
- DateTime,
- Enum,
- FLOAT,
- Float,
- INT,
- INTEGER,
- Integer,
- Interval,
- LargeBinary,
- NCHAR,
- NVARCHAR,
- NUMERIC,
- Numeric,
- PickleType,
- REAL,
- SMALLINT,
- SmallInteger,
- String,
- TEXT,
- TIME,
- TIMESTAMP,
- Text,
- Time,
- TypeDecorator,
- Unicode,
- UnicodeText,
- VARCHAR,
- )
-
-
-from sqlalchemy.schema import (
- CheckConstraint,
- Column,
- ColumnDefault,
- Constraint,
- DDL,
- DefaultClause,
- FetchedValue,
- ForeignKey,
- ForeignKeyConstraint,
- Index,
- MetaData,
- PassiveDefault,
- PrimaryKeyConstraint,
- Sequence,
- Table,
- ThreadLocalMetaData,
- UniqueConstraint,
- )
-
-from sqlalchemy.engine import create_engine, engine_from_config
-
-
-__all__ = sorted(name for name, obj in locals().items()
- if not (name.startswith('_') or inspect.ismodule(obj)))
-
-__version__ = '0.7.0'
-
-del inspect, sys
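
The deleted ``__init__.py`` above exists chiefly to flatten SQLAlchemy's public API into a single namespace and to derive ``__all__`` from whatever it imported. A minimal sketch of that surface in use (a hedged illustration, not part of the diff: it assumes SQLAlchemy is installed, and the in-memory SQLite URL is an arbitrary choice):

    from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine

    metadata = MetaData()
    users = Table('users', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('name', String(50)))

    # every name used above arrived via the single flat import
    engine = create_engine('sqlite://')
    metadata.create_all(engine)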
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/__init__.py
deleted file mode 100755
index 340c5b8f..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# connectors/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-class Connector(object):
- pass
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mxodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mxodbc.py
deleted file mode 100755
index 5573dda4..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mxodbc.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# connectors/mxodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Provide an SQLAlchemy connector for the eGenix mxODBC commercial
-Python adapter for ODBC. This is not a free product, but eGenix
-provides SQLAlchemy with a license for use in continuous integration
-testing.
-
-This has been tested for use with mxODBC 3.1.2 on SQL Server 2005
-and 2008, using the SQL Server Native driver. However, it is
-possible for this to be used on other database platforms.
-
-For more info on mxODBC, see http://www.egenix.com/
-
-"""
-
-import sys
-import re
-import warnings
-
-from sqlalchemy.connectors import Connector
-
-class MxODBCConnector(Connector):
- driver='mxodbc'
-
- supports_sane_multi_rowcount = False
- supports_unicode_statements = False
- supports_unicode_binds = False
-
- supports_native_decimal = True
-
- @classmethod
- def dbapi(cls):
- # this classmethod will normally be replaced by an instance
- # attribute of the same name, so this is normally only called once.
- cls._load_mx_exceptions()
- platform = sys.platform
- if platform == 'win32':
- from mx.ODBC import Windows as module
- # this can be the string "linux2", and possibly others
- elif 'linux' in platform:
- from mx.ODBC import unixODBC as module
- elif platform == 'darwin':
- from mx.ODBC import iODBC as module
- else:
- raise ImportError, "Unrecognized platform for mxODBC import"
- return module
-
- @classmethod
- def _load_mx_exceptions(cls):
- """ Import mxODBC exception classes into the module namespace,
- as if they had been imported normally. This is done here
- to avoid requiring all SQLAlchemy users to install mxODBC.
- """
- global InterfaceError, ProgrammingError
- from mx.ODBC import InterfaceError
- from mx.ODBC import ProgrammingError
-
- def on_connect(self):
- def connect(conn):
- conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
- conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
- conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
- conn.errorhandler = self._error_handler()
- return connect
-
- def _error_handler(self):
- """ Return a handler that adjusts mxODBC's raised Warnings to
- emit Python standard warnings.
- """
- from mx.ODBC.Error import Warning as MxOdbcWarning
- def error_handler(connection, cursor, errorclass, errorvalue):
-
- if issubclass(errorclass, MxOdbcWarning):
- errorclass.__bases__ = (Warning,)
- warnings.warn(message=str(errorvalue),
- category=errorclass,
- stacklevel=2)
- else:
- raise errorclass, errorvalue
- return error_handler
-
- def create_connect_args(self, url):
- """ Return a tuple of *args,**kwargs for creating a connection.
-
- The mxODBC 3.x connection constructor looks like this:
-
- connect(dsn, user='', password='',
- clear_auto_commit=1, errorhandler=None)
-
- This method translates the values in the provided uri
- into args and kwargs needed to instantiate an mxODBC Connection.
-
- The arg 'errorhandler' is not used by SQLAlchemy and will
- not be populated.
-
- """
- opts = url.translate_connect_args(username='user')
- opts.update(url.query)
- args = opts.pop('host')
- opts.pop('port', None)
- opts.pop('database', None)
- return (args,), opts
-
- def is_disconnect(self, e, connection, cursor):
- # TODO: eGenix recommends checking connection.closed here
- # Does that detect dropped connections ?
- if isinstance(e, self.dbapi.ProgrammingError):
- return "connection already closed" in str(e)
- elif isinstance(e, self.dbapi.Error):
- return '[08S01]' in str(e)
- else:
- return False
-
- def _get_server_version_info(self, connection):
- # eGenix suggests using conn.dbms_version instead
- # of what we're doing here
- dbapi_con = connection.connection
- version = []
- r = re.compile('[.\-]')
- # 18 == pyodbc.SQL_DBMS_VER
- for n in r.split(dbapi_con.getinfo(18)[1]):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
-
- def do_execute(self, cursor, statement, parameters, context=None):
- if context:
- native_odbc_execute = context.execution_options.\
- get('native_odbc_execute', 'auto')
- if native_odbc_execute is True:
- # user specified native_odbc_execute=True
- cursor.execute(statement, parameters)
- elif native_odbc_execute is False:
- # user specified native_odbc_execute=False
- cursor.executedirect(statement, parameters)
- elif context.is_crud:
- # statement is UPDATE, DELETE, INSERT
- cursor.execute(statement, parameters)
- else:
- # all other statements
- cursor.executedirect(statement, parameters)
- else:
- cursor.executedirect(statement, parameters)
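
``_get_server_version_info`` above (repeated nearly verbatim in the mysqldb and pyodbc connectors later in this diff) reduces a vendor version string to a comparable tuple. A standalone restatement of that logic, runnable without any ODBC driver; the sample version strings are hypothetical:

    import re

    def parse_server_version(version_string):
        # Split on '.' and '-'; numeric fields become ints, anything
        # non-numeric (e.g. a '-log' suffix) is kept as a string.
        version = []
        for n in re.split(r'[.\-]', version_string):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    print(parse_server_version('10.50.1600.1'))  # (10, 50, 1600, 1)
    print(parse_server_version('5.5.8-log'))     # (5, 5, 8, 'log')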
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mysqldb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mysqldb.py
deleted file mode 100755
index 189c412a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/mysqldb.py
+++ /dev/null
@@ -1,150 +0,0 @@
-"""Define behaviors common to MySQLdb dialects.
-
-Currently includes MySQL and Drizzle.
-
-"""
-
-from sqlalchemy.connectors import Connector
-from sqlalchemy.engine import base as engine_base, default
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
-from sqlalchemy import processors
-import re
-
-# the subclassing of Connector by all classes
-# here is not strictly necessary
-
-class MySQLDBExecutionContext(Connector):
-
- @property
- def rowcount(self):
- if hasattr(self, '_rowcount'):
- return self._rowcount
- else:
- return self.cursor.rowcount
-
-class MySQLDBCompiler(Connector):
- def visit_mod(self, binary, **kw):
- return self.process(binary.left) + " %% " + self.process(binary.right)
-
- def post_process_text(self, text):
- return text.replace('%', '%%')
-
-class MySQLDBIdentifierPreparer(Connector):
-
- def _escape_identifier(self, value):
- value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace("%", "%%")
-
-class MySQLDBConnector(Connector):
- driver = 'mysqldb'
- supports_unicode_statements = False
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = True
-
- supports_native_decimal = True
-
- default_paramstyle = 'format'
-
- @classmethod
- def dbapi(cls):
- # is overridden when pymysql is used
- return __import__('MySQLdb')
-
- def do_executemany(self, cursor, statement, parameters, context=None):
- rowcount = cursor.executemany(statement, parameters)
- if context is not None:
- context._rowcount = rowcount
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(database='db', username='user',
- password='passwd')
- opts.update(url.query)
-
- util.coerce_kw_type(opts, 'compress', bool)
- util.coerce_kw_type(opts, 'connect_timeout', int)
- util.coerce_kw_type(opts, 'client_flag', int)
- util.coerce_kw_type(opts, 'local_infile', int)
- # Note: using either of the below will cause all strings to be returned
- # as Unicode, both in raw SQL operations and with column types like
- # String and MSString.
- util.coerce_kw_type(opts, 'use_unicode', bool)
- util.coerce_kw_type(opts, 'charset', str)
-
- # Rich values 'cursorclass' and 'conv' are not supported via
- # query string.
-
- ssl = {}
- for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']:
- if key in opts:
- ssl[key[4:]] = opts[key]
- util.coerce_kw_type(ssl, key[4:], str)
- del opts[key]
- if ssl:
- opts['ssl'] = ssl
-
- # FOUND_ROWS must be set in CLIENT_FLAGS to enable
- # supports_sane_rowcount.
- client_flag = opts.get('client_flag', 0)
- if self.dbapi is not None:
- try:
- CLIENT_FLAGS = __import__(
- self.dbapi.__name__ + '.constants.CLIENT'
- ).constants.CLIENT
- client_flag |= CLIENT_FLAGS.FOUND_ROWS
- except (AttributeError, ImportError):
- pass
- opts['client_flag'] = client_flag
- return [[], opts]
-
- def _get_server_version_info(self, connection):
- dbapi_con = connection.connection
- version = []
- r = re.compile('[.\-]')
- for n in r.split(dbapi_con.get_server_info()):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
-
- def _extract_error_code(self, exception):
- return exception.args[0]
-
- def _detect_charset(self, connection):
- """Sniff out the character set in use for connection results."""
-
- # Note: MySQL-python 1.2.1c7 seems to ignore changes made
- # on a connection via set_character_set()
- if self.server_version_info < (4, 1, 0):
- try:
- return connection.connection.character_set_name()
- except AttributeError:
- # < 1.2.1 final MySQL-python drivers have no charset support.
- # a query is needed.
- pass
-
- # Prefer 'character_set_results' for the current connection over the
- # value in the driver. SET NAMES or individual variable SETs will
- # change the charset without updating the driver's view of the world.
- #
- # If it's decided that issuing that sort of SQL leaves you SOL, then
- # this can prefer the driver value.
- rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
- opts = dict([(row[0], row[1]) for row in self._compat_fetchall(rs)])
-
- if 'character_set_results' in opts:
- return opts['character_set_results']
- try:
- return connection.connection.character_set_name()
- except AttributeError:
- # Still no charset on < 1.2.1 final...
- if 'character_set' in opts:
- return opts['character_set']
- else:
- util.warn(
- "Could not detect the connection character set with this "
- "combination of MySQL server and MySQL-python. "
- "MySQL-python >= 1.2.2 is recommended. Assuming latin1.")
- return 'latin1'
-
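``create_connect_args`` above folds any ``ssl_*`` query-string entries into the single ``ssl`` dict that MySQLdb's ``connect()`` accepts. A standalone sketch of just that translation (the sample options are hypothetical):

    def collect_ssl_args(opts):
        # 'ssl_ca' -> ssl['ca'], 'ssl_key' -> ssl['key'], etc.
        ssl = {}
        for key in ['ssl_ca', 'ssl_key', 'ssl_cert', 'ssl_capath', 'ssl_cipher']:
            if key in opts:
                ssl[key[4:]] = str(opts.pop(key))
        if ssl:
            opts['ssl'] = ssl
        return opts

    print(collect_ssl_args({'ssl_ca': '/etc/ca.pem', 'charset': 'utf8'}))
    # {'charset': 'utf8', 'ssl': {'ca': '/etc/ca.pem'}}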
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/pyodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/pyodbc.py
deleted file mode 100755
index 3f6d6cb5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/pyodbc.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# connectors/pyodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.connectors import Connector
-from sqlalchemy.util import asbool
-
-import sys
-import re
-import urllib
-
-class PyODBCConnector(Connector):
- driver='pyodbc'
-
- supports_sane_multi_rowcount = False
- # PyODBC unicode is broken on UCS-4 builds
- supports_unicode = sys.maxunicode == 65535
- supports_unicode_statements = supports_unicode
- supports_native_decimal = True
- default_paramstyle = 'named'
-
- # for non-DSN connections, this should
- # hold the desired driver name
- pyodbc_driver_name = None
-
- # will be set to True after initialize()
- # if the freetds.so is detected
- freetds = False
-
- @classmethod
- def dbapi(cls):
- return __import__('pyodbc')
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- opts.update(url.query)
-
- keys = opts
- query = url.query
-
- connect_args = {}
- for param in ('ansi', 'unicode_results', 'autocommit'):
- if param in keys:
- connect_args[param] = asbool(keys.pop(param))
-
- if 'odbc_connect' in keys:
- connectors = [urllib.unquote_plus(keys.pop('odbc_connect'))]
- else:
- dsn_connection = 'dsn' in keys or \
- ('host' in keys and 'database' not in keys)
- if dsn_connection:
- connectors= ['dsn=%s' % (keys.pop('host', '') or \
- keys.pop('dsn', ''))]
- else:
- port = ''
- if 'port' in keys and not 'port' in query:
- port = ',%d' % int(keys.pop('port'))
-
- connectors = ["DRIVER={%s}" %
- keys.pop('driver', self.pyodbc_driver_name),
- 'Server=%s%s' % (keys.pop('host', ''), port),
- 'Database=%s' % keys.pop('database', '') ]
-
- user = keys.pop("user", None)
- if user:
- connectors.append("UID=%s" % user)
- connectors.append("PWD=%s" % keys.pop('password', ''))
- else:
- connectors.append("Trusted_Connection=Yes")
-
- # if set to 'Yes', the ODBC layer will try to automagically
- # convert textual data from your database encoding to your
- # client encoding. This should obviously be set to 'No' if
- # you query a cp1253 encoded database from a latin1 client...
- if 'odbc_autotranslate' in keys:
- connectors.append("AutoTranslate=%s" %
- keys.pop("odbc_autotranslate"))
-
- connectors.extend(['%s=%s' % (k,v) for k,v in keys.iteritems()])
- return [[";".join (connectors)], connect_args]
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.ProgrammingError):
- return "The cursor's connection has been closed." in str(e) or \
- 'Attempt to use a closed connection.' in str(e)
- elif isinstance(e, self.dbapi.Error):
- return '[08S01]' in str(e)
- else:
- return False
-
- def initialize(self, connection):
- # determine FreeTDS first. can't issue SQL easily
- # without getting unicode_statements/binds set up.
-
- pyodbc = self.dbapi
-
- dbapi_con = connection.connection
-
- self.freetds = bool(re.match(r".*libtdsodbc.*\.so",
- dbapi_con.getinfo(pyodbc.SQL_DRIVER_NAME)
- ))
-
- # the "Py2K only" part here is theoretical.
- # have not tried pyodbc + python3.1 yet.
- # Py2K
- self.supports_unicode_statements = not self.freetds
- self.supports_unicode_binds = not self.freetds
- # end Py2K
-
- # run other initialization which asks for user name, etc.
- super(PyODBCConnector, self).initialize(connection)
-
- def _get_server_version_info(self, connection):
- dbapi_con = connection.connection
- version = []
- r = re.compile('[.\-]')
- for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
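
The pyodbc ``create_connect_args`` above assembles a semicolon-delimited ODBC connection string from the parts of the SQLAlchemy URL. A simplified, driver-free sketch of its non-DSN branch (host, database, credentials and driver name are placeholder values):

    def build_odbc_connect(host, database, user=None, password='',
                           driver='SQL Server', port=None):
        # DRIVER and Server (with optional ',port'), then Database,
        # then either UID/PWD or a trusted connection.
        server = host + (',%d' % port if port else '')
        parts = ['DRIVER={%s}' % driver,
                 'Server=%s' % server,
                 'Database=%s' % database]
        if user:
            parts.append('UID=%s' % user)
            parts.append('PWD=%s' % password)
        else:
            parts.append('Trusted_Connection=Yes')
        return ';'.join(parts)

    print(build_odbc_connect('db1', 'sales', user='scott',
                             password='tiger', port=1433))
    # DRIVER={SQL Server};Server=db1,1433;Database=sales;UID=scott;PWD=tiger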
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/zxJDBC.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/zxJDBC.py
deleted file mode 100755
index 20bf9d9c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/connectors/zxJDBC.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# connectors/zxJDBC.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import sys
-from sqlalchemy.connectors import Connector
-
-class ZxJDBCConnector(Connector):
- driver = 'zxjdbc'
-
- supports_sane_rowcount = False
- supports_sane_multi_rowcount = False
-
- supports_unicode_binds = True
- supports_unicode_statements = sys.version > '2.5.0+'
- description_encoding = None
- default_paramstyle = 'qmark'
-
- jdbc_db_name = None
- jdbc_driver_name = None
-
- @classmethod
- def dbapi(cls):
- from com.ziclix.python.sql import zxJDBC
- return zxJDBC
-
- def _driver_kwargs(self):
- """Return kw arg dict to be sent to connect()."""
- return {}
-
- def _create_jdbc_url(self, url):
- """Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
- return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
- url.port is not None
- and ':%s' % url.port or '',
- url.database)
-
- def create_connect_args(self, url):
- opts = self._driver_kwargs()
- opts.update(url.query)
- return [
- [self._create_jdbc_url(url),
- url.username, url.password,
- self.jdbc_driver_name],
- opts]
-
- def is_disconnect(self, e, connection, cursor):
- if not isinstance(e, self.dbapi.ProgrammingError):
- return False
- e = str(e)
- return 'connection is closed' in e or 'cursor is closed' in e
-
- def _get_server_version_info(self, connection):
- # use connection.connection.dbversion, and parse appropriately
- # to get a tuple
- raise NotImplementedError()
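
``_create_jdbc_url`` above is the one piece of string formatting zxJDBC needs: rendering the SQLAlchemy URL into JDBC form. A standalone restatement (the subprotocol and host values are illustrative):

    def create_jdbc_url(db_name, host, database, port=None):
        # jdbc:<subprotocol>://<host>[:<port>]/<database>
        return 'jdbc:%s://%s%s/%s' % (
            db_name, host,
            ':%s' % port if port is not None else '',
            database)

    print(create_jdbc_url('mysql', 'db1', 'test', 3306))  # jdbc:mysql://db1:3306/test
    print(create_jdbc_url('postgresql', 'db2', 'test'))   # jdbc:postgresql://db2/test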
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.py
deleted file mode 100755
index 4f5a32f8..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.py
+++ /dev/null
@@ -1,7 +0,0 @@
-def __bootstrap__():
- global __bootstrap__, __loader__, __file__
- import sys, pkg_resources, imp
- __file__ = pkg_resources.resource_filename(__name__,'cprocessors.so')
- __loader__ = None; del __bootstrap__, __loader__
- imp.load_dynamic(__name__,__file__)
-__bootstrap__()
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.so b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.so
deleted file mode 100755
index 2ad6ad69..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cprocessors.so
+++ /dev/null
Binary files differ
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.py
deleted file mode 100755
index dd863cec..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.py
+++ /dev/null
@@ -1,7 +0,0 @@
-def __bootstrap__():
- global __bootstrap__, __loader__, __file__
- import sys, pkg_resources, imp
- __file__ = pkg_resources.resource_filename(__name__,'cresultproxy.so')
- __loader__ = None; del __bootstrap__, __loader__
- imp.load_dynamic(__name__,__file__)
-__bootstrap__()
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.so b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.so
deleted file mode 100755
index 0fa2cd5f..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/cresultproxy.so
+++ /dev/null
Binary files differ
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/databases/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/databases/__init__.py
deleted file mode 100755
index dddc8f68..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/databases/__init__.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# databases/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Include imports from the sqlalchemy.dialects package for backwards
-compatibility with pre 0.6 versions.
-
-"""
-from sqlalchemy.dialects.sqlite import base as sqlite
-from sqlalchemy.dialects.postgresql import base as postgresql
-postgres = postgresql
-from sqlalchemy.dialects.mysql import base as mysql
-from sqlalchemy.dialects.drizzle import base as drizzle
-from sqlalchemy.dialects.oracle import base as oracle
-from sqlalchemy.dialects.firebird import base as firebird
-from sqlalchemy.dialects.maxdb import base as maxdb
-from sqlalchemy.dialects.informix import base as informix
-from sqlalchemy.dialects.mssql import base as mssql
-from sqlalchemy.dialects.access import base as access
-from sqlalchemy.dialects.sybase import base as sybase
-
-
-__all__ = (
- 'access',
- 'drizzle',
- 'firebird',
- 'informix',
- 'maxdb',
- 'mssql',
- 'mysql',
- 'postgresql',
- 'sqlite',
- 'oracle',
- 'sybase',
- )
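
The compatibility module above simply rebinds the post-0.6 dialect locations under their pre-0.6 names. A quick check of that aliasing (a sketch that only runs against a 0.7-era tree like the one being deleted here, where ``sqlalchemy.databases`` still exists):

    from sqlalchemy import databases
    from sqlalchemy.dialects.postgresql import base as postgresql_base

    # 'postgres' is not a copy; it is the postgresql base module itself.
    assert databases.postgres is postgresql_base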
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/__init__.py
deleted file mode 100755
index 48e578a0..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# dialects/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-__all__ = (
-# 'access',
- 'drizzle',
- 'firebird',
-# 'informix',
-# 'maxdb',
- 'mssql',
- 'mysql',
- 'oracle',
- 'postgresql',
- 'sqlite',
- 'sybase',
- )
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/__init__.py
deleted file mode 100755
index e69de29b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/__init__.py
+++ /dev/null
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/base.py
deleted file mode 100755
index 7c62dcc3..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/access/base.py
+++ /dev/null
@@ -1,450 +0,0 @@
-# access/base.py
-# Copyright (C) 2007-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# Copyright (C) 2007 Paul Johnston, paj@pajhome.org.uk
-# Portions derived from jet2sql.py by Matt Keranen, mksql@yahoo.com
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for the Microsoft Access database.
-
-This dialect is *not* ported to SQLAlchemy 0.6 or 0.7.
-
-This dialect is *not* tested on SQLAlchemy 0.6 or 0.7.
-
-
-"""
-from sqlalchemy import sql, schema, types, exc, pool
-from sqlalchemy.sql import compiler, expression
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import processors
-
-class AcNumeric(types.Numeric):
- def get_col_spec(self):
- return "NUMERIC"
-
- def bind_processor(self, dialect):
- return processors.to_str
-
- def result_processor(self, dialect, coltype):
- return None
-
-class AcFloat(types.Float):
- def get_col_spec(self):
- return "FLOAT"
-
- def bind_processor(self, dialect):
- """By converting to string, we can use Decimal types round-trip."""
- return processors.to_str
-
-class AcInteger(types.Integer):
- def get_col_spec(self):
- return "INTEGER"
-
-class AcTinyInteger(types.Integer):
- def get_col_spec(self):
- return "TINYINT"
-
-class AcSmallInteger(types.SmallInteger):
- def get_col_spec(self):
- return "SMALLINT"
-
-class AcDateTime(types.DateTime):
- def get_col_spec(self):
- return "DATETIME"
-
-class AcDate(types.Date):
-
- def get_col_spec(self):
- return "DATETIME"
-
-class AcText(types.Text):
- def get_col_spec(self):
- return "MEMO"
-
-class AcString(types.String):
- def get_col_spec(self):
- return "TEXT" + (self.length and ("(%d)" % self.length) or "")
-
-class AcUnicode(types.Unicode):
- def get_col_spec(self):
- return "TEXT" + (self.length and ("(%d)" % self.length) or "")
-
- def bind_processor(self, dialect):
- return None
-
- def result_processor(self, dialect, coltype):
- return None
-
-class AcChar(types.CHAR):
- def get_col_spec(self):
- return "TEXT" + (self.length and ("(%d)" % self.length) or "")
-
-class AcBinary(types.LargeBinary):
- def get_col_spec(self):
- return "BINARY"
-
-class AcBoolean(types.Boolean):
- def get_col_spec(self):
- return "YESNO"
-
-class AcTimeStamp(types.TIMESTAMP):
- def get_col_spec(self):
- return "TIMESTAMP"
-
-class AccessExecutionContext(default.DefaultExecutionContext):
- def _has_implicit_sequence(self, column):
- if column.primary_key and column.autoincrement:
- if isinstance(column.type, types.Integer) and \
- not column.foreign_keys:
- if column.default is None or \
- (isinstance(column.default, schema.Sequence) and \
- column.default.optional):
- return True
- return False
-
- def post_exec(self):
- """If we inserted into a row with a COUNTER column, fetch the ID"""
-
- if self.compiled.isinsert:
- tbl = self.compiled.statement.table
- if not hasattr(tbl, 'has_sequence'):
- tbl.has_sequence = None
- for column in tbl.c:
- if getattr(column, 'sequence', False) or \
- self._has_implicit_sequence(column):
- tbl.has_sequence = column
- break
-
- if bool(tbl.has_sequence):
- # TBD: for some reason _last_inserted_ids doesn't exist here
- # (but it does at corresponding point in mssql???)
- #if not len(self._last_inserted_ids) or
- # self._last_inserted_ids[0] is None:
- self.cursor.execute("SELECT @@identity AS lastrowid")
- row = self.cursor.fetchone()
- self._last_inserted_ids = [int(row[0])]
- #+ self._last_inserted_ids[1:]
- # print "LAST ROW ID", self._last_inserted_ids
-
- super(AccessExecutionContext, self).post_exec()
-
-
-const, daoEngine = None, None
-class AccessDialect(default.DefaultDialect):
- colspecs = {
- types.Unicode : AcUnicode,
- types.Integer : AcInteger,
- types.SmallInteger: AcSmallInteger,
- types.Numeric : AcNumeric,
- types.Float : AcFloat,
- types.DateTime : AcDateTime,
- types.Date : AcDate,
- types.String : AcString,
- types.LargeBinary : AcBinary,
- types.Boolean : AcBoolean,
- types.Text : AcText,
- types.CHAR: AcChar,
- types.TIMESTAMP: AcTimeStamp,
- }
- name = 'access'
- supports_sane_rowcount = False
- supports_sane_multi_rowcount = False
-
- ported_sqla_06 = False
-
- def type_descriptor(self, typeobj):
- newobj = types.adapt_type(typeobj, self.colspecs)
- return newobj
-
- def __init__(self, **params):
- super(AccessDialect, self).__init__(**params)
- self.text_as_varchar = False
- self._dtbs = None
-
- @classmethod
- def dbapi(cls):
- import win32com.client, pythoncom
-
- global const, daoEngine
- if const is None:
- const = win32com.client.constants
- for suffix in (".36", ".35", ".30"):
- try:
- daoEngine = win32com.client.\
- gencache.\
- EnsureDispatch("DAO.DBEngine" + suffix)
- break
- except pythoncom.com_error:
- pass
- else:
- raise exc.InvalidRequestError(
- "Can't find a DB engine. Check "
- "http://support.microsoft.com/kb/239114 for details.")
-
- import pyodbc as module
- return module
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args()
- connectors = ["Driver={Microsoft Access Driver (*.mdb)}"]
- connectors.append("Dbq=%s" % opts["database"])
- user = opts.get("username", None)
- if user:
- connectors.append("UID=%s" % user)
- connectors.append("PWD=%s" % opts.get("password", ""))
- return [[";".join(connectors)], {}]
-
- def last_inserted_ids(self):
- return self.context.last_inserted_ids
-
- def do_execute(self, cursor, statement, params, context=None):
- if params == {}:
- params = ()
-        super(AccessDialect, self).\
-            do_execute(cursor, statement, params, context=context)
-
- def _execute(self, c, statement, parameters):
- try:
- if parameters == {}:
- parameters = ()
- c.execute(statement, parameters)
- self.context.rowcount = c.rowcount
- except Exception, e:
- raise exc.DBAPIError.instance(statement, parameters, e)
-
- def has_table(self, connection, tablename, schema=None):
-        # This approach seems to be more reliable than using DAO
- try:
- connection.execute('select top 1 * from [%s]' % tablename)
- return True
- except Exception, e:
- return False
-
- def reflecttable(self, connection, table, include_columns):
- # This is defined in the function, as it relies on win32com constants,
- # that aren't imported until dbapi method is called
- if not hasattr(self, 'ischema_names'):
- self.ischema_names = {
- const.dbByte: AcBinary,
- const.dbInteger: AcInteger,
- const.dbLong: AcInteger,
- const.dbSingle: AcFloat,
- const.dbDouble: AcFloat,
- const.dbDate: AcDateTime,
- const.dbLongBinary: AcBinary,
- const.dbMemo: AcText,
- const.dbBoolean: AcBoolean,
- const.dbText: AcUnicode, # All Access strings are
- # unicode
- const.dbCurrency: AcNumeric,
- }
-
- # A fresh DAO connection is opened for each reflection
- # This is necessary, so we get the latest updates
- dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
-
- try:
- for tbl in dtbs.TableDefs:
- if tbl.Name.lower() == table.name.lower():
- break
- else:
- raise exc.NoSuchTableError(table.name)
-
- for col in tbl.Fields:
- coltype = self.ischema_names[col.Type]
- if col.Type == const.dbText:
- coltype = coltype(col.Size)
-
- colargs = \
- {
- 'nullable': not(col.Required or
- col.Attributes & const.dbAutoIncrField),
- }
- default = col.DefaultValue
-
- if col.Attributes & const.dbAutoIncrField:
- colargs['default'] = schema.Sequence(col.Name + '_seq')
- elif default:
- if col.Type == const.dbBoolean:
- default = default == 'Yes' and '1' or '0'
- colargs['server_default'] = \
- schema.DefaultClause(sql.text(default))
-
- table.append_column(
- schema.Column(col.Name, coltype, **colargs))
-
- # TBD: check constraints
-
- # Find primary key columns first
- for idx in tbl.Indexes:
- if idx.Primary:
- for col in idx.Fields:
- thecol = table.c[col.Name]
- table.primary_key.add(thecol)
- if isinstance(thecol.type, AcInteger) and \
- not (thecol.default and
- isinstance(
- thecol.default.arg,
- schema.Sequence
- )):
- thecol.autoincrement = False
-
- # Then add other indexes
- for idx in tbl.Indexes:
- if not idx.Primary:
- if len(idx.Fields) == 1:
- col = table.c[idx.Fields[0].Name]
- if not col.primary_key:
- col.index = True
- col.unique = idx.Unique
- else:
- pass # TBD: multi-column indexes
-
-
- for fk in dtbs.Relations:
- if fk.ForeignTable != table.name:
- continue
- scols = [c.ForeignName for c in fk.Fields]
- rcols = ['%s.%s' % (fk.Table, c.Name) for c in fk.Fields]
- table.append_constraint(
- schema.ForeignKeyConstraint(scols, rcols,\
- link_to_name=True))
-
- finally:
- dtbs.Close()
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- # A fresh DAO connection is opened for each reflection
- # This is necessary, so we get the latest updates
- dtbs = daoEngine.OpenDatabase(connection.engine.url.database)
-
- names = [t.Name for t in dtbs.TableDefs
- if t.Name[:4] != "MSys" and t.Name[:4] != "~TMP"]
- dtbs.Close()
- return names
-
-
-class AccessCompiler(compiler.SQLCompiler):
- extract_map = compiler.SQLCompiler.extract_map.copy()
- extract_map.update ({
- 'month': 'm',
- 'day': 'd',
- 'year': 'yyyy',
- 'second': 's',
- 'hour': 'h',
- 'doy': 'y',
- 'minute': 'n',
- 'quarter': 'q',
- 'dow': 'w',
- 'week': 'ww'
- })
-
- def visit_select_precolumns(self, select):
-        """Access puts TOP, its version of LIMIT, here."""
- s = select.distinct and "DISTINCT " or ""
- if select.limit:
- s += "TOP %s " % (select.limit)
- if select.offset:
- raise exc.InvalidRequestError(
- 'Access does not support LIMIT with an offset')
- return s
-
- def limit_clause(self, select):
- """Limit in access is after the select keyword"""
- return ""
-
- def binary_operator_string(self, binary):
- """Access uses "mod" instead of "%" """
- return binary.operator == '%' and 'mod' or binary.operator
-
- def label_select_column(self, select, column, asfrom):
- if isinstance(column, expression.Function):
- return column.label()
- else:
- return super(AccessCompiler, self).\
- label_select_column(select, column, asfrom)
-
- function_rewrites = {'current_date': 'now',
- 'current_timestamp': 'now',
- 'length': 'len',
- }
- def visit_function(self, func):
- """Access function names differ from the ANSI SQL names;
- rewrite common ones"""
- func.name = self.function_rewrites.get(func.name, func.name)
- return super(AccessCompiler, self).visit_function(func)
-
- def for_update_clause(self, select):
- """FOR UPDATE is not supported by Access; silently ignore"""
- return ''
-
- # Strip schema
- def visit_table(self, table, asfrom=False, **kwargs):
- if asfrom:
- return self.preparer.quote(table.name, table.quote)
- else:
- return ""
-
- def visit_join(self, join, asfrom=False, **kwargs):
- return (self.process(join.left, asfrom=True) + \
- (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN ") + \
- self.process(join.right, asfrom=True) + " ON " + \
- self.process(join.onclause))
-
- def visit_extract(self, extract, **kw):
- field = self.extract_map.get(extract.field, extract.field)
- return 'DATEPART("%s", %s)' % \
- (field, self.process(extract.expr, **kw))
-
-class AccessDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column) + " " + \
- column.type.dialect_impl(self.dialect).get_col_spec()
-
- # install a sequence if we have an implicit IDENTITY column
- if (not getattr(column.table, 'has_sequence', False)) and \
- column.primary_key and \
- column.autoincrement and \
- isinstance(column.type, types.Integer) and \
- not column.foreign_keys:
- if column.default is None or \
- (isinstance(column.default, schema.Sequence) and
- column.default.optional):
- column.sequence = schema.Sequence(column.name + '_seq')
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- if hasattr(column, 'sequence'):
- column.table.has_sequence = column
- colspec = self.preparer.format_column(column) + " counter"
- else:
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- return colspec
-
- def visit_drop_index(self, drop):
- index = drop.element
- self.append("\nDROP INDEX [%s].[%s]" % \
- (index.table.name,
- self._index_identifier(index.name)))
-
-class AccessIdentifierPreparer(compiler.IdentifierPreparer):
- reserved_words = compiler.RESERVED_WORDS.copy()
- reserved_words.update(['value', 'text'])
- def __init__(self, dialect):
- super(AccessIdentifierPreparer, self).\
- __init__(dialect, initial_quote='[', final_quote=']')
-
-
-dialect = AccessDialect
-dialect.poolclass = pool.SingletonThreadPool
-dialect.statement_compiler = AccessCompiler
-dialect.ddlcompiler = AccessDDLCompiler
-dialect.preparer = AccessIdentifierPreparer
-dialect.execution_ctx_cls = AccessExecutionContext
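
``AccessCompiler.visit_extract`` above maps ANSI ``extract()`` field names onto the single-letter codes Access's ``DATEPART`` expects. A driver-free sketch of that rendering (the sample column expression is hypothetical):

    extract_map = {'month': 'm', 'day': 'd', 'year': 'yyyy', 'second': 's',
                   'hour': 'h', 'doy': 'y', 'minute': 'n', 'quarter': 'q',
                   'dow': 'w', 'week': 'ww'}

    def render_extract(field, expr):
        # extract('year', x) -> DATEPART("yyyy", x); unknown fields pass through
        return 'DATEPART("%s", %s)' % (extract_map.get(field, field), expr)

    print(render_extract('year', 'orders.order_date'))
    # DATEPART("yyyy", orders.order_date)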
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/__init__.py
deleted file mode 100755
index bbd716f5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from sqlalchemy.dialects.drizzle import base, mysqldb
-
-# default dialect
-base.dialect = mysqldb.dialect
-
-from sqlalchemy.dialects.drizzle.base import \
- BIGINT, BINARY, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
- DECIMAL, DOUBLE, ENUM, \
- FLOAT, INTEGER, \
- NUMERIC, REAL, TEXT, TIME, TIMESTAMP, \
- VARBINARY, VARCHAR, dialect
-
-__all__ = (
-'BIGINT', 'BINARY', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE',
-'ENUM', 'FLOAT', 'INTEGER',
-'NUMERIC', 'SET', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP',
-'VARBINARY', 'VARCHAR', 'dialect'
-)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/base.py
deleted file mode 100755
index ca2678e5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/base.py
+++ /dev/null
@@ -1,582 +0,0 @@
-# drizzle/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# Copyright (C) 2010-2011 Monty Taylor <mordred@inaugust.com>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Drizzle database.
-
-Supported Versions and Features
--------------------------------
-
-SQLAlchemy supports the Drizzle database starting with 2010.08,
-with capabilities increasing with more modern servers.
-
-Most available DBAPI drivers are supported; see below.
-
-===================================== ===============
-Feature Minimum Version
-===================================== ===============
-sqlalchemy.orm 2010.08
-Table Reflection 2010.08
-DDL Generation 2010.08
-utf8/Full Unicode Connections 2010.08
-Transactions 2010.08
-Two-Phase Transactions 2010.08
-Nested Transactions 2010.08
-===================================== ===============
-
-See the official Drizzle documentation for detailed information about features
-supported in any given server release.
-
-Connecting
-----------
-
-See the API documentation on individual drivers for details on connecting.
-
-Connection Timeouts
--------------------
-
-Drizzle features an automatic connection close behavior for connections that
-have been idle for eight hours or more. To avoid this issue, use
-the ``pool_recycle`` option which controls the maximum age of any connection::
-
- engine = create_engine('drizzle+mysqldb://...', pool_recycle=3600)
-
-Storage Engines
----------------
-
-Drizzle defaults to the ``InnoDB`` storage engine, which is transactional.
-
-Storage engines can be selected when creating tables in SQLAlchemy by supplying
-a ``drizzle_engine='whatever'`` to the ``Table`` constructor. Any Drizzle table
-creation option can be specified in this syntax::
-
- Table('mytable', metadata,
- Column('data', String(32)),
- drizzle_engine='InnoDB',
- )
-
-Keys
-----
-
-Not all Drizzle storage engines support foreign keys. For ``BlitzDB`` and
-similar engines, the information loaded by table reflection will not include
-foreign keys. For these tables, you may supply a
-:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time::
-
- Table('mytable', metadata,
- ForeignKeyConstraint(['other_id'], ['othertable.other_id']),
- autoload=True
- )
-
-When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on
-an integer primary key column::
-
- >>> t = Table('mytable', metadata,
- ... Column('mytable_id', Integer, primary_key=True)
- ... )
- >>> t.create()
- CREATE TABLE mytable (
-        mytable_id INTEGER NOT NULL AUTO_INCREMENT,
-        PRIMARY KEY (mytable_id)
- )
-
-You can disable this behavior by supplying ``autoincrement=False`` to the
-:class:`~sqlalchemy.Column`. This flag can also be used to enable
-auto-increment on a secondary column in a multi-column key for some storage
-engines::
-
- Table('mytable', metadata,
- Column('gid', Integer, primary_key=True, autoincrement=False),
- Column('id', Integer, primary_key=True)
- )
-
-Drizzle SQL Extensions
-----------------------
-
-Many of the Drizzle SQL extensions are handled through SQLAlchemy's generic
-function and operator support::
-
- table.select(table.c.password==func.md5('plaintext'))
- table.select(table.c.username.op('regexp')('^[a-d]'))
-
-And of course any valid Drizzle statement can be executed as a string as well.
-
-Some limited direct support for Drizzle extensions to SQL is currently
-available.
-
-* SELECT pragma::
-
- select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT'])
-
-* UPDATE with LIMIT::
-
- update(..., drizzle_limit=10)
-
-"""
-
-import datetime, inspect, re, sys
-
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import exc, log, sql, util
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy.sql import functions as sql_functions
-from sqlalchemy.sql import compiler
-from array import array as _array
-
-from sqlalchemy.engine import reflection
-from sqlalchemy.engine import base as engine_base, default
-from sqlalchemy import types as sqltypes
-from sqlalchemy.dialects.mysql import base as mysql_dialect
-
-from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \
- BLOB, BINARY, VARBINARY
-
-class _NumericType(object):
- """Base for Drizzle numeric types."""
-
- def __init__(self, **kw):
- super(_NumericType, self).__init__(**kw)
-
-class _FloatType(_NumericType, sqltypes.Float):
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- if isinstance(self, (REAL, DOUBLE)) and \
- (
- (precision is None and scale is not None) or
- (precision is not None and scale is None)
- ):
- raise exc.ArgumentError(
- "You must specify both precision and scale or omit "
- "both altogether.")
-
- super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
- self.scale = scale
-
-class _StringType(mysql_dialect._StringType):
- """Base for Drizzle string types."""
-
- def __init__(self, collation=None,
- binary=False,
- **kw):
- kw['national'] = False
- super(_StringType, self).__init__(collation=collation,
- binary=binary,
- **kw)
-
-
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
- """Drizzle NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a NUMERIC.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- """
- super(NUMERIC, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw)
-
-
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
- """Drizzle DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DECIMAL.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- """
- super(DECIMAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-
-class DOUBLE(_FloatType):
- """Drizzle DOUBLE type."""
-
- __visit_name__ = 'DOUBLE'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DOUBLE.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- """
- super(DOUBLE, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-class REAL(_FloatType, sqltypes.REAL):
- """Drizzle REAL type."""
-
- __visit_name__ = 'REAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a REAL.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- """
- super(REAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-class FLOAT(_FloatType, sqltypes.FLOAT):
- """Drizzle FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
- def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
- """Construct a FLOAT.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- """
- super(FLOAT, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
- def bind_processor(self, dialect):
- return None
-
-class INTEGER(sqltypes.INTEGER):
- """Drizzle INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-
- def __init__(self, **kw):
- """Construct an INTEGER.
-
- """
- super(INTEGER, self).__init__(**kw)
-
-class BIGINT(sqltypes.BIGINT):
- """Drizzle BIGINTEGER type."""
-
- __visit_name__ = 'BIGINT'
-
- def __init__(self, **kw):
- """Construct a BIGINTEGER.
-
- """
- super(BIGINT, self).__init__(**kw)
-
-
-class _DrizzleTime(mysql_dialect._MSTime):
- """Drizzle TIME type."""
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
- """Drizzle TIMESTAMP type."""
- __visit_name__ = 'TIMESTAMP'
-
-class TEXT(_StringType, sqltypes.TEXT):
- """Drizzle TEXT type, for text up to 2^16 characters."""
-
- __visit_name__ = 'TEXT'
-
- def __init__(self, length=None, **kw):
- """Construct a TEXT.
-
- :param length: Optional, if provided the server may optimize storage
- by substituting the smallest TEXT type sufficient to store
- ``length`` characters.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence over the 'binary' short-hand.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(TEXT, self).__init__(length=length, **kw)
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
- """Drizzle VARCHAR type, for variable-length character data."""
-
- __visit_name__ = 'VARCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a VARCHAR.
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence over the 'binary' short-hand.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(VARCHAR, self).__init__(length=length, **kwargs)
-
-class CHAR(_StringType, sqltypes.CHAR):
- """Drizzle CHAR type, for fixed-length character data."""
-
- __visit_name__ = 'CHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a CHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- super(CHAR, self).__init__(length=length, **kwargs)
-
-class ENUM(mysql_dialect.ENUM):
- """Drizzle ENUM type."""
-
- def __init__(self, *enums, **kw):
- """Construct an ENUM.
-
- Example:
-
- Column('myenum', ENUM("foo", "bar", "baz"))
-
- :param enums: The range of valid values for this ENUM. Values will be
- quoted when generating the schema according to the quoting flag (see
- below).
-
-        :param strict: Defaults to False: ensure that a given value is in this
-          ENUM's range of permissible values when inserting or updating rows.
-          Note that Drizzle will not raise a fatal error if you attempt to
-          store an out-of-range value; an alternate value will be stored
-          instead. (See Drizzle ENUM documentation.)
-
-        :param collation: Optional, a column-level collation for this string
-          value. Takes precedence over the 'binary' short-hand.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- :param quoting: Defaults to 'auto': automatically determine enum value
- quoting. If all enum values are surrounded by the same quoting
- character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
- 'quoted': values in enums are already quoted, they will be used
- directly when generating the schema - this usage is deprecated.
-
- 'unquoted': values in enums are not quoted, they will be escaped and
- surrounded by single quotes when generating the schema.
-
- Previous versions of this type always required manually quoted
- values to be supplied; future versions will always quote the string
- literals for you. This is a transitional option.
-
- """
- super(ENUM, self).__init__(*enums, **kw)
-
-class _DrizzleBoolean(sqltypes.Boolean):
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMERIC
-
-colspecs = {
- sqltypes.Numeric: NUMERIC,
- sqltypes.Float: FLOAT,
- sqltypes.Time: _DrizzleTime,
- sqltypes.Enum: ENUM,
- sqltypes.Boolean: _DrizzleBoolean,
-}
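-# For example, a Column declared with the generic sqltypes.Boolean will be
-# handled by _DrizzleBoolean above when this dialect is in use.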
-
-# All the types we have in Drizzle
-ischema_names = {
- 'BIGINT': BIGINT,
- 'BINARY': BINARY,
- 'BLOB': BLOB,
- 'BOOLEAN': BOOLEAN,
- 'CHAR': CHAR,
- 'DATE': DATE,
- 'DATETIME': DATETIME,
- 'DECIMAL': DECIMAL,
- 'DOUBLE': DOUBLE,
- 'ENUM': ENUM,
- 'FLOAT': FLOAT,
- 'INT': INTEGER,
- 'INTEGER': INTEGER,
- 'NUMERIC': NUMERIC,
- 'TEXT': TEXT,
- 'TIME': TIME,
- 'TIMESTAMP': TIMESTAMP,
- 'VARBINARY': VARBINARY,
- 'VARCHAR': VARCHAR,
-}
-
-class DrizzleCompiler(mysql_dialect.MySQLCompiler):
-
- def visit_typeclause(self, typeclause):
- type_ = typeclause.type.dialect_impl(self.dialect)
- if isinstance(type_, sqltypes.Integer):
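-            # Drizzle's CAST accepts a plain INTEGER target; the MySQL
-            # parent compiler would emit SIGNED/UNSIGNED keywords here.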
- return 'INTEGER'
- else:
- return super(DrizzleCompiler, self).visit_typeclause(typeclause)
-
- def visit_cast(self, cast, **kwargs):
- type_ = self.process(cast.typeclause)
- if type_ is None:
- return self.process(cast.clause)
-
- return 'CAST(%s AS %s)' % (self.process(cast.clause), type_)
-
-
-class DrizzleDDLCompiler(mysql_dialect.MySQLDDLCompiler):
- pass
-
-class DrizzleTypeCompiler(mysql_dialect.MySQLTypeCompiler):
- def _extend_numeric(self, type_, spec):
- return spec
-
- def _extend_string(self, type_, defaults, spec):
- """Extend a string-type declaration with standard SQL
- COLLATE annotations and Drizzle specific extensions.
-
- """
-
- def attr(name):
- return getattr(type_, name, defaults.get(name))
-
- if attr('collation'):
- collation = 'COLLATE %s' % type_.collation
- elif attr('binary'):
- collation = 'BINARY'
- else:
- collation = None
-
- return ' '.join([c for c in (spec, collation)
- if c is not None])
-
-    def visit_NCHAR(self, type_):
-        raise NotImplementedError("Drizzle does not support NCHAR")
-
-    def visit_NVARCHAR(self, type_):
-        raise NotImplementedError("Drizzle does not support NVARCHAR")
-
- def visit_FLOAT(self, type_):
- if type_.scale is not None and type_.precision is not None:
- return "FLOAT(%s, %s)" % (type_.precision, type_.scale)
- else:
- return "FLOAT"
-
- def visit_BOOLEAN(self, type_):
- return "BOOLEAN"
-
- def visit_BLOB(self, type_):
- return "BLOB"
-
-
-class DrizzleExecutionContext(mysql_dialect.MySQLExecutionContext):
- pass
-
-class DrizzleIdentifierPreparer(mysql_dialect.MySQLIdentifierPreparer):
- pass
-
-class DrizzleDialect(mysql_dialect.MySQLDialect):
- """Details of the Drizzle dialect. Not used directly in application code."""
-
- name = 'drizzle'
-
- _supports_cast = True
- supports_sequences = False
- supports_native_boolean = True
- supports_views = False
-
-
- default_paramstyle = 'format'
- colspecs = colspecs
-
- statement_compiler = DrizzleCompiler
- ddl_compiler = DrizzleDDLCompiler
- type_compiler = DrizzleTypeCompiler
- ischema_names = ischema_names
- preparer = DrizzleIdentifierPreparer
-
- def on_connect(self):
- """Force autocommit - Drizzle Bug#707842 doesn't set this
- properly"""
- def connect(conn):
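-            # Turn DBAPI-level autocommit off explicitly rather than
-            # relying on the server default (see the bug reference above).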
- conn.autocommit(False)
- return connect
-
- def do_commit(self, connection):
- """Execute a COMMIT."""
-
- connection.commit()
-
- def do_rollback(self, connection):
- """Execute a ROLLBACK."""
-
- connection.rollback()
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- """Return a Unicode SHOW TABLES from a given schema."""
- if schema is not None:
- current_schema = schema
- else:
- current_schema = self.default_schema_name
-
- charset = 'utf8'
- rp = connection.execute("SHOW TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(current_schema))
- return [row[0] for row in self._compat_fetchall(rp, charset=charset)]
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- raise NotImplementedError
-
- def _detect_casing(self, connection):
- """Sniff out identifier case sensitivity.
-
-        Cached per-connection. This value cannot change without a server
-        restart.
-
- """
- return 0
-
- def _detect_collations(self, connection):
- """Pull the active COLLATIONS list from the server.
-
- Cached per-connection.
- """
-
- collations = {}
- charset = self._connection_charset
-        rs = connection.execute(
-            'SELECT CHARACTER_SET_NAME, COLLATION_NAME '
-            'FROM data_dictionary.COLLATIONS')
- for row in self._compat_fetchall(rs, charset):
- collations[row[0]] = row[1]
- return collations
-
- def _detect_ansiquotes(self, connection):
- """Detect and adjust for the ANSI_QUOTES sql mode."""
-
- self._server_ansiquotes = False
-
- self._backslash_escapes = False
-
-log.class_logger(DrizzleDialect)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/mysqldb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/mysqldb.py
deleted file mode 100755
index dcf02609..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/drizzle/mysqldb.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""Support for the Drizzle database via the Drizzle-python adapter.
-
-Drizzle-Python is available at:
-
- http://sourceforge.net/projects/mysql-python
-
-At a minimum, version 1.2.1 or 1.2.2 should be used.
-
-Connecting
------------
-
-Connect string format::
-
- drizzle+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
-
-Character Sets
---------------
-
-Drizzle is all utf8 all the time.
-
-Known Issues
--------------
-
-Drizzle-python, at least as of version 1.2.2, has a serious memory leak
-related to unicode conversion, a feature which is disabled via
-``use_unicode=0``. The recommended connection form with SQLAlchemy is::
-
-    engine = create_engine(
-        'drizzle+mysqldb://scott:tiger@localhost/test?charset=utf8&use_unicode=0',
-        pool_recycle=3600)
-
-
-"""
-
-from sqlalchemy.dialects.drizzle.base import (DrizzleDialect,
- DrizzleExecutionContext,
- DrizzleCompiler, DrizzleIdentifierPreparer)
-from sqlalchemy.connectors.mysqldb import (
- MySQLDBExecutionContext,
- MySQLDBCompiler,
- MySQLDBIdentifierPreparer,
- MySQLDBConnector
- )
-
-class DrizzleExecutionContext_mysqldb(
- MySQLDBExecutionContext,
- DrizzleExecutionContext):
- pass
-
-
-class DrizzleCompiler_mysqldb(MySQLDBCompiler, DrizzleCompiler):
- pass
-
-
-class DrizzleIdentifierPreparer_mysqldb(
- MySQLDBIdentifierPreparer,
- DrizzleIdentifierPreparer):
- pass
-
-class DrizzleDialect_mysqldb(MySQLDBConnector, DrizzleDialect):
- execution_ctx_cls = DrizzleExecutionContext_mysqldb
- statement_compiler = DrizzleCompiler_mysqldb
- preparer = DrizzleIdentifierPreparer_mysqldb
-
- def _detect_charset(self, connection):
- """Sniff out the character set in use for connection results."""
- return 'utf8'
-
-
-dialect = DrizzleDialect_mysqldb
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/__init__.py
deleted file mode 100755
index e87b5bb5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# firebird/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.firebird import base, kinterbasdb
-
-base.dialect = kinterbasdb.dialect
-
-from sqlalchemy.dialects.firebird.base import \
-    SMALLINT, BIGINT, FLOAT, DATE, TIME, \
-    TEXT, NUMERIC, TIMESTAMP, VARCHAR, CHAR, BLOB,\
-    dialect
-
-__all__ = (
-    'SMALLINT', 'BIGINT', 'FLOAT', 'DATE', 'TIME',
-    'TEXT', 'NUMERIC', 'TIMESTAMP', 'VARCHAR', 'CHAR', 'BLOB',
-    'dialect'
-)
-
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/base.py
deleted file mode 100755
index 5f07b57b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/base.py
+++ /dev/null
@@ -1,700 +0,0 @@
-# firebird/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for the Firebird database.
-
-Connectivity is usually supplied via the kinterbasdb_ DBAPI module.
-
-Dialects
-~~~~~~~~
-
-Firebird offers two distinct dialects_ (not to be confused with a
-SQLAlchemy ``Dialect``):
-
-dialect 1
- This is the old syntax and behaviour, inherited from Interbase pre-6.0.
-
-dialect 3
- This is the newer and supported syntax, introduced in Interbase 6.0.
-
-The SQLAlchemy Firebird dialect detects these versions and
-adjusts its representation of SQL accordingly. However,
-support for dialect 1 is not well tested and probably has
-incompatibilities.
-
-Locking Behavior
-~~~~~~~~~~~~~~~~
-
-Firebird locks tables aggressively. For this reason, a DROP TABLE may
-hang until other transactions are released. SQLAlchemy does its best
-to release transactions as quickly as possible. The most common cause
-of hanging transactions is a non-fully consumed result set, i.e.::
-
- result = engine.execute("select * from table")
- row = result.fetchone()
- return
-
-Where above, the ``ResultProxy`` has not been fully consumed. The
-connection will be returned to the pool and the transactional state
-rolled back once the Python garbage collector reclaims the objects
-which hold onto the connection, which often occurs asynchronously.
-The above use case can be alleviated by calling ``first()`` on the
-``ResultProxy`` which will fetch the first row and immediately close
-all remaining cursor/connection resources.
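-
-For example, a minimal sketch of the safer pattern::
-
-    result = engine.execute("select * from table")
-    row = result.first()  # fetches one row, then closes cursor resources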
-
-RETURNING support
-~~~~~~~~~~~~~~~~~
-
-Firebird 2.0 supports returning a result set from inserts, and 2.1
-extends that to deletes and updates. This is generically exposed by
-the SQLAlchemy ``returning()`` method, such as::
-
- # INSERT..RETURNING
- result = table.insert().returning(table.c.col1, table.c.col2).\\
- values(name='foo')
- print result.fetchall()
-
- # UPDATE..RETURNING
- raises = empl.update().returning(empl.c.id, empl.c.salary).\\
- where(empl.c.sales>100).\\
- values(dict(salary=empl.c.salary * 1.1))
- print raises.fetchall()
-
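-    # DELETE..RETURNING -- a sketch following the same pattern
-    # (supported by Firebird 2.1 and above, per the note above)
-    removed = empl.delete().returning(empl.c.id).\\
-            where(empl.c.sales > 100)
-    print removed.fetchall()
-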
-
-.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html
-
-"""
-
-import datetime, re
-
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import exc, types as sqltypes, sql, util
-from sqlalchemy.sql import expression
-from sqlalchemy.engine import base, default, reflection
-from sqlalchemy.sql import compiler
-
-
-from sqlalchemy.types import (BIGINT, BLOB, BOOLEAN, DATE,
- FLOAT, INTEGER, NUMERIC, SMALLINT,
- TEXT, TIME, TIMESTAMP)
-
-
-RESERVED_WORDS = set([
- "active", "add", "admin", "after", "all", "alter", "and", "any", "as",
- "asc", "ascending", "at", "auto", "avg", "before", "begin", "between",
- "bigint", "bit_length", "blob", "both", "by", "case", "cast", "char",
- "character", "character_length", "char_length", "check", "close",
- "collate", "column", "commit", "committed", "computed", "conditional",
- "connect", "constraint", "containing", "count", "create", "cross",
- "cstring", "current", "current_connection", "current_date",
- "current_role", "current_time", "current_timestamp",
- "current_transaction", "current_user", "cursor", "database", "date",
- "day", "dec", "decimal", "declare", "default", "delete", "desc",
- "descending", "disconnect", "distinct", "do", "domain", "double",
- "drop", "else", "end", "entry_point", "escape", "exception",
- "execute", "exists", "exit", "external", "extract", "fetch", "file",
- "filter", "float", "for", "foreign", "from", "full", "function",
- "gdscode", "generator", "gen_id", "global", "grant", "group",
- "having", "hour", "if", "in", "inactive", "index", "inner",
- "input_type", "insensitive", "insert", "int", "integer", "into", "is",
- "isolation", "join", "key", "leading", "left", "length", "level",
- "like", "long", "lower", "manual", "max", "maximum_segment", "merge",
- "min", "minute", "module_name", "month", "names", "national",
- "natural", "nchar", "no", "not", "null", "numeric", "octet_length",
- "of", "on", "only", "open", "option", "or", "order", "outer",
- "output_type", "overflow", "page", "pages", "page_size", "parameter",
- "password", "plan", "position", "post_event", "precision", "primary",
- "privileges", "procedure", "protected", "rdb$db_key", "read", "real",
- "record_version", "recreate", "recursive", "references", "release",
- "reserv", "reserving", "retain", "returning_values", "returns",
- "revoke", "right", "rollback", "rows", "row_count", "savepoint",
- "schema", "second", "segment", "select", "sensitive", "set", "shadow",
- "shared", "singular", "size", "smallint", "snapshot", "some", "sort",
- "sqlcode", "stability", "start", "starting", "starts", "statistics",
- "sub_type", "sum", "suspend", "table", "then", "time", "timestamp",
- "to", "trailing", "transaction", "trigger", "trim", "uncommitted",
- "union", "unique", "update", "upper", "user", "using", "value",
- "values", "varchar", "variable", "varying", "view", "wait", "when",
- "where", "while", "with", "work", "write", "year",
- ])
-
-
-class _StringType(sqltypes.String):
- """Base for Firebird string types."""
-
-    def __init__(self, charset=None, **kw):
- self.charset = charset
- super(_StringType, self).__init__(**kw)
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
- """Firebird VARCHAR type"""
- __visit_name__ = 'VARCHAR'
-
-    def __init__(self, length=None, **kwargs):
- super(VARCHAR, self).__init__(length=length, **kwargs)
-
-class CHAR(_StringType, sqltypes.CHAR):
- """Firebird CHAR type"""
- __visit_name__ = 'CHAR'
-
-    def __init__(self, length=None, **kwargs):
- super(CHAR, self).__init__(length=length, **kwargs)
-
-colspecs = {
-}
-
-ischema_names = {
- 'SHORT': SMALLINT,
- 'LONG': BIGINT,
- 'QUAD': FLOAT,
- 'FLOAT': FLOAT,
- 'DATE': DATE,
- 'TIME': TIME,
- 'TEXT': TEXT,
- 'INT64': NUMERIC,
- 'DOUBLE': FLOAT,
- 'TIMESTAMP': TIMESTAMP,
- 'VARYING': VARCHAR,
- 'CSTRING': CHAR,
- 'BLOB': BLOB,
- }
-
-
-# TODO: date conversion types (should be implemented as _FBDateTime,
-# _FBDate, etc. as bind/result functionality is required)
-
-class FBTypeCompiler(compiler.GenericTypeCompiler):
- def visit_boolean(self, type_):
- return self.visit_SMALLINT(type_)
-
- def visit_datetime(self, type_):
- return self.visit_TIMESTAMP(type_)
-
- def visit_TEXT(self, type_):
- return "BLOB SUB_TYPE 1"
-
- def visit_BLOB(self, type_):
- return "BLOB SUB_TYPE 0"
-
- def _extend_string(self, type_, basic):
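-        # e.g. a VARCHAR(255) with charset='UNICODE_FSS' (an illustrative
-        # Firebird character set) renders as
-        # "VARCHAR(255) CHARACTER SET UNICODE_FSS".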
- charset = getattr(type_, 'charset', None)
- if charset is None:
- return basic
- else:
- return '%s CHARACTER SET %s' % (basic, charset)
-
- def visit_CHAR(self, type_):
- basic = super(FBTypeCompiler, self).visit_CHAR(type_)
- return self._extend_string(type_, basic)
-
- def visit_VARCHAR(self, type_):
- basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
- return self._extend_string(type_, basic)
-
-
-
-class FBCompiler(sql.compiler.SQLCompiler):
- """Firebird specific idiosincrasies"""
-
- def visit_mod(self, binary, **kw):
- # Firebird lacks a builtin modulo operator, but there is
- # an equivalent function in the ib_udf library.
- return "mod(%s, %s)" % (
- self.process(binary.left),
- self.process(binary.right))
-
- def visit_alias(self, alias, asfrom=False, **kwargs):
- if self.dialect._version_two:
- return super(FBCompiler, self).\
- visit_alias(alias, asfrom=asfrom, **kwargs)
- else:
- # Override to not use the AS keyword which FB 1.5 does not like
- if asfrom:
- alias_name = isinstance(alias.name,
- expression._generated_label) and \
- self._truncated_identifier("alias",
- alias.name) or alias.name
-
- return self.process(
- alias.original, asfrom=asfrom, **kwargs) + \
- " " + \
- self.preparer.format_alias(alias, alias_name)
- else:
- return self.process(alias.original, **kwargs)
-
- def visit_substring_func(self, func, **kw):
- s = self.process(func.clauses.clauses[0])
- start = self.process(func.clauses.clauses[1])
- if len(func.clauses.clauses) > 2:
- length = self.process(func.clauses.clauses[2])
- return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
- else:
- return "SUBSTRING(%s FROM %s)" % (s, start)
-
- def visit_length_func(self, function, **kw):
- if self.dialect._version_two:
- return "char_length" + self.function_argspec(function)
- else:
- return "strlen" + self.function_argspec(function)
-
- visit_char_length_func = visit_length_func
-
- def function_argspec(self, func, **kw):
- # TODO: this probably will need to be
- # narrowed to a fixed list, some no-arg functions
- # may require parens - see similar example in the oracle
- # dialect
- if func.clauses is not None and len(func.clauses):
- return self.process(func.clause_expr)
- else:
- return ""
-
- def default_from(self):
- return " FROM rdb$database"
-
- def visit_sequence(self, seq):
- return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
-
-    def get_select_precolumns(self, select):
-        """Called when building a ``SELECT`` statement; position is just
-        before the column list. Firebird puts the limit and offset right
-        after the ``SELECT`` keyword.
-        """
-
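-        # e.g. limit=10, offset=5 renders as "SELECT FIRST 10 SKIP 5 ..."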
- result = ""
- if select._limit:
- result += "FIRST %s " % self.process(sql.literal(select._limit))
- if select._offset:
- result +="SKIP %s " % self.process(sql.literal(select._offset))
- if select._distinct:
- result += "DISTINCT "
- return result
-
- def limit_clause(self, select):
- """Already taken care of in the `get_select_precolumns` method."""
-
- return ""
-
- def returning_clause(self, stmt, returning_cols):
-
- columns = [
- self.process(
- self.label_select_column(None, c, asfrom=False),
- within_columns_clause=True,
- result_map=self.result_map
- )
- for c in expression._select_iterables(returning_cols)
- ]
- return 'RETURNING ' + ', '.join(columns)
-
-
-class FBDDLCompiler(sql.compiler.DDLCompiler):
- """Firebird syntactic idiosincrasies"""
-
- def visit_create_sequence(self, create):
- """Generate a ``CREATE GENERATOR`` statement for the sequence."""
-
- # no syntax for these
- # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
-        if create.element.start is not None:
-            raise NotImplementedError(
-                "Firebird SEQUENCE doesn't support START WITH")
-        if create.element.increment is not None:
-            raise NotImplementedError(
-                "Firebird SEQUENCE doesn't support INCREMENT BY")
-
- if self.dialect._version_two:
- return "CREATE SEQUENCE %s" % \
- self.preparer.format_sequence(create.element)
- else:
- return "CREATE GENERATOR %s" % \
- self.preparer.format_sequence(create.element)
-
- def visit_drop_sequence(self, drop):
- """Generate a ``DROP GENERATOR`` statement for the sequence."""
-
- if self.dialect._version_two:
- return "DROP SEQUENCE %s" % \
- self.preparer.format_sequence(drop.element)
- else:
- return "DROP GENERATOR %s" % \
- self.preparer.format_sequence(drop.element)
-
-
-class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
- """Install Firebird specific reserved words."""
-
- reserved_words = RESERVED_WORDS
-
- def __init__(self, dialect):
- super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
-
-
-class FBExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, type_):
- """Get the next value from the sequence using ``gen_id()``."""
-
- return self._execute_scalar(
- "SELECT gen_id(%s, 1) FROM rdb$database" %
- self.dialect.identifier_preparer.format_sequence(seq),
- type_
- )
-
-
-class FBDialect(default.DefaultDialect):
- """Firebird dialect"""
-
- name = 'firebird'
-
- max_identifier_length = 31
-
- supports_sequences = True
- sequences_optional = False
- supports_default_values = True
- postfetch_lastrowid = False
-
- supports_native_boolean = False
-
- requires_name_normalize = True
- supports_empty_insert = False
-
- statement_compiler = FBCompiler
- ddl_compiler = FBDDLCompiler
- preparer = FBIdentifierPreparer
- type_compiler = FBTypeCompiler
- execution_ctx_cls = FBExecutionContext
-
- colspecs = colspecs
- ischema_names = ischema_names
-
- # defaults to dialect ver. 3,
- # will be autodetected off upon
- # first connect
- _version_two = True
-
- def initialize(self, connection):
- super(FBDialect, self).initialize(connection)
- self._version_two = ('firebird' in self.server_version_info and \
- self.server_version_info >= (2, )
- ) or \
- ('interbase' in self.server_version_info and \
- self.server_version_info >= (6, )
- )
-
- if not self._version_two:
- # TODO: whatever other pre < 2.0 stuff goes here
- self.ischema_names = ischema_names.copy()
- self.ischema_names['TIMESTAMP'] = sqltypes.DATE
- self.colspecs = {
- sqltypes.DateTime: sqltypes.DATE
- }
-
- self.implicit_returning = self._version_two and \
- self.__dict__.get('implicit_returning', True)
-
- def normalize_name(self, name):
- # Remove trailing spaces: FB uses a CHAR() type,
- # that is padded with spaces
- name = name and name.rstrip()
- if name is None:
- return None
- elif name.upper() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
- return name.lower()
- else:
- return name
-
- def denormalize_name(self, name):
- if name is None:
- return None
- elif name.lower() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
- return name.upper()
- else:
- return name
-
- def has_table(self, connection, table_name, schema=None):
- """Return ``True`` if the given table exists, ignoring
- the `schema`."""
-
- tblqry = """
- SELECT 1 AS has_table FROM rdb$database
- WHERE EXISTS (SELECT rdb$relation_name
- FROM rdb$relations
- WHERE rdb$relation_name=?)
- """
- c = connection.execute(tblqry, [self.denormalize_name(table_name)])
- return c.first() is not None
-
- def has_sequence(self, connection, sequence_name, schema=None):
- """Return ``True`` if the given sequence (generator) exists."""
-
- genqry = """
- SELECT 1 AS has_sequence FROM rdb$database
- WHERE EXISTS (SELECT rdb$generator_name
- FROM rdb$generators
- WHERE rdb$generator_name=?)
- """
- c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
- return c.first() is not None
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- s = """
- SELECT DISTINCT rdb$relation_name
- FROM rdb$relation_fields
- WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
- """
- return [self.normalize_name(row[0]) for row in connection.execute(s)]
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- s = """
- SELECT distinct rdb$view_name
- FROM rdb$view_relations
- """
- return [self.normalize_name(row[0]) for row in connection.execute(s)]
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- qry = """
- SELECT rdb$view_source AS view_source
- FROM rdb$relations
- WHERE rdb$relation_name=?
- """
- rp = connection.execute(qry, [self.denormalize_name(view_name)])
- row = rp.first()
- if row:
- return row['view_source']
- else:
- return None
-
- @reflection.cache
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- # Query to extract the PK/FK constrained fields of the given table
- keyqry = """
- SELECT se.rdb$field_name AS fname
- FROM rdb$relation_constraints rc
- JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
- WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
- """
- tablename = self.denormalize_name(table_name)
- # get primary key fields
- c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
- pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
- return pkfields
-
- @reflection.cache
- def get_column_sequence(self, connection,
- table_name, column_name,
- schema=None, **kw):
- tablename = self.denormalize_name(table_name)
- colname = self.denormalize_name(column_name)
- # Heuristic-query to determine the generator associated to a PK field
- genqry = """
- SELECT trigdep.rdb$depended_on_name AS fgenerator
- FROM rdb$dependencies tabdep
- JOIN rdb$dependencies trigdep
- ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
- AND trigdep.rdb$depended_on_type=14
- AND trigdep.rdb$dependent_type=2
- JOIN rdb$triggers trig ON
- trig.rdb$trigger_name=tabdep.rdb$dependent_name
- WHERE tabdep.rdb$depended_on_name=?
- AND tabdep.rdb$depended_on_type=0
- AND trig.rdb$trigger_type=1
- AND tabdep.rdb$field_name=?
- AND (SELECT count(*)
- FROM rdb$dependencies trigdep2
- WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
- """
- genr = connection.execute(genqry, [tablename, colname]).first()
- if genr is not None:
- return dict(name=self.normalize_name(genr['fgenerator']))
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- # Query to extract the details of all the fields of the given table
- tblqry = """
- SELECT r.rdb$field_name AS fname,
- r.rdb$null_flag AS null_flag,
- t.rdb$type_name AS ftype,
- f.rdb$field_sub_type AS stype,
- f.rdb$field_length/
- COALESCE(cs.rdb$bytes_per_character,1) AS flen,
- f.rdb$field_precision AS fprec,
- f.rdb$field_scale AS fscale,
- COALESCE(r.rdb$default_source,
- f.rdb$default_source) AS fdefault
- FROM rdb$relation_fields r
- JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
- JOIN rdb$types t
- ON t.rdb$type=f.rdb$field_type AND
- t.rdb$field_name='RDB$FIELD_TYPE'
- LEFT JOIN rdb$character_sets cs ON
- f.rdb$character_set_id=cs.rdb$character_set_id
- WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
- ORDER BY r.rdb$field_position
- """
- # get the PK, used to determine the eventual associated sequence
- pkey_cols = self.get_primary_keys(connection, table_name)
-
- tablename = self.denormalize_name(table_name)
- # get all of the fields for this table
- c = connection.execute(tblqry, [tablename])
- cols = []
- while True:
- row = c.fetchone()
- if row is None:
- break
- name = self.normalize_name(row['fname'])
- orig_colname = row['fname']
-
- # get the data type
- colspec = row['ftype'].rstrip()
- coltype = self.ischema_names.get(colspec)
- if coltype is None:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (colspec, name))
- coltype = sqltypes.NULLTYPE
- elif colspec == 'INT64':
- coltype = coltype(
- precision=row['fprec'],
- scale=row['fscale'] * -1)
- elif colspec in ('VARYING', 'CSTRING'):
- coltype = coltype(row['flen'])
- elif colspec == 'TEXT':
- coltype = TEXT(row['flen'])
- elif colspec == 'BLOB':
- if row['stype'] == 1:
- coltype = TEXT()
- else:
- coltype = BLOB()
- else:
- coltype = coltype()
-
- # does it have a default value?
- defvalue = None
- if row['fdefault'] is not None:
- # the value comes down as "DEFAULT 'value'": there may be
- # more than one whitespace around the "DEFAULT" keyword
- # and it may also be lower case
- # (see also http://tracker.firebirdsql.org/browse/CORE-356)
- defexpr = row['fdefault'].lstrip()
- assert defexpr[:8].rstrip().upper() == \
- 'DEFAULT', "Unrecognized default value: %s" % \
- defexpr
- defvalue = defexpr[8:].strip()
- if defvalue == 'NULL':
- # Redundant
- defvalue = None
- col_d = {
- 'name' : name,
- 'type' : coltype,
- 'nullable' : not bool(row['null_flag']),
- 'default' : defvalue,
- 'autoincrement':defvalue is None
- }
-
- if orig_colname.lower() == orig_colname:
- col_d['quote'] = True
-
- # if the PK is a single field, try to see if its linked to
- # a sequence thru a trigger
- if len(pkey_cols)==1 and name==pkey_cols[0]:
- seq_d = self.get_column_sequence(connection, tablename, name)
- if seq_d is not None:
- col_d['sequence'] = seq_d
-
- cols.append(col_d)
- return cols
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- # Query to extract the details of each UK/FK of the given table
- fkqry = """
- SELECT rc.rdb$constraint_name AS cname,
- cse.rdb$field_name AS fname,
- ix2.rdb$relation_name AS targetrname,
- se.rdb$field_name AS targetfname
- FROM rdb$relation_constraints rc
- JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
- JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
- JOIN rdb$index_segments cse ON
- cse.rdb$index_name=ix1.rdb$index_name
- JOIN rdb$index_segments se
- ON se.rdb$index_name=ix2.rdb$index_name
- AND se.rdb$field_position=cse.rdb$field_position
- WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
- ORDER BY se.rdb$index_name, se.rdb$field_position
- """
- tablename = self.denormalize_name(table_name)
-
- c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
- fks = util.defaultdict(lambda:{
- 'name' : None,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : None,
- 'referred_columns' : []
- })
-
- for row in c:
- cname = self.normalize_name(row['cname'])
- fk = fks[cname]
- if not fk['name']:
- fk['name'] = cname
- fk['referred_table'] = self.normalize_name(row['targetrname'])
- fk['constrained_columns'].append(
- self.normalize_name(row['fname']))
- fk['referred_columns'].append(
- self.normalize_name(row['targetfname']))
- return fks.values()
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema=None, **kw):
- qry = """
- SELECT ix.rdb$index_name AS index_name,
- ix.rdb$unique_flag AS unique_flag,
- ic.rdb$field_name AS field_name
- FROM rdb$indices ix
- JOIN rdb$index_segments ic
- ON ix.rdb$index_name=ic.rdb$index_name
- LEFT OUTER JOIN rdb$relation_constraints
- ON rdb$relation_constraints.rdb$index_name =
- ic.rdb$index_name
- WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
- AND rdb$relation_constraints.rdb$constraint_type IS NULL
- ORDER BY index_name, field_name
- """
- c = connection.execute(qry, [self.denormalize_name(table_name)])
-
- indexes = util.defaultdict(dict)
- for row in c:
- indexrec = indexes[row['index_name']]
- if 'name' not in indexrec:
- indexrec['name'] = self.normalize_name(row['index_name'])
- indexrec['column_names'] = []
- indexrec['unique'] = bool(row['unique_flag'])
-
- indexrec['column_names'].append(
- self.normalize_name(row['field_name']))
-
- return indexes.values()
-
- def do_execute(self, cursor, statement, parameters, context=None):
-        # kinterbasdb does not accept a None, but wants an empty list
-        # when there are no arguments.
- cursor.execute(statement, parameters or [])
-
- def do_rollback(self, connection):
- # Use the retaining feature, that keeps the transaction going
- connection.rollback(True)
-
- def do_commit(self, connection):
- # Use the retaining feature, that keeps the transaction going
- connection.commit(True)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/kinterbasdb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/kinterbasdb.py
deleted file mode 100755
index ebb7805a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/firebird/kinterbasdb.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# firebird/kinterbasdb.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-The most common way to connect to a Firebird engine is implemented by
-kinterbasdb__, currently maintained__ directly by the Firebird people.
-
-The connection URL is of the form
-``firebird[+kinterbasdb]://user:password@host:port/path/to/db[?key=value&key=value...]``.
-
-Kinterbasdb backend-specific keyword arguments are:
-
-* type_conv - select the kind of mapping done on the types: by default
- SQLAlchemy uses 200 with Unicode, datetime and decimal support (see
- details__).
-
-* concurrency_level - set the backend policy with regards to threading
- issues: by default SQLAlchemy uses policy 1 (see details__).
-
-* enable_rowcount - True by default, setting this to False disables
- the usage of "cursor.rowcount" with the
- Kinterbasdb dialect, which SQLAlchemy ordinarily calls upon automatically
- after any UPDATE or DELETE statement. When disabled, SQLAlchemy's
- ResultProxy will return -1 for result.rowcount. The rationale here is
- that Kinterbasdb requires a second round trip to the database when
-  .rowcount is called - since SQLAlchemy's ``ResultProxy`` automatically closes
- the cursor after a non-result-returning statement, rowcount must be
- called, if at all, before the result object is returned. Additionally,
- cursor.rowcount may not return correct results with older versions
- of Firebird, and setting this flag to False will also cause the
- SQLAlchemy ORM to ignore its usage. The behavior can also be controlled on a
- per-execution basis using the `enable_rowcount` option with
- :meth:`execution_options()`::
-
- conn = engine.connect().execution_options(enable_rowcount=True)
- r = conn.execute(stmt)
- print r.rowcount
-
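-For example, a hedged sketch passing these arguments to ``create_engine()``
-(keyword arguments not recognized by the engine itself are forwarded to the
-dialect constructor; the URL placeholders follow the form above)::
-
-    engine = create_engine(
-        'firebird+kinterbasdb://user:password@host:port/path/to/db',
-        type_conv=200, concurrency_level=1, enable_rowcount=False)
-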
-__ http://sourceforge.net/projects/kinterbasdb
-__ http://firebirdsql.org/index.php?op=devel&sub=python
-__ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#adv_param_conv_dynamic_type_translation
-__ http://kinterbasdb.sourceforge.net/dist_docs/usage.html#special_issue_concurrency
-"""
-
-from sqlalchemy.dialects.firebird.base import FBDialect, \
- FBCompiler, FBExecutionContext
-from sqlalchemy import util, types as sqltypes
-from sqlalchemy.util.compat import decimal
-from re import match
-
-
-class _FBNumeric_kinterbasdb(sqltypes.Numeric):
- def bind_processor(self, dialect):
- def process(value):
- if isinstance(value, decimal.Decimal):
- return str(value)
- else:
- return value
- return process
-
-class FBExecutionContext_kinterbasdb(FBExecutionContext):
- @property
- def rowcount(self):
- if self.execution_options.get('enable_rowcount',
- self.dialect.enable_rowcount):
- return self.cursor.rowcount
- else:
- return -1
-
-class FBDialect_kinterbasdb(FBDialect):
- driver = 'kinterbasdb'
- supports_sane_rowcount = False
- supports_sane_multi_rowcount = False
- execution_ctx_cls = FBExecutionContext_kinterbasdb
-
- supports_native_decimal = True
-
-    colspecs = util.update_copy(
-        FBDialect.colspecs,
-        {
-            sqltypes.Numeric: _FBNumeric_kinterbasdb,
-        }
-    )
-
- def __init__(self, type_conv=200, concurrency_level=1,
- enable_rowcount=True, **kwargs):
- super(FBDialect_kinterbasdb, self).__init__(**kwargs)
- self.enable_rowcount = enable_rowcount
- self.type_conv = type_conv
- self.concurrency_level = concurrency_level
- if enable_rowcount:
- self.supports_sane_rowcount = True
-
- @classmethod
- def dbapi(cls):
- k = __import__('kinterbasdb')
- return k
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- if opts.get('port'):
- opts['host'] = "%s/%s" % (opts['host'], opts['port'])
- del opts['port']
- opts.update(url.query)
-
- util.coerce_kw_type(opts, 'type_conv', int)
-
- type_conv = opts.pop('type_conv', self.type_conv)
- concurrency_level = opts.pop('concurrency_level',
- self.concurrency_level)
-
- if self.dbapi is not None:
- initialized = getattr(self.dbapi, 'initialized', None)
- if initialized is None:
- # CVS rev 1.96 changed the name of the attribute:
- # http://kinterbasdb.cvs.sourceforge.net/viewvc/kinterbasdb/Kinterbasdb-3.0/__init__.py?r1=1.95&r2=1.96
- initialized = getattr(self.dbapi, '_initialized', False)
- if not initialized:
- self.dbapi.init(type_conv=type_conv,
- concurrency_level=concurrency_level)
- return ([], opts)
-
- def _get_server_version_info(self, connection):
- """Get the version of the Firebird server used by a connection.
-
- Returns a tuple of (`major`, `minor`, `build`), three integers
- representing the version of the attached server.
- """
-
- # This is the simpler approach (the other uses the services api),
- # that for backward compatibility reasons returns a string like
- # LI-V6.3.3.12981 Firebird 2.0
- # where the first version is a fake one resembling the old
- # Interbase signature.
-
- fbconn = connection.connection
- version = fbconn.server_version
-
- return self._parse_version_info(version)
-
- def _parse_version_info(self, version):
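-        # e.g. "LI-V6.3.3.12981 Firebird 2.0" -> (2, 0, 12981, 'firebird'),
-        # while a bare "WI-V6.3.3.12981" parses as (6, 3, 3, 'interbase').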
-        m = match(r'\w+-V(\d+)\.(\d+)\.(\d+)\.(\d+)( \w+ (\d+)\.(\d+))?', version)
- if not m:
- raise AssertionError(
- "Could not determine version from string '%s'" % version)
-
-        if m.group(5) is not None:
- return tuple([int(x) for x in m.group(6, 7, 4)] + ['firebird'])
- else:
- return tuple([int(x) for x in m.group(1, 2, 3)] + ['interbase'])
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, (self.dbapi.OperationalError,
- self.dbapi.ProgrammingError)):
- msg = str(e)
- return ('Unable to complete network request to host' in msg or
- 'Invalid connection state' in msg or
- 'Invalid cursor state' in msg or
- 'connection shutdown' in msg)
- else:
- return False
-
-dialect = FBDialect_kinterbasdb
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/__init__.py
deleted file mode 100755
index 9dfd0ca5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# informix/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.informix import base, informixdb
-
-base.dialect = informixdb.dialect \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/base.py
deleted file mode 100755
index f6749e5c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/base.py
+++ /dev/null
@@ -1,593 +0,0 @@
-# informix/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# coding: gbk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Informix database.
-
-This dialect is mostly functional as of SQLAlchemy 0.6.5.
-
-
-"""
-
-
-import datetime
-
-from sqlalchemy import sql, schema, exc, pool, util
-from sqlalchemy.sql import compiler, text
-from sqlalchemy.engine import default, reflection
-from sqlalchemy import types as sqltypes
-
-RESERVED_WORDS = set(
- ["abs", "absolute", "access", "access_method", "acos", "active", "add",
- "address", "add_months", "admin", "after", "aggregate", "alignment",
- "all", "allocate", "all_rows", "altere", "and", "ansi", "any", "append",
- "array", "as", "asc", "ascii", "asin", "at", "atan", "atan2", "attach",
- "attributes", "audit", "authentication", "authid", "authorization",
- "authorized", "auto", "autofree", "auto_reprepare", "auto_stat_mode",
- "avg", "avoid_execute", "avoid_fact", "avoid_full", "avoid_hash",
- "avoid_index", "avoid_index_sj", "avoid_multi_index", "avoid_nl",
- "avoid_star_join", "avoid_subqf", "based", "before", "begin",
- "between", "bigint", "bigserial", "binary", "bitand", "bitandnot",
- "bitnot", "bitor", "bitxor", "blob", "blobdir", "boolean", "both",
- "bound_impl_pdq", "buffered", "builtin", "by", "byte", "cache", "call",
- "cannothash", "cardinality", "cascade", "case", "cast", "ceil", "char",
- "character", "character_length", "char_length", "check", "class",
- "class_origin", "client", "clob", "clobdir", "close", "cluster",
- "clustersize", "cobol", "codeset", "collation", "collection",
- "column", "columns", "commit", "committed", "commutator", "component",
- "components", "concat", "concurrent", "connect", "connection",
- "connection_name", "connect_by_iscycle", "connect_by_isleaf",
- "connect_by_rootconst", "constraint", "constraints", "constructor",
- "context", "continue", "copy", "cos", "costfunc", "count", "crcols",
- "create", "cross", "current", "current_role", "currval", "cursor",
- "cycle", "database", "datafiles", "dataskip", "date", "datetime",
- "day", "dba", "dbdate", "dbinfo", "dbpassword", "dbsecadm",
- "dbservername", "deallocate", "debug", "debugmode", "debug_env", "dec",
- "decimal", "declare", "decode", "decrypt_binary", "decrypt_char",
- "dec_t", "default", "default_role", "deferred", "deferred_prepare",
- "define", "delay", "delete", "deleting", "delimited", "delimiter",
- "deluxe", "desc", "describe", "descriptor", "detach", "diagnostics",
- "directives", "dirty", "disable", "disabled", "disconnect", "disk",
- "distinct", "distributebinary", "distributesreferences",
- "distributions", "document", "domain", "donotdistribute", "dormant",
- "double", "drop", "dtime_t", "each", "elif", "else", "enabled",
- "encryption", "encrypt_aes", "encrypt_tdes", "end", "enum",
- "environment", "error", "escape", "exception", "exclusive", "exec",
- "execute", "executeanywhere", "exemption", "exists", "exit", "exp",
- "explain", "explicit", "express", "expression", "extdirectives",
- "extend", "extent", "external", "fact", "false", "far", "fetch",
- "file", "filetoblob", "filetoclob", "fillfactor", "filtering", "first",
- "first_rows", "fixchar", "fixed", "float", "floor", "flush", "for",
- "force", "forced", "force_ddl_exec", "foreach", "foreign", "format",
- "format_units", "fortran", "found", "fraction", "fragment",
- "fragments", "free", "from", "full", "function", "general", "get",
- "gethint", "global", "go", "goto", "grant", "greaterthan",
- "greaterthanorequal", "group", "handlesnulls", "hash", "having", "hdr",
- "hex", "high", "hint", "hold", "home", "hour", "idslbacreadarray",
- "idslbacreadset", "idslbacreadtree", "idslbacrules",
- "idslbacwritearray", "idslbacwriteset", "idslbacwritetree",
- "idssecuritylabel", "if", "ifx_auto_reprepare", "ifx_batchedread_table",
- "ifx_int8_t", "ifx_lo_create_spec_t", "ifx_lo_stat_t", "immediate",
- "implicit", "implicit_pdq", "in", "inactive", "increment", "index",
- "indexes", "index_all", "index_sj", "indicator", "informix", "init",
- "initcap", "inline", "inner", "inout", "insert", "inserting", "instead",
- "int", "int8", "integ", "integer", "internal", "internallength",
- "interval", "into", "intrvl_t", "is", "iscanonical", "isolation",
- "item", "iterator", "java", "join", "keep", "key", "label", "labeleq",
- "labelge", "labelglb", "labelgt", "labelle", "labellt", "labellub",
- "labeltostring", "language", "last", "last_day", "leading", "left",
- "length", "lessthan", "lessthanorequal", "let", "level", "like",
- "limit", "list", "listing", "load", "local", "locator", "lock", "locks",
- "locopy", "loc_t", "log", "log10", "logn", "long", "loop", "lotofile",
- "low", "lower", "lpad", "ltrim", "lvarchar", "matched", "matches",
- "max", "maxerrors", "maxlen", "maxvalue", "mdy", "median", "medium",
- "memory", "memory_resident", "merge", "message_length", "message_text",
- "middle", "min", "minute", "minvalue", "mod", "mode", "moderate",
- "modify", "module", "money", "month", "months_between", "mounting",
- "multiset", "multi_index", "name", "nchar", "negator", "new", "next",
- "nextval", "next_day", "no", "nocache", "nocycle", "nomaxvalue",
- "nomigrate", "nominvalue", "none", "non_dim", "non_resident", "noorder",
- "normal", "not", "notemplatearg", "notequal", "null", "nullif",
- "numeric", "numrows", "numtodsinterval", "numtoyminterval", "nvarchar",
- "nvl", "octet_length", "of", "off", "old", "on", "online", "only",
- "opaque", "opclass", "open", "optcompind", "optical", "optimization",
- "option", "or", "order", "ordered", "out", "outer", "output",
- "override", "page", "parallelizable", "parameter", "partition",
- "pascal", "passedbyvalue", "password", "pdqpriority", "percaltl_cos",
- "pipe", "pli", "pload", "policy", "pow", "power", "precision",
- "prepare", "previous", "primary", "prior", "private", "privileges",
- "procedure", "properties", "public", "put", "raise", "range", "raw",
- "read", "real", "recordend", "references", "referencing", "register",
- "rejectfile", "relative", "release", "remainder", "rename",
- "reoptimization", "repeatable", "replace", "replication", "reserve",
- "resolution", "resource", "restart", "restrict", "resume", "retain",
- "retainupdatelocks", "return", "returned_sqlstate", "returning",
- "returns", "reuse", "revoke", "right", "robin", "role", "rollback",
- "rollforward", "root", "round", "routine", "row", "rowid", "rowids",
- "rows", "row_count", "rpad", "rtrim", "rule", "sameas", "samples",
- "sampling", "save", "savepoint", "schema", "scroll", "seclabel_by_comp",
- "seclabel_by_name", "seclabel_to_char", "second", "secondary",
- "section", "secured", "security", "selconst", "select", "selecting",
- "selfunc", "selfuncargs", "sequence", "serial", "serial8",
- "serializable", "serveruuid", "server_name", "session", "set",
- "setsessionauth", "share", "short", "siblings", "signed", "sin",
- "sitename", "size", "skall", "skinhibit", "skip", "skshow",
- "smallfloat", "smallint", "some", "specific", "sql", "sqlcode",
- "sqlcontext", "sqlerror", "sqlstate", "sqlwarning", "sqrt",
- "stability", "stack", "standard", "start", "star_join", "statchange",
- "statement", "static", "statistics", "statlevel", "status", "stdev",
- "step", "stop", "storage", "store", "strategies", "string",
- "stringtolabel", "struct", "style", "subclass_origin", "substr",
- "substring", "sum", "support", "sync", "synonym", "sysdate",
- "sysdbclose", "sysdbopen", "system", "sys_connect_by_path", "table",
- "tables", "tan", "task", "temp", "template", "test", "text", "then",
- "time", "timeout", "to", "today", "to_char", "to_date",
- "to_dsinterval", "to_number", "to_yminterval", "trace", "trailing",
- "transaction", "transition", "tree", "trigger", "triggers", "trim",
- "true", "trunc", "truncate", "trusted", "type", "typedef", "typeid",
- "typename", "typeof", "uid", "uncommitted", "under", "union",
- "unique", "units", "unknown", "unload", "unlock", "unsigned",
- "update", "updating", "upon", "upper", "usage", "use",
- "uselastcommitted", "user", "use_hash", "use_nl", "use_subqf",
- "using", "value", "values", "var", "varchar", "variable", "variance",
- "variant", "varying", "vercols", "view", "violations", "void",
- "volatile", "wait", "warning", "weekday", "when", "whenever", "where",
- "while", "with", "without", "work", "write", "writedown", "writeup",
- "xadatasource", "xid", "xload", "xunload", "year"
- ])
-
-class InfoDateTime(sqltypes.DateTime):
- def bind_processor(self, dialect):
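-        # Informix DATETIME YEAR TO SECOND has no sub-second precision
-        # (see InfoTypeCompiler below), so microseconds are stripped
-        # before binding.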
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
-class InfoTime(sqltypes.Time):
- def bind_processor(self, dialect):
- def process(value):
- if value is not None:
- if value.microsecond:
- value = value.replace(microsecond=0)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
- else:
- return value
- return process
-
-colspecs = {
- sqltypes.DateTime : InfoDateTime,
- sqltypes.TIMESTAMP: InfoDateTime,
- sqltypes.Time: InfoTime,
-}
-
-
-ischema_names = {
-    0  : sqltypes.CHAR,        # CHAR
-    1  : sqltypes.SMALLINT,    # SMALLINT
-    2  : sqltypes.INTEGER,     # INT
-    3  : sqltypes.FLOAT,       # Float
-    4  : sqltypes.Float,       # SmallFloat (coltype 4; the original reused key 3)
-    5  : sqltypes.DECIMAL,     # DECIMAL
-    6  : sqltypes.Integer,     # Serial
-    7  : sqltypes.DATE,        # DATE
-    8  : sqltypes.Numeric,     # MONEY
-    10 : sqltypes.DATETIME,    # DATETIME
-    11 : sqltypes.LargeBinary, # BYTE
-    12 : sqltypes.TEXT,        # TEXT
-    13 : sqltypes.VARCHAR,     # VARCHAR
-    15 : sqltypes.NCHAR,       # NCHAR
-    16 : sqltypes.NVARCHAR,    # NVARCHAR
-    17 : sqltypes.Integer,     # INT8
-    18 : sqltypes.Integer,     # Serial8
-    43 : sqltypes.String,      # LVARCHAR
-    # the original also listed -1: sqltypes.BLOB; duplicate dict keys
-    # silently collide, so only the CLOB entry was ever effective.
-    -1 : sqltypes.CLOB,        # CLOB
-}
-
-
-class InfoTypeCompiler(compiler.GenericTypeCompiler):
- def visit_DATETIME(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_TIME(self, type_):
- return "DATETIME HOUR TO SECOND"
-
- def visit_TIMESTAMP(self, type_):
- return "DATETIME YEAR TO SECOND"
-
- def visit_large_binary(self, type_):
- return "BYTE"
-
- def visit_boolean(self, type_):
- return "SMALLINT"
-
-class InfoSQLCompiler(compiler.SQLCompiler):
- def default_from(self):
- return " from systables where tabname = 'systables' "
-
- def get_select_precolumns(self, select):
- s = ""
- if select._offset:
- s += "SKIP %s " % select._offset
- if select._limit:
- s += "FIRST %s " % select._limit
- s += select._distinct and "DISTINCT " or ""
- return s
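-
- # Illustrative (hypothetical query): with _offset=10 and _limit=5 the
- # precolumns come out as "SKIP 10 FIRST 5 ", i.e.
- #
- #     SELECT SKIP 10 FIRST 5 col1, col2 FROM tab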
-
- def visit_select(self, select, asfrom=False, parens=True, **kw):
- text = compiler.SQLCompiler.visit_select(self, select, asfrom, parens, **kw)
- if asfrom and parens and self.dialect.server_version_info < (11,):
- # assuming version 11 doesn't need this; not tested
- return "table(multiset" + text + ")"
- else:
- return text
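-
- # Illustrative only: on a pre-11 server, a parenthesized subquery in
- # the FROM list such as "(SELECT a FROM t)" is emitted as
- # "table(multiset(SELECT a FROM t))" by the wrapping above.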
-
- def limit_clause(self, select):
- return ""
-
- def visit_function(self, func, **kw):
- if func.name.lower() == 'current_date':
- return "today"
- elif func.name.lower() == 'current_time':
- return "CURRENT HOUR TO SECOND"
- elif func.name.lower() in ('current_timestamp', 'now'):
- return "CURRENT YEAR TO SECOND"
- else:
- return compiler.SQLCompiler.visit_function(self, func, **kw)
-
- def visit_mod(self, binary, **kw):
- return "MOD(%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
-
-class InfoDDLCompiler(compiler.DDLCompiler):
-
- def visit_add_constraint(self, create):
- return "ALTER TABLE %s ADD CONSTRAINT %s" % (
- self.preparer.format_table(create.element.table),
- self.process(create.element)
- )
-
- def get_column_specification(self, column, **kw):
- colspec = self.preparer.format_column(column)
- first = None
- if column.primary_key and column.autoincrement:
- try:
- first = [c for c in column.table.primary_key.columns
- if (c.autoincrement and
- isinstance(c.type, sqltypes.Integer) and
- not c.foreign_keys)].pop(0)
- except IndexError:
- pass
-
- if column is first:
- colspec += " SERIAL"
- else:
- colspec += " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- return colspec
-
- def get_column_default_string(self, column):
- if (isinstance(column.server_default, schema.DefaultClause) and
- isinstance(column.server_default.arg, basestring)):
- if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
- return self.sql_compiler.process(text(column.server_default.arg))
-
- return super(InfoDDLCompiler, self).get_column_default_string(column)
-
- ### Informix wants the constraint name at the end, hence this is copied from sql/compiler.py
- def visit_primary_key_constraint(self, constraint):
- if len(constraint) == 0:
- return ''
- text = "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s" % self.preparer.format_constraint(constraint)
- return text
-
- def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
- remote_table = list(constraint._elements.values())[0].column.table
- text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
- for f in constraint._elements.values()),
- preparer.format_table(remote_table),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
- for f in constraint._elements.values())
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += " CONSTRAINT %s " % \
- preparer.format_constraint(constraint)
- return text
-
- def visit_unique_constraint(self, constraint):
- text = "UNIQUE (%s)" % (', '.join(self.preparer.quote(c.name, c.quote) for c in constraint))
- text += self.define_constraint_deferrability(constraint)
-
- if constraint.name is not None:
- text += "CONSTRAINT %s " % self.preparer.format_constraint(constraint)
- return text
-
-class InformixIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
-
-class InformixDialect(default.DefaultDialect):
- name = 'informix'
-
- max_identifier_length = 128 # adjusts at runtime based on server version
-
- type_compiler = InfoTypeCompiler
- statement_compiler = InfoSQLCompiler
- ddl_compiler = InfoDDLCompiler
- colspecs = colspecs
- ischema_names = ischema_names
- preparer = InformixIdentifierPreparer
- default_paramstyle = 'qmark'
-
- def __init__(self, has_transactions=True, *args, **kwargs):
- self.has_transactions = has_transactions
- default.DefaultDialect.__init__(self, *args, **kwargs)
-
- def initialize(self, connection):
- super(InformixDialect, self).initialize(connection)
-
- # http://www.querix.com/support/knowledge-base/error_number_message/error_200
- if self.server_version_info < (9, 2):
- self.max_identifier_length = 18
- else:
- self.max_identifier_length = 128
-
- def do_begin(self, connection):
- cu = connection.cursor()
- cu.execute('SET LOCK MODE TO WAIT')
- if self.has_transactions:
- cu.execute('SET ISOLATION TO REPEATABLE READ')
-
- def do_commit(self, connection):
- if self.has_transactions:
- connection.commit()
-
- def do_rollback(self, connection):
- if self.has_transactions:
- connection.rollback()
-
- def _get_table_names(self, connection, schema, type, **kw):
- schema = schema or self.default_schema_name
- s = "select tabname, owner from systables where owner=? and tabtype=?"
- return [row[0] for row in connection.execute(s, schema, type)]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'T', **kw)
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- return self._get_table_names(connection, schema, 'V', **kw)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = "select owner from systables"
- return [row[0] for row in connection.execute(s)]
-
- def has_table(self, connection, table_name, schema=None):
- schema = schema or self.default_schema_name
- cursor = connection.execute(
- """select tabname from systables where tabname=? and owner=?""",
- table_name, schema)
- return cursor.first() is not None
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select colname, coltype, collength, t3.default, t1.colno from
- syscolumns as t1 , systables as t2 , OUTER sysdefaults as t3
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=?
- and t3.tabid = t2.tabid and t3.colno = t1.colno
- order by t1.colno""", table_name, schema)
-
- primary_cols = self.get_primary_keys(connection, table_name, schema, **kw)
-
- columns = []
- rows = c.fetchall()
- for name, colattr, collength, default, colno in rows:
- name = name.lower()
-
- autoincrement = False
- primary_key = False
-
- if name in primary_cols:
- primary_key = True
-
- # in 7.31, colattr is a packed value: the low byte is the
- # column type and the 0x100 bit is set for NOT NULL columns
- not_nullable, coltype = divmod(colattr, 256)
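- # e.g. colattr 262 == 0x106 -> divmod(262, 256) == (1, 6):
- # a NOT NULL SERIAL column (illustrative values)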
- if coltype not in (0, 13) and default:
- default = default.split()[-1]
-
- if coltype == 6: # Serial, mark as autoincrement
- autoincrement = True
-
- if coltype == 0 or coltype == 13: # char, varchar
- coltype = ischema_names[coltype](collength)
- if default:
- default = "'%s'" % default
- elif coltype == 5: # decimal
- precision, scale = (collength & 0xFF00) >> 8, collength & 0xFF
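- # e.g. collength 0x802 (2050) -> precision 8, scale 2
- # (illustrative); a scale byte of 255 means no fixed scale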
- if scale == 255:
- scale = 0
- coltype = sqltypes.Numeric(precision, scale)
- else:
- try:
- coltype = ischema_names[coltype]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, name))
- coltype = sqltypes.NULLTYPE
-
- column_info = dict(name=name, type=coltype, nullable=not not_nullable,
- default=default, autoincrement=autoincrement,
- primary_key=primary_key)
- columns.append(column_info)
- return columns
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- schema_sel = schema or self.default_schema_name
- c = connection.execute(
- """select t1.constrname as cons_name,
- t4.colname as local_column, t7.tabname as remote_table,
- t6.colname as remote_column, t7.owner as remote_owner
- from sysconstraints as t1 , systables as t2 ,
- sysindexes as t3 , syscolumns as t4 ,
- sysreferences as t5 , syscolumns as t6 , systables as t7 ,
- sysconstraints as t8 , sysindexes as t9
- where t1.tabid = t2.tabid and t2.tabname=? and t2.owner=? and t1.constrtype = 'R'
- and t3.tabid = t2.tabid and t3.idxname = t1.idxname
- and t4.tabid = t2.tabid and t4.colno in (t3.part1, t3.part2, t3.part3,
- t3.part4, t3.part5, t3.part6, t3.part7, t3.part8, t3.part9, t3.part10,
- t3.part11, t3.part12, t3.part13, t3.part14, t3.part15, t3.part16)
- and t5.constrid = t1.constrid and t8.constrid = t5.primary
- and t6.tabid = t5.ptabid and t6.colno in (t9.part1, t9.part2, t9.part3,
- t9.part4, t9.part5, t9.part6, t9.part7, t9.part8, t9.part9, t9.part10,
- t9.part11, t9.part12, t9.part13, t9.part14, t9.part15, t9.part16) and t9.idxname =
- t8.idxname
- and t7.tabid = t5.ptabid""", table_name, schema_sel)
-
-
- def fkey_rec():
- return {
- 'name' : None,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : None,
- 'referred_columns' : []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- rows = c.fetchall()
- for cons_name, local_column, \
- remote_table, remote_column, remote_owner in rows:
-
- rec = fkeys[cons_name]
- rec['name'] = cons_name
- local_cols, remote_cols = \
- rec['constrained_columns'], rec['referred_columns']
-
- if not rec['referred_table']:
- rec['referred_table'] = remote_table
- if schema is not None:
- rec['referred_schema'] = remote_owner
-
- if local_column not in local_cols:
- local_cols.append(local_column)
- if remote_column not in remote_cols:
- remote_cols.append(remote_column)
-
- return fkeys.values()
-
- @reflection.cache
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- schema = schema or self.default_schema_name
-
- # Select the column positions from sysindexes for sysconstraints
- data = connection.execute(
- """select t2.*
- from systables as t1, sysindexes as t2, sysconstraints as t3
- where t1.tabid=t2.tabid and t1.tabname=? and t1.owner=?
- and t2.idxname=t3.idxname and t3.constrtype='P'""",
- table_name, schema
- ).fetchall()
-
- colpositions = set()
-
- for row in data:
- colpos = set([getattr(row, 'part%d' % x) for x in range(1, 17)])
- colpositions |= colpos
-
- if not len(colpositions):
- return []
-
- # Select the column names using the columnpositions
- # TODO: Maybe cache a bit of those col infos (eg select all colnames for one table)
- place_holder = ','.join('?'*len(colpositions))
- c = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colpositions
- ).fetchall()
-
- return reduce(lambda x,y: list(x)+list(y), c, [])
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema, **kw):
- # TODO: schema...
- c = connection.execute(
- """select t1.*
- from sysindexes as t1 , systables as t2
- where t1.tabid = t2.tabid and t2.tabname=?""",
- table_name)
-
- indexes = []
- for row in c.fetchall():
- colpositions = [getattr(row, 'part%d' % x) for x in range(1, 17)]
- colpositions = [x for x in colpositions if x]
- place_holder = ','.join('?' * len(colpositions))
- cols = connection.execute(
- """select t1.colname
- from syscolumns as t1, systables as t2
- where t2.tabname=? and t1.tabid = t2.tabid and
- t1.colno in (%s)""" % place_holder,
- table_name, *colpositions
- ).fetchall()
- # flatten the single-column rows into a flat list of names
- cols = reduce(lambda x, y: list(x) + list(y), cols, [])
- indexes.append({
- 'name': row.idxname,
- 'unique': row.idxtype.lower() == 'u',
- 'column_names': cols
- })
- return indexes
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- schema = schema or self.default_schema_name
- c = connection.execute(
- """select t1.viewtext
- from sysviews as t1 , systables as t2
- where t1.tabid=t2.tabid and t2.tabname=?
- and t2.owner=? order by seqno""",
- view_name, schema).fetchall()
-
- return ''.join([row[0] for row in c])
-
- def _get_default_schema_name(self, connection):
- return connection.execute('select CURRENT_ROLE from systables').scalar()
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/informixdb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/informixdb.py
deleted file mode 100755
index 1b6833af..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/informix/informixdb.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# informix/informixdb.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for the informixdb DBAPI.
-
-informixdb is available at:
-
- http://informixdb.sourceforge.net/
-
-Connecting
-^^^^^^^^^^
-
-A sample Informix connection::
-
- engine = create_engine('informix+informixdb://user:password@host/dbname')
-
-"""
-
-import re
-
-from sqlalchemy.dialects.informix.base import InformixDialect
-from sqlalchemy.engine import default
-
-VERSION_RE = re.compile(r'(\d+)\.(\d+)(.+\d+)')
-
-class InformixExecutionContext_informixdb(default.DefaultExecutionContext):
- def post_exec(self):
- if self.isinsert:
- self._lastrowid = self.cursor.sqlerrd[1]
-
- def get_lastrowid(self):
- return self._lastrowid
-
-
-class InformixDialect_informixdb(InformixDialect):
- driver = 'informixdb'
- execution_ctx_cls = InformixExecutionContext_informixdb
-
- @classmethod
- def dbapi(cls):
- return __import__('informixdb')
-
- def create_connect_args(self, url):
- if url.host:
- dsn = '%s@%s' % (url.database, url.host)
- else:
- dsn = url.database
-
- if url.username:
- opt = {'user': url.username, 'password': url.password}
- else:
- opt = {}
-
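- # Illustrative (URL pieces are placeholders):
- # 'informix+informixdb://user:pw@host/db'
- # -> dsn 'db@host', opt {'user': 'user', 'password': 'pw'}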
- return ([dsn], opt)
-
- def _get_server_version_info(self, connection):
- # http://informixdb.sourceforge.net/manual.html#inspecting-version-numbers
- v = VERSION_RE.split(connection.connection.dbms_version)
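- # e.g. a dbms_version like '11.50.FC3' (illustrative) splits into
- # ['', '11', '50', '.FC3', ''], giving (11, 50, '.FC3') below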
- return (int(v[1]), int(v[2]), v[3])
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- return 'closed the connection' in str(e) \
- or 'connection not open' in str(e)
- else:
- return False
-
-
-dialect = InformixDialect_informixdb
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/__init__.py
deleted file mode 100755
index 0a39965b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# maxdb/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.maxdb import base, sapdb
-
-base.dialect = sapdb.dialect \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/base.py
deleted file mode 100755
index a6d43a2a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/base.py
+++ /dev/null
@@ -1,1116 +0,0 @@
-# maxdb/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MaxDB database.
-
-This dialect is *not* ported to SQLAlchemy 0.6 or 0.7.
-
-This dialect is *not* tested on SQLAlchemy 0.6 or 0.7.
-
-Overview
---------
-
-The ``maxdb`` dialect is **experimental** and has only been tested on 7.6.03.007
-and 7.6.00.037. Of these, **only 7.6.03.007 will work** with SQLAlchemy's ORM.
-The earlier version has severe ``LEFT JOIN`` limitations and will return
-incorrect results from even very simple ORM queries.
-
-Only the native Python DB-API is currently supported. ODBC driver support
-is a future enhancement.
-
-Connecting
-----------
-
-The username is case-sensitive. If you usually connect to the
-database with sqlcli and other tools in lower case, you likely need to
-use upper case for DB-API.
-
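-A minimal connection sketch (the URL parts are placeholders)::
-
- engine = create_engine('maxdb+sapdb://USER:PASSWORD@host/dbname')
-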
-Implementation Notes
---------------------
-
-With the 7.6.00.37 driver and Python 2.5, it seems that all DB-API
-generated exceptions are broken and can cause Python to crash.
-
-For 'somecol.in_([])' to work, the IN operator's generation must be changed
-to cast 'NULL' to a numeric, i.e. NUM(NULL). The DB-API doesn't accept a
-bind parameter there, so that particular generation must inline the NULL value,
-which depends on [ticket:807].
-
-The DB-API is very picky about where bind params may be used in queries.
-
-Bind params for some functions (e.g. MOD) need type information supplied.
-The dialect does not yet do this automatically.
-
-Max will occasionally throw up 'bad sql, compile again' exceptions for
-perfectly valid SQL. The dialect does not currently handle these; more
-research is needed.
-
-MaxDB 7.5 and Sap DB <= 7.4 reportedly do not support schemas. A very
-slightly different version of this dialect would be required to support
-those versions, and can easily be added if there is demand. Some other
-required components, such as a Max-aware 'old Oracle style' join compiler
-(thetas with (+) outer indicators), are already done and available for
-integration; email the devel list if you're interested in working on
-this.
-
-Versions tested: 7.6.03.07 and 7.6.00.37, native Python DB-API
-
-* MaxDB has severe limitations on OUTER JOINs, which are essential to ORM
- eager loading. And rather than raise an error if a SELECT can't be serviced,
- the database simply returns incorrect results.
-* Version 7.6.03.07 seems to JOIN properly, however the docs do not show the
- OUTER restrictions being lifted (as of this writing), and no changelog is
- available to confirm either. If you are using a different server version and
- your tasks require the ORM or any semi-advanced SQL through the SQL layer,
- running the SQLAlchemy test suite against your database is HIGHLY
- recommended before you begin.
-* Version 7.6.00.37 is LHS/RHS sensitive in `FROM lhs LEFT OUTER JOIN rhs ON
- lhs.col=rhs.col` vs `rhs.col=lhs.col`!
-* Version 7.6.00.37 is confused by `SELECT DISTINCT col as alias FROM t ORDER
- BY col` - these aliased, DISTINCT, ordered queries need to be re-written to
- order by the alias name.
-* Version 7.6.x supports creating a SAVEPOINT but not its RELEASE.
-* MaxDB supports autoincrement-style columns (DEFAULT SERIAL) and independent
- sequences. When including a DEFAULT SERIAL column in an insert, 0 needs to
- be inserted rather than NULL to generate a value.
-* MaxDB supports ANSI and "old Oracle style" theta joins with (+) outer join
- indicators.
-* The SQLAlchemy dialect is schema-aware and probably won't function correctly
- on older server versions (pre-7.6?). Support for schema-less server
- versions could be added if there's call.
-* ORDER BY is not supported in subqueries. LIMIT is not supported in
- subqueries. In 7.6.00.37, TOP does work in subqueries, but it is of limited
- use without LIMIT. OFFSET does not work in 7.6 despite being in the docs. Row number
- tricks in WHERE via ROWNO may be possible but it only seems to allow
- less-than comparison!
-* Version 7.6.03.07 can't LIMIT if a derived table is in FROM: `SELECT * FROM
- (SELECT * FROM a) LIMIT 2`
-* MaxDB does not support SQL's CAST and can only usefully cast to two types.
- There isn't much implicit type conversion, so be precise when creating
- `PassiveDefaults` in DDL generation: `'3'` and `3` aren't the same.
-
-sapdb.dbapi
-^^^^^^^^^^^
-
-* As of 2007-10-22 the Python 2.4 and 2.5 compatible versions of the DB-API
- are no longer available. A forum posting at SAP states that the Python
- driver will be available again "in the future". The last release from MySQL
- AB works if you can find it.
-* sequence.NEXTVAL skips every other value!
-* No rowcount for executemany()
-* If an INSERT into a table with a DEFAULT SERIAL column inserts the results
- of a function `INSERT INTO t VALUES (LENGTH('foo'))`, the cursor won't have
- the serial id. It needs to be manually yanked from tablename.CURRVAL.
-* Super-duper picky about where bind params can be placed. Not smart about
- converting Python types for some functions, such as `MOD(5, ?)`.
-* LONG (text, binary) values in result sets are read-once. The dialect uses a
- caching RowProxy when these types are present.
-* Connection objects seem like they want to be either `close()`d or garbage
- collected, but not both. There's a warning issued but it seems harmless.
-
-
-"""
-import datetime, itertools, re
-
-from sqlalchemy import exc, schema, sql, util, processors
-from sqlalchemy.sql import operators as sql_operators, expression as sql_expr
-from sqlalchemy.sql import compiler, visitors
-from sqlalchemy.engine import base as engine_base, default, reflection
-from sqlalchemy import types as sqltypes
-
-
-class _StringType(sqltypes.String):
- _type = None
-
- def __init__(self, length=None, encoding=None, **kw):
- super(_StringType, self).__init__(length=length, **kw)
- self.encoding = encoding
-
- def bind_processor(self, dialect):
- if self.encoding == 'unicode':
- return None
- else:
- def process(value):
- if isinstance(value, unicode):
- return value.encode(dialect.encoding)
- else:
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- #XXX: this code is probably very slow and one should try (if at all
- # possible) to determine the correct code path on a per-connection
- # basis (ie, here in result_processor, instead of inside the processor
- # function itself) and probably also use a few generic
- # processors, or possibly per query (though there is no mechanism
- # for that yet).
- def process(value):
- while True:
- if value is None:
- return None
- elif isinstance(value, unicode):
- return value
- elif isinstance(value, str):
- if self.convert_unicode or dialect.convert_unicode:
- return value.decode(dialect.encoding)
- else:
- return value
- elif hasattr(value, 'read'):
- # some sort of LONG, snarf and retry
- value = value.read(value.remainingLength())
- continue
- else:
- # unexpected type, return as-is
- return value
- return process
-
-
-class MaxString(_StringType):
- _type = 'VARCHAR'
-
-
-class MaxUnicode(_StringType):
- _type = 'VARCHAR'
-
- def __init__(self, length=None, **kw):
- kw['encoding'] = 'unicode'
- super(MaxUnicode, self).__init__(length=length, **kw)
-
-
-class MaxChar(_StringType):
- _type = 'CHAR'
-
-
-class MaxText(_StringType):
- _type = 'LONG'
-
- def __init__(self, length=None, **kw):
- super(MaxText, self).__init__(length, **kw)
-
- def get_col_spec(self):
- spec = 'LONG'
- if self.encoding is not None:
- spec = ' '.join((spec, self.encoding))
- elif self.convert_unicode:
- spec = ' '.join((spec, 'UNICODE'))
-
- return spec
-
-
-class MaxNumeric(sqltypes.Numeric):
- """The FIXED (also NUMERIC, DECIMAL) data type."""
-
- def __init__(self, precision=None, scale=None, **kw):
- kw.setdefault('asdecimal', True)
- super(MaxNumeric, self).__init__(scale=scale, precision=precision,
- **kw)
-
- def bind_processor(self, dialect):
- return None
-
-
-class MaxTimestamp(sqltypes.DateTime):
- def bind_processor(self, dialect):
- def process(value):
- if value is None:
- return None
- elif isinstance(value, basestring):
- return value
- elif dialect.datetimeformat == 'internal':
- ms = getattr(value, 'microsecond', 0)
- return value.strftime("%Y%m%d%H%M%S" + ("%06u" % ms))
- elif dialect.datetimeformat == 'iso':
- ms = getattr(value, 'microsecond', 0)
- return value.strftime("%Y-%m-%d %H:%M:%S." + ("%06u" % ms))
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." % (
- dialect.datetimeformat,))
- return process
-
- def result_processor(self, dialect, coltype):
- if dialect.datetimeformat == 'internal':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.datetime(
- *[int(v)
- for v in (value[0:4], value[4:6], value[6:8],
- value[8:10], value[10:12], value[12:14],
- value[14:])])
- elif dialect.datetimeformat == 'iso':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.datetime(
- *[int(v)
- for v in (value[0:4], value[5:7], value[8:10],
- value[11:13], value[14:16], value[17:19],
- value[20:])])
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
- dialect.datetimeformat)
- return process
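-
- # Illustrative round trip for datetimeformat='internal' (made-up value):
- # datetime(2011, 1, 2, 3, 4, 5) binds as '20110102030405000000' above,
- # and that same string parses back to datetime(2011, 1, 2, 3, 4, 5).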
-
-
-class MaxDate(sqltypes.Date):
- def bind_processor(self, dialect):
- def process(value):
- if value is None:
- return None
- elif isinstance(value, basestring):
- return value
- elif dialect.datetimeformat == 'internal':
- return value.strftime("%Y%m%d")
- elif dialect.datetimeformat == 'iso':
- return value.strftime("%Y-%m-%d")
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." % (
- dialect.datetimeformat,))
- return process
-
- def result_processor(self, dialect, coltype):
- if dialect.datetimeformat == 'internal':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.date(int(value[0:4]), int(value[4:6]),
- int(value[6:8]))
- elif dialect.datetimeformat == 'iso':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.date(int(value[0:4]), int(value[5:7]),
- int(value[8:10]))
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
- dialect.datetimeformat)
- return process
-
-
-class MaxTime(sqltypes.Time):
- def bind_processor(self, dialect):
- def process(value):
- if value is None:
- return None
- elif isinstance(value, basestring):
- return value
- elif dialect.datetimeformat == 'internal':
- return value.strftime("%H%M%S")
- elif dialect.datetimeformat == 'iso':
- return value.strftime("%H-%M-%S")
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." % (
- dialect.datetimeformat,))
- return process
-
- def result_processor(self, dialect, coltype):
- if dialect.datetimeformat == 'internal':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.time(int(value[0:4]), int(value[4:6]),
- int(value[6:8]))
- elif dialect.datetimeformat == 'iso':
- def process(value):
- if value is None:
- return None
- else:
- return datetime.time(int(value[0:4]), int(value[5:7]),
- int(value[8:10]))
- else:
- raise exc.InvalidRequestError(
- "datetimeformat '%s' is not supported." %
- dialect.datetimeformat)
- return process
-
-
-class MaxBlob(sqltypes.LargeBinary):
- def bind_processor(self, dialect):
- return processors.to_str
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is None:
- return None
- else:
- return value.read(value.remainingLength())
- return process
-
-class MaxDBTypeCompiler(compiler.GenericTypeCompiler):
- def _string_spec(self, string_spec, type_):
- if type_.length is None:
- spec = 'LONG'
- else:
- spec = '%s(%s)' % (string_spec, type_.length)
-
- if getattr(type_, 'encoding', None):
- spec = ' '.join([spec, type_.encoding.upper()])
- return spec
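-
- # e.g. (illustrative) a VARCHAR with length 200 and encoding 'unicode'
- # renders as 'VARCHAR(200) UNICODE'; with no length it becomes 'LONG'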
-
- def visit_text(self, type_):
- spec = 'LONG'
- if getattr(type_, 'encoding', None):
- spec = ' '.join((spec, type_.encoding))
- elif type_.convert_unicode:
- spec = ' '.join((spec, 'UNICODE'))
-
- return spec
-
- def visit_char(self, type_):
- return self._string_spec("CHAR", type_)
-
- def visit_string(self, type_):
- return self._string_spec("VARCHAR", type_)
-
- def visit_large_binary(self, type_):
- return "LONG BYTE"
-
- def visit_numeric(self, type_):
- if type_.scale and type_.precision:
- return 'FIXED(%s, %s)' % (type_.precision, type_.scale)
- elif type_.precision:
- return 'FIXED(%s)' % type_.precision
- else:
- return 'INTEGER'
-
- def visit_BOOLEAN(self, type_):
- return "BOOLEAN"
-
-colspecs = {
- sqltypes.Numeric: MaxNumeric,
- sqltypes.DateTime: MaxTimestamp,
- sqltypes.Date: MaxDate,
- sqltypes.Time: MaxTime,
- sqltypes.String: MaxString,
- sqltypes.Unicode:MaxUnicode,
- sqltypes.LargeBinary: MaxBlob,
- sqltypes.Text: MaxText,
- sqltypes.CHAR: MaxChar,
- sqltypes.TIMESTAMP: MaxTimestamp,
- sqltypes.BLOB: MaxBlob,
- }
-
-ischema_names = {
- 'boolean': sqltypes.BOOLEAN,
- 'char': sqltypes.CHAR,
- 'character': sqltypes.CHAR,
- 'date': sqltypes.DATE,
- 'fixed': sqltypes.Numeric,
- 'float': sqltypes.FLOAT,
- 'int': sqltypes.INT,
- 'integer': sqltypes.INT,
- 'long binary': sqltypes.BLOB,
- 'long unicode': sqltypes.Text,
- 'long': sqltypes.Text,
- 'smallint': sqltypes.SmallInteger,
- 'time': sqltypes.Time,
- 'timestamp': sqltypes.TIMESTAMP,
- 'varchar': sqltypes.VARCHAR,
- }
-
-# TODO: migrate this to sapdb.py
-class MaxDBExecutionContext(default.DefaultExecutionContext):
- def post_exec(self):
- # DB-API bug: if there were any functions as values,
- # then do another select and pull CURRVAL from the
- # autoincrement column's implicit sequence... ugh
- if self.compiled.isinsert and not self.executemany:
- table = self.compiled.statement.table
- index, serial_col = _autoserial_column(table)
-
- if serial_col and (not self.compiled._safeserial or
- not(self._last_inserted_ids) or
- self._last_inserted_ids[index] in (None, 0)):
- if table.schema:
- sql = "SELECT %s.CURRVAL FROM DUAL" % (
- self.compiled.preparer.format_table(table))
- else:
- sql = "SELECT CURRENT_SCHEMA.%s.CURRVAL FROM DUAL" % (
- self.compiled.preparer.format_table(table))
-
- rs = self.cursor.execute(sql)
- id = rs.fetchone()[0]
-
- if not self._last_inserted_ids:
- # This shouldn't ever be > 1? Right?
- self._last_inserted_ids = \
- [None] * len(table.primary_key.columns)
- self._last_inserted_ids[index] = id
-
- super(MaxDBExecutionContext, self).post_exec()
-
- def get_result_proxy(self):
- if self.cursor.description is not None:
- for column in self.cursor.description:
- if column[1] in ('Long Binary', 'Long', 'Long Unicode'):
- return MaxDBResultProxy(self)
- return engine_base.ResultProxy(self)
-
- @property
- def rowcount(self):
- if hasattr(self, '_rowcount'):
- return self._rowcount
- else:
- return self.cursor.rowcount
-
- def fire_sequence(self, seq):
- if seq.optional:
- return None
- return self._execute_scalar("SELECT %s.NEXTVAL FROM DUAL" % (
- self.dialect.identifier_preparer.format_sequence(seq)))
-
-class MaxDBCachedColumnRow(engine_base.RowProxy):
- """A RowProxy that only runs result_processors once per column."""
-
- def __init__(self, parent, row):
- super(MaxDBCachedColumnRow, self).__init__(parent, row)
- self.columns = {}
- self._row = row
- self._parent = parent
-
- def _get_col(self, key):
- if key not in self.columns:
- self.columns[key] = self._parent._get_col(self._row, key)
- return self.columns[key]
-
- def __iter__(self):
- for i in xrange(len(self._row)):
- yield self._get_col(i)
-
- def __repr__(self):
- return repr(list(self))
-
- def __eq__(self, other):
- return ((other is self) or
- (other == tuple([self._get_col(key)
- for key in xrange(len(self._row))])))
- def __getitem__(self, key):
- if isinstance(key, slice):
- indices = key.indices(len(self._row))
- return tuple([self._get_col(i) for i in xrange(*indices)])
- else:
- return self._get_col(key)
-
- def __getattr__(self, name):
- try:
- return self._get_col(name)
- except KeyError:
- raise AttributeError(name)
-
-
-class MaxDBResultProxy(engine_base.ResultProxy):
- _process_row = MaxDBCachedColumnRow
-
-class MaxDBCompiler(compiler.SQLCompiler):
-
- function_conversion = {
- 'CURRENT_DATE': 'DATE',
- 'CURRENT_TIME': 'TIME',
- 'CURRENT_TIMESTAMP': 'TIMESTAMP',
- }
-
- # These functions must be written without parens when called with no
- # parameters. e.g. 'SELECT DATE FROM DUAL' not 'SELECT DATE() FROM DUAL'
- bare_functions = set([
- 'CURRENT_SCHEMA', 'DATE', 'FALSE', 'SYSDBA', 'TIME', 'TIMESTAMP',
- 'TIMEZONE', 'TRANSACTION', 'TRUE', 'USER', 'UID', 'USERGROUP',
- 'UTCDATE', 'UTCDIFF'])
-
- def visit_mod(self, binary, **kw):
- return "mod(%s, %s)" % \
- (self.process(binary.left), self.process(binary.right))
-
- def default_from(self):
- return ' FROM DUAL'
-
- def for_update_clause(self, select):
- clause = select.for_update
- if clause is True:
- return " WITH LOCK EXCLUSIVE"
- elif clause is None:
- return ""
- elif clause == "read":
- return " WITH LOCK"
- elif clause == "ignore":
- return " WITH LOCK (IGNORE) EXCLUSIVE"
- elif clause == "nowait":
- return " WITH LOCK (NOWAIT) EXCLUSIVE"
- elif isinstance(clause, basestring):
- return " WITH LOCK %s" % clause.upper()
- elif not clause:
- return ""
- else:
- return " WITH LOCK EXCLUSIVE"
-
- def function_argspec(self, fn, **kw):
- if fn.name.upper() in self.bare_functions:
- return ""
- elif len(fn.clauses) > 0:
- return compiler.SQLCompiler.function_argspec(self, fn, **kw)
- else:
- return ""
-
- def visit_function(self, fn, **kw):
- transform = self.function_conversion.get(fn.name.upper(), None)
- if transform:
- fn = fn._clone()
- fn.name = transform
- return super(MaxDBCompiler, self).visit_function(fn, **kw)
-
- def visit_cast(self, cast, **kwargs):
- # MaxDB only supports casts * to NUMERIC, * to VARCHAR or
- # date/time to VARCHAR. Casts of LONGs will fail.
- if isinstance(cast.type, (sqltypes.Integer, sqltypes.Numeric)):
- return "NUM(%s)" % self.process(cast.clause)
- elif isinstance(cast.type, sqltypes.String):
- return "CHR(%s)" % self.process(cast.clause)
- else:
- return self.process(cast.clause)
-
- def visit_sequence(self, sequence):
- if sequence.optional:
- return None
- else:
- return (
- self.dialect.identifier_preparer.format_sequence(sequence) +
- ".NEXTVAL")
-
- class ColumnSnagger(visitors.ClauseVisitor):
- def __init__(self):
- self.count = 0
- self.column = None
- def visit_column(self, column):
- self.column = column
- self.count += 1
-
- def _find_labeled_columns(self, columns, use_labels=False):
- labels = {}
- for column in columns:
- if isinstance(column, basestring):
- continue
- snagger = self.ColumnSnagger()
- snagger.traverse(column)
- if snagger.count == 1:
- if isinstance(column, sql_expr._Label):
- labels[unicode(snagger.column)] = column.name
- elif use_labels:
- labels[unicode(snagger.column)] = column._label
-
- return labels
-
- def order_by_clause(self, select, **kw):
- order_by = self.process(select._order_by_clause, **kw)
-
- # ORDER BY clauses in DISTINCT queries must reference aliased
- # inner columns by alias name, not true column name.
- if order_by and getattr(select, '_distinct', False):
- labels = self._find_labeled_columns(select.inner_columns,
- select.use_labels)
- if labels:
- for needs_alias in labels.keys():
- r = re.compile(r'(^| )(%s)(,| |$)' %
- re.escape(needs_alias))
- order_by = r.sub((r'\1%s\3' % labels[needs_alias]),
- order_by)
-
- # No ORDER BY in subqueries.
- if order_by:
- if self.is_subquery():
- # It's safe to simply drop the ORDER BY if there is no
- # LIMIT. Right? Other dialects seem to get away with
- # dropping order.
- if select._limit:
- raise exc.InvalidRequestError(
- "MaxDB does not support ORDER BY in subqueries")
- else:
- return ""
- return " ORDER BY " + order_by
- else:
- return ""
-
- def get_select_precolumns(self, select):
- # Convert a subquery's LIMIT to TOP
- sql = select._distinct and 'DISTINCT ' or ''
- if self.is_subquery() and select._limit:
- if select._offset:
- raise exc.InvalidRequestError(
- 'MaxDB does not support LIMIT with an offset.')
- sql += 'TOP %s ' % select._limit
- return sql
-
- def limit_clause(self, select):
- # The docs say offsets are supported with LIMIT. But they're not.
- # TODO: maybe emulate by adding a ROWNO/ROWNUM predicate?
- # TODO: does MaxDB support bind params for LIMIT / TOP ?
- if self.is_subquery():
- # sub queries need TOP
- return ''
- elif select._offset:
- raise exc.InvalidRequestError(
- 'MaxDB does not support LIMIT with an offset.')
- else:
- return ' \n LIMIT %s' % (select._limit,)
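-
- # Illustrative renderings (hypothetical queries): a top-level select
- # with _limit=10 gets ' \n LIMIT 10' appended here, while the same
- # select as a subquery instead renders 'TOP 10 ' via
- # get_select_precolumns() and gets no LIMIT clause at all.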
-
- def visit_insert(self, insert):
- self.isinsert = True
- self._safeserial = True
-
- colparams = self._get_colparams(insert)
- for value in (insert.parameters or {}).itervalues():
- if isinstance(value, sql_expr.Function):
- self._safeserial = False
- break
-
- return ''.join(('INSERT INTO ',
- self.preparer.format_table(insert.table),
- ' (',
- ', '.join([self.preparer.format_column(c[0])
- for c in colparams]),
- ') VALUES (',
- ', '.join([c[1] for c in colparams]),
- ')'))
-
-
-class MaxDBIdentifierPreparer(compiler.IdentifierPreparer):
- reserved_words = set([
- 'abs', 'absolute', 'acos', 'adddate', 'addtime', 'all', 'alpha',
- 'alter', 'any', 'ascii', 'asin', 'atan', 'atan2', 'avg', 'binary',
- 'bit', 'boolean', 'byte', 'case', 'ceil', 'ceiling', 'char',
- 'character', 'check', 'chr', 'column', 'concat', 'constraint', 'cos',
- 'cosh', 'cot', 'count', 'cross', 'curdate', 'current', 'curtime',
- 'database', 'date', 'datediff', 'day', 'dayname', 'dayofmonth',
- 'dayofweek', 'dayofyear', 'dec', 'decimal', 'decode', 'default',
- 'degrees', 'delete', 'digits', 'distinct', 'double', 'except',
- 'exists', 'exp', 'expand', 'first', 'fixed', 'float', 'floor', 'for',
- 'from', 'full', 'get_objectname', 'get_schema', 'graphic', 'greatest',
- 'group', 'having', 'hex', 'hextoraw', 'hour', 'ifnull', 'ignore',
- 'index', 'initcap', 'inner', 'insert', 'int', 'integer', 'internal',
- 'intersect', 'into', 'join', 'key', 'last', 'lcase', 'least', 'left',
- 'length', 'lfill', 'list', 'ln', 'locate', 'log', 'log10', 'long',
- 'longfile', 'lower', 'lpad', 'ltrim', 'makedate', 'maketime',
- 'mapchar', 'max', 'mbcs', 'microsecond', 'min', 'minute', 'mod',
- 'month', 'monthname', 'natural', 'nchar', 'next', 'no', 'noround',
- 'not', 'now', 'null', 'num', 'numeric', 'object', 'of', 'on',
- 'order', 'packed', 'pi', 'power', 'prev', 'primary', 'radians',
- 'real', 'reject', 'relative', 'replace', 'rfill', 'right', 'round',
- 'rowid', 'rowno', 'rpad', 'rtrim', 'second', 'select', 'selupd',
- 'serial', 'set', 'show', 'sign', 'sin', 'sinh', 'smallint', 'some',
- 'soundex', 'space', 'sqrt', 'stamp', 'statistics', 'stddev',
- 'subdate', 'substr', 'substring', 'subtime', 'sum', 'sysdba',
- 'table', 'tan', 'tanh', 'time', 'timediff', 'timestamp', 'timezone',
- 'to', 'toidentifier', 'transaction', 'translate', 'trim', 'trunc',
- 'truncate', 'ucase', 'uid', 'unicode', 'union', 'update', 'upper',
- 'user', 'usergroup', 'using', 'utcdate', 'utcdiff', 'value', 'values',
- 'varchar', 'vargraphic', 'variance', 'week', 'weekofyear', 'when',
- 'where', 'with', 'year', 'zoned' ])
-
- def _normalize_name(self, name):
- if name is None:
- return None
- if name.isupper():
- lc_name = name.lower()
- if not self._requires_quotes(lc_name):
- return lc_name
- return name
-
- def _denormalize_name(self, name):
- if name is None:
- return None
- elif (name.islower() and
- not self._requires_quotes(name)):
- return name.upper()
- else:
- return name
-
- def _maybe_quote_identifier(self, name):
- if self._requires_quotes(name):
- return self.quote_identifier(name)
- else:
- return name
-
-
-class MaxDBDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, **kw):
- colspec = [self.preparer.format_column(column),
- self.dialect.type_compiler.process(column.type)]
-
- if not column.nullable:
- colspec.append('NOT NULL')
-
- default = column.default
- default_str = self.get_column_default_string(column)
-
- # No DDL default for columns specified with a non-optional sequence;
- # this defaulting behavior is entirely client-side. (And as a
- # consequence, non-reflectable.)
- if (default and isinstance(default, schema.Sequence) and
- not default.optional):
- pass
- # Regular default
- elif default_str is not None:
- colspec.append('DEFAULT %s' % default_str)
- # Assign DEFAULT SERIAL heuristically
- elif column.primary_key and column.autoincrement:
- # For SERIAL on a non-primary key member, use
- # DefaultClause(text('SERIAL'))
- try:
- first = [c for c in column.table.primary_key.columns
- if (c.autoincrement and
- (isinstance(c.type, sqltypes.Integer) or
- (isinstance(c.type, MaxNumeric) and
- c.type.precision)) and
- not c.foreign_keys)].pop(0)
- if column is first:
- colspec.append('DEFAULT SERIAL')
- except IndexError:
- pass
- return ' '.join(colspec)
-
- def get_column_default_string(self, column):
- if isinstance(column.server_default, schema.DefaultClause):
- if isinstance(column.server_default.arg, basestring):
- if isinstance(column.type, sqltypes.Integer):
- return str(column.server_default.arg)
- else:
- return "'%s'" % column.server_default.arg
- else:
- return unicode(self._compile(column.server_default.arg, None))
- else:
- return None
-
- def visit_create_sequence(self, create):
- """Creates a SEQUENCE.
-
- TODO: move to module doc?
-
- start
- With an integer value, set the START WITH option.
-
- increment
- An integer value to increment by. Default is the database default.
-
- maxdb_minvalue
- maxdb_maxvalue
- With an integer value, sets the corresponding sequence option.
-
- maxdb_no_minvalue
- maxdb_no_maxvalue
- Defaults to False. If true, sets the corresponding sequence option.
-
- maxdb_cycle
- Defaults to False. If true, sets the CYCLE option.
-
- maxdb_cache
- With an integer value, sets the CACHE option.
-
- maxdb_no_cache
- Defaults to False. If true, sets NOCACHE.
- """
- sequence = create.element
-
- if (not sequence.optional and
- (not self.checkfirst or
- not self.dialect.has_sequence(self.connection, sequence.name))):
-
- ddl = ['CREATE SEQUENCE',
- self.preparer.format_sequence(sequence)]
-
-
- if sequence.increment is not None:
- ddl.extend(('INCREMENT BY', str(sequence.increment)))
-
- if sequence.start is not None:
- ddl.extend(('START WITH', str(sequence.start)))
-
- opts = dict([(pair[0][6:].lower(), pair[1])
- for pair in sequence.kwargs.items()
- if pair[0].startswith('maxdb_')])
-
- if 'maxvalue' in opts:
- ddl.extend(('MAXVALUE', str(opts['maxvalue'])))
- elif opts.get('no_maxvalue', False):
- ddl.append('NOMAXVALUE')
- if 'minvalue' in opts:
- ddl.extend(('MINVALUE', str(opts['minvalue'])))
- elif opts.get('no_minvalue', False):
- ddl.append('NOMINVALUE')
-
- if opts.get('cycle', False):
- ddl.append('CYCLE')
-
- if 'cache' in opts:
- ddl.extend(('CACHE', str(opts['cache'])))
- elif opts.get('no_cache', False):
- ddl.append('NOCACHE')
-
- return ' '.join(ddl)
-
-
-class MaxDBDialect(default.DefaultDialect):
- name = 'maxdb'
- supports_alter = True
- supports_unicode_statements = True
- max_identifier_length = 32
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = False
-
- preparer = MaxDBIdentifierPreparer
- statement_compiler = MaxDBCompiler
- ddl_compiler = MaxDBDDLCompiler
- execution_ctx_cls = MaxDBExecutionContext
-
- ported_sqla_06 = False
-
- colspecs = colspecs
- ischema_names = ischema_names
-
- # MaxDB-specific
- datetimeformat = 'internal'
-
- def __init__(self, _raise_known_sql_errors=False, **kw):
- super(MaxDBDialect, self).__init__(**kw)
- self._raise_known = _raise_known_sql_errors
-
- if self.dbapi is None:
- self.dbapi_type_map = {}
- else:
- self.dbapi_type_map = {
- 'Long Binary': MaxBlob(),
- 'Long byte_t': MaxBlob(),
- 'Long Unicode': MaxText(),
- 'Timestamp': MaxTimestamp(),
- 'Date': MaxDate(),
- 'Time': MaxTime(),
- datetime.datetime: MaxTimestamp(),
- datetime.date: MaxDate(),
- datetime.time: MaxTime(),
- }
-
- def do_execute(self, cursor, statement, parameters, context=None):
- res = cursor.execute(statement, parameters)
- if isinstance(res, int) and context is not None:
- context._rowcount = res
-
- def do_release_savepoint(self, connection, name):
- # Does MaxDB truly support RELEASE SAVEPOINT <id>? All my attempts
- # produce "SUBTRANS COMMIT/ROLLBACK not allowed without SUBTRANS
- # BEGIN SQLSTATE: I7065"
- # Note that ROLLBACK TO works fine. In theory, a RELEASE should
- # just free up some transactional resources early, before the overall
- # COMMIT/ROLLBACK so omitting it should be relatively ok.
- pass
-
- def _get_default_schema_name(self, connection):
- return self.identifier_preparer._normalize_name(
- connection.execute(
- 'SELECT CURRENT_SCHEMA FROM DUAL').scalar())
-
- def has_table(self, connection, table_name, schema=None):
- denormalize = self.identifier_preparer._denormalize_name
- bind = [denormalize(table_name)]
- if schema is None:
- sql = ("SELECT tablename FROM TABLES "
- "WHERE TABLES.TABLENAME=? AND"
- " TABLES.SCHEMANAME=CURRENT_SCHEMA ")
- else:
- sql = ("SELECT tablename FROM TABLES "
- "WHERE TABLES.TABLENAME = ? AND"
- " TABLES.SCHEMANAME=? ")
- bind.append(denormalize(schema))
-
- rp = connection.execute(sql, bind)
- return bool(rp.first())
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- if schema is None:
- sql = (" SELECT TABLENAME FROM TABLES WHERE "
- " SCHEMANAME=CURRENT_SCHEMA ")
- rs = connection.execute(sql)
- else:
- sql = (" SELECT TABLENAME FROM TABLES WHERE "
- " SCHEMANAME=? ")
- matchname = self.identifier_preparer._denormalize_name(schema)
- rs = connection.execute(sql, matchname)
- normalize = self.identifier_preparer._normalize_name
- return [normalize(row[0]) for row in rs]
-
- def reflecttable(self, connection, table, include_columns):
- denormalize = self.identifier_preparer._denormalize_name
- normalize = self.identifier_preparer._normalize_name
-
- st = ('SELECT COLUMNNAME, MODE, DATATYPE, CODETYPE, LEN, DEC, '
- ' NULLABLE, "DEFAULT", DEFAULTFUNCTION '
- 'FROM COLUMNS '
- 'WHERE TABLENAME=? AND SCHEMANAME=%s '
- 'ORDER BY POS')
-
- fk = ('SELECT COLUMNNAME, FKEYNAME, '
- ' REFSCHEMANAME, REFTABLENAME, REFCOLUMNNAME, RULE, '
- ' (CASE WHEN REFSCHEMANAME = CURRENT_SCHEMA '
- ' THEN 1 ELSE 0 END) AS in_schema '
- 'FROM FOREIGNKEYCOLUMNS '
- 'WHERE TABLENAME=? AND SCHEMANAME=%s '
- 'ORDER BY FKEYNAME ')
-
- params = [denormalize(table.name)]
- if not table.schema:
- st = st % 'CURRENT_SCHEMA'
- fk = fk % 'CURRENT_SCHEMA'
- else:
- st = st % '?'
- fk = fk % '?'
- params.append(denormalize(table.schema))
-
- rows = connection.execute(st, params).fetchall()
- if not rows:
- raise exc.NoSuchTableError(table.fullname)
-
- include_columns = set(include_columns or [])
-
- for row in rows:
- (name, mode, col_type, encoding, length, scale,
- nullable, constant_def, func_def) = row
-
- name = normalize(name)
-
- if include_columns and name not in include_columns:
- continue
-
- type_args, type_kw = [], {}
- if col_type == 'FIXED':
- type_args = length, scale
- # Convert FIXED(10) DEFAULT SERIAL to our Integer
- if (scale == 0 and
- func_def is not None and func_def.startswith('SERIAL')):
- col_type = 'INTEGER'
- type_args = length,
- elif col_type == 'FLOAT':
- type_args = length,
- elif col_type in ('CHAR', 'VARCHAR'):
- type_args = length,
- type_kw['encoding'] = encoding
- elif col_type == 'LONG':
- type_kw['encoding'] = encoding
-
- try:
- type_cls = ischema_names[col_type.lower()]
- type_instance = type_cls(*type_args, **type_kw)
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (col_type, name))
- type_instance = sqltypes.NullType()
-
- col_kw = {'autoincrement': False}
- col_kw['nullable'] = (nullable == 'YES')
- col_kw['primary_key'] = (mode == 'KEY')
-
- if func_def is not None:
- if func_def.startswith('SERIAL'):
- if col_kw['primary_key']:
- # No special default- let the standard autoincrement
- # support handle SERIAL pk columns.
- col_kw['autoincrement'] = True
- else:
- # strip current numbering
- col_kw['server_default'] = schema.DefaultClause(
- sql.text('SERIAL'))
- col_kw['autoincrement'] = True
- else:
- col_kw['server_default'] = schema.DefaultClause(
- sql.text(func_def))
- elif constant_def is not None:
- col_kw['server_default'] = schema.DefaultClause(sql.text(
- "'%s'" % constant_def.replace("'", "''")))
-
- table.append_column(schema.Column(name, type_instance, **col_kw))
-
- fk_sets = itertools.groupby(connection.execute(fk, params),
- lambda row: row.FKEYNAME)
- for fkeyname, fkey in fk_sets:
- fkey = list(fkey)
- if include_columns:
- key_cols = set([r.COLUMNNAME for r in fkey])
- if key_cols != include_columns:
- continue
-
- columns, referants = [], []
- quote = self.identifier_preparer._maybe_quote_identifier
-
- for row in fkey:
- columns.append(normalize(row.COLUMNNAME))
- if table.schema or not row.in_schema:
- referants.append('.'.join(
- [quote(normalize(row[c]))
- for c in ('REFSCHEMANAME', 'REFTABLENAME',
- 'REFCOLUMNNAME')]))
- else:
- referants.append('.'.join(
- [quote(normalize(row[c]))
- for c in ('REFTABLENAME', 'REFCOLUMNNAME')]))
-
- constraint_kw = {'name': fkeyname.lower()}
- if fkey[0].RULE is not None:
- rule = fkey[0].RULE
- if rule.startswith('DELETE '):
- rule = rule[7:]
- constraint_kw['ondelete'] = rule
-
- table_kw = {}
- if table.schema or not fkey[0].in_schema:
- table_kw['schema'] = normalize(fkey[0].REFSCHEMANAME)
-
- ref_key = schema._get_table_key(normalize(fkey[0].REFTABLENAME),
- table_kw.get('schema'))
- if ref_key not in table.metadata.tables:
- schema.Table(normalize(fkey[0].REFTABLENAME),
- table.metadata,
- autoload=True, autoload_with=connection,
- **table_kw)
-
- constraint = schema.ForeignKeyConstraint(
- columns, referants, link_to_name=True,
- **constraint_kw)
- table.append_constraint(constraint)
-
- def has_sequence(self, connection, name):
- # [ticket:726] makes this schema-aware.
- denormalize = self.identifier_preparer._denormalize_name
- sql = ("SELECT sequence_name FROM SEQUENCES "
- "WHERE SEQUENCE_NAME=? ")
-
- rp = connection.execute(sql, denormalize(name))
- return bool(rp.first())
-
-
-def _autoserial_column(table):
- """Finds the effective DEFAULT SERIAL column of a Table, if any."""
-
- for index, col in enumerate(table.primary_key.columns):
- if (isinstance(col.type, (sqltypes.Integer, sqltypes.Numeric)) and
- col.autoincrement):
- if isinstance(col.default, schema.Sequence):
- if col.default.optional:
- return index, col
- elif (col.default is None or
- (not isinstance(col.server_default, schema.DefaultClause))):
- return index, col
-
- return None, None
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/sapdb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/sapdb.py
deleted file mode 100755
index da04d809..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/maxdb/sapdb.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# maxdb/sapdb.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.maxdb.base import MaxDBDialect
-
-class MaxDBDialect_sapdb(MaxDBDialect):
- driver = 'sapdb'
-
- @classmethod
- def dbapi(cls):
- from sapdb import dbapi as _dbapi
- return _dbapi
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- opts.update(url.query)
- return [], opts
-
-
-dialect = MaxDBDialect_sapdb \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/__init__.py
deleted file mode 100755
index 6e7bae44..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# mssql/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.mssql import base, pyodbc, adodbapi, \
- pymssql, zxjdbc, mxodbc
-
-base.dialect = pyodbc.dialect
-
-from sqlalchemy.dialects.mssql.base import \
- INTEGER, BIGINT, SMALLINT, TINYINT, VARCHAR, NVARCHAR, CHAR, \
- NCHAR, TEXT, NTEXT, DECIMAL, NUMERIC, FLOAT, DATETIME,\
- DATETIME2, DATETIMEOFFSET, DATE, TIME, SMALLDATETIME, \
- BINARY, VARBINARY, BIT, REAL, IMAGE, TIMESTAMP,\
- MONEY, SMALLMONEY, UNIQUEIDENTIFIER, SQL_VARIANT, dialect
-
-
-__all__ = (
- 'INTEGER', 'BIGINT', 'SMALLINT', 'TINYINT', 'VARCHAR', 'NVARCHAR', 'CHAR',
- 'NCHAR', 'TEXT', 'NTEXT', 'DECIMAL', 'NUMERIC', 'FLOAT', 'DATETIME',
- 'DATETIME2', 'DATETIMEOFFSET', 'DATE', 'TIME', 'SMALLDATETIME',
- 'BINARY', 'VARBINARY', 'BIT', 'REAL', 'IMAGE', 'TIMESTAMP',
- 'MONEY', 'SMALLMONEY', 'UNIQUEIDENTIFIER', 'SQL_VARIANT', 'dialect'
-) \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/adodbapi.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/adodbapi.py
deleted file mode 100755
index f2d945de..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/adodbapi.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# mssql/adodbapi.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-The adodbapi dialect is not implemented for 0.6 at this time.
-
-"""
-import datetime
-from sqlalchemy import types as sqltypes, util
-from sqlalchemy.dialects.mssql.base import MSDateTime, MSDialect
-import sys
-
-class MSDateTime_adodbapi(MSDateTime):
- def result_processor(self, dialect, coltype):
- def process(value):
- # adodbapi will return datetimes with empty time
- # values as datetime.date() objects.
- # Promote them back to full datetime.datetime()
- if type(value) is datetime.date:
- return datetime.datetime(value.year, value.month, value.day)
- return value
- return process
-
-
-class MSDialect_adodbapi(MSDialect):
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = True
- supports_unicode = sys.maxunicode == 65535
- supports_unicode_statements = True
- driver = 'adodbapi'
-
- @classmethod
- def import_dbapi(cls):
- import adodbapi as module
- return module
-
- colspecs = util.update_copy(
- MSDialect.colspecs,
- {
- sqltypes.DateTime:MSDateTime_adodbapi
- }
- )
-
- def create_connect_args(self, url):
- keys = url.query
-
- connectors = ["Provider=SQLOLEDB"]
- if 'port' in keys:
- connectors.append ("Data Source=%s, %s" %
- (keys.get("host"), keys.get("port")))
- else:
- connectors.append ("Data Source=%s" % keys.get("host"))
- connectors.append ("Initial Catalog=%s" % keys.get("database"))
- user = keys.get("user")
- if user:
- connectors.append("User Id=%s" % user)
- connectors.append("Password=%s" % keys.get("password", ""))
- else:
- connectors.append("Integrated Security=SSPI")
- return [[";".join (connectors)], {}]
-
- def is_disconnect(self, e, connection, cursor):
- return isinstance(e, self.dbapi.adodbapi.DatabaseError) and \
- "'connection failure'" in str(e)
-
-dialect = MSDialect_adodbapi
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/base.py
deleted file mode 100755
index e349092f..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/base.py
+++ /dev/null
@@ -1,1456 +0,0 @@
-# mssql/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Microsoft SQL Server database.
-
-Connecting
-----------
-
-See the individual driver sections below for details on connecting.
-
-Auto Increment Behavior
------------------------
-
-``IDENTITY`` columns are supported by using SQLAlchemy
-``schema.Sequence()`` objects. In other words::
-
- from sqlalchemy import Table, Integer, Sequence, Column, String
-
- Table('test', metadata,
- Column('id', Integer,
- Sequence('blah',100,10), primary_key=True),
- Column('name', String(20))
- ).create(some_engine)
-
-would yield::
-
- CREATE TABLE test (
- id INTEGER NOT NULL IDENTITY(100,10) PRIMARY KEY,
- name VARCHAR(20) NULL,
- )
-
-Note that the ``start`` and ``increment`` values for sequences are
-optional and will default to 1,1.
-
-Implicit ``autoincrement`` behavior works the same in MSSQL as it
-does in other dialects and results in an ``IDENTITY`` column.
-
-* Support for ``SET IDENTITY_INSERT ON`` mode (automagic on / off for
- ``INSERT`` s)
-
-* Support for auto-fetching of ``@@IDENTITY/@@SCOPE_IDENTITY()`` on
- ``INSERT``
-
-Collation Support
------------------
-
-MSSQL specific string types support a collation parameter that
-creates a column-level specific collation for the column. The
-collation parameter accepts a Windows Collation Name or a SQL
-Collation Name. Supported types are MSChar, MSNChar, MSString,
-MSNVarchar, MSText, and MSNText. For example::
-
- from sqlalchemy.dialects.mssql import VARCHAR
- Column('login', VARCHAR(32, collation='Latin1_General_CI_AS'))
-
-When such a column is associated with a :class:`.Table`, the
-CREATE TABLE statement for this column will yield::
-
- login VARCHAR(32) COLLATE Latin1_General_CI_AS NULL
-
-LIMIT/OFFSET Support
---------------------
-
-MSSQL has no support for the LIMIT or OFFSET keywords. LIMIT is
-supported directly through the ``TOP`` Transact-SQL keyword::
-
- select.limit
-
-will yield::
-
- SELECT TOP n
-
-If using SQL Server 2005 or above, LIMIT with OFFSET
-support is available through the ``ROW_NUMBER OVER`` construct.
-For versions below 2005, LIMIT with OFFSET usage will fail.
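-
-As an illustrative sketch (``mytable`` and its columns are hypothetical
-names), a select using both limit and offset::
-
-    select([mytable]).order_by(mytable.c.id).limit(10).offset(20)
-
-would be rendered on SQL Server 2005 and above as a subquery using
-``ROW_NUMBER() OVER (ORDER BY ...)``, with the ORDER BY serving as the
-required row numbering criterion.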
-
-Nullability
------------
-MSSQL has support for three levels of column nullability. The default
-nullability allows nulls and is explicit in the CREATE TABLE
-construct::
-
- name VARCHAR(20) NULL
-
-If ``nullable=None`` is specified then no specification is made. In
-other words the database's configured default is used. This will
-render::
-
- name VARCHAR(20)
-
-If ``nullable`` is ``True`` or ``False`` then the column will be
-``NULL`` or ``NOT NULL`` respectively.
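-
-As a quick sketch of all three variants (column names are hypothetical;
-imports as in the earlier example)::
-
-    Column('a', Integer, nullable=True)   # a INTEGER NULL
-    Column('b', Integer, nullable=False)  # b INTEGER NOT NULL
-    Column('c', Integer, nullable=None)   # c INTEGER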
-
-Date / Time Handling
---------------------
-DATE and TIME are supported. Bind parameters are converted
-to datetime.datetime() objects as required by most MSSQL drivers,
-and results are processed from strings if needed.
-The DATE and TIME types are not available for MSSQL 2005 and
-previous - if a server version below 2008 is detected, DDL
-for these types will be issued as DATETIME.
-
-Compatibility Levels
---------------------
-MSSQL supports the notion of setting compatibility levels at the
-database level. This allows, for instance, a database that is
-compatible with SQL2000 to run on a SQL2005 database server.
-``server_version_info`` will always return the database server
-version information (in this case SQL2005) and not the
-compatibility level information. Because of this, if running under
-a backwards compatibility mode, SQLAlchemy may attempt to use T-SQL
-statements that the database server is unable to parse.
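-
-As a sketch, the version actually detected can be inspected once a
-first connection has been made (the URL here is illustrative only)::
-
-    engine = create_engine('mssql+pyodbc://mydsn')
-    engine.connect()
-    engine.dialect.server_version_info   # e.g. (9, 0, ...) for SQL2005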
-
-Triggers
---------
-
-SQLAlchemy by default uses OUTPUT INSERTED to get at newly
-generated primary key values via IDENTITY columns or other
-server side defaults. MS-SQL does not
-allow the usage of OUTPUT INSERTED on tables that have triggers.
-To disable the usage of OUTPUT INSERTED on a per-table basis,
-specify ``implicit_returning=False`` for each :class:`.Table`
-which has triggers::
-
- Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
- # ...,
- implicit_returning=False
- )
-
-Declarative form::
-
- class MyClass(Base):
- # ...
- __table_args__ = {'implicit_returning':False}
-
-
-This option can also be specified engine-wide using the
-``implicit_returning=False`` argument on :func:`.create_engine`.
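-
-For example (a sketch; the URL is illustrative only)::
-
-    engine = create_engine('mssql+pyodbc://mydsn',
-                           implicit_returning=False)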
-
-Enabling Snapshot Isolation
----------------------------
-
-Not necessarily specific to SQLAlchemy, SQL Server has a default transaction
-isolation mode that locks entire tables, and causes even mildly concurrent
-applications to have long held locks and frequent deadlocks.
-Enabling snapshot isolation for the database as a whole is recommended
-for modern levels of concurrency support. This is accomplished via the
-following ALTER DATABASE commands executed at the SQL prompt::
-
- ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON
-
- ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON
-
-Background on SQL Server snapshot isolation is available at
-http://msdn.microsoft.com/en-us/library/ms175095.aspx.
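-
-The same statements can also be emitted through SQLAlchemy; a sketch,
-assuming a connection with sufficient privileges and no open
-transaction::
-
-    connection = engine.connect()
-    connection.execute(
-        "ALTER DATABASE MyDatabase SET ALLOW_SNAPSHOT_ISOLATION ON")
-    connection.execute(
-        "ALTER DATABASE MyDatabase SET READ_COMMITTED_SNAPSHOT ON")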
-
-Known Issues
-------------
-
-* No support for more than one ``IDENTITY`` column per table
-* reflection of indexes does not work with versions older than
- SQL Server 2005
-
-"""
-import datetime, operator, re
-
-from sqlalchemy import sql, schema as sa_schema, exc, util
-from sqlalchemy.sql import select, compiler, expression, \
- operators as sql_operators, \
- util as sql_util
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, DECIMAL, NUMERIC, \
- FLOAT, TIMESTAMP, DATETIME, DATE, BINARY,\
- VARBINARY, BLOB
-
-
-from sqlalchemy.dialects.mssql import information_schema as ischema
-
-MS_2008_VERSION = (10,)
-MS_2005_VERSION = (9,)
-MS_2000_VERSION = (8,)
-
-RESERVED_WORDS = set(
- ['add', 'all', 'alter', 'and', 'any', 'as', 'asc', 'authorization',
- 'backup', 'begin', 'between', 'break', 'browse', 'bulk', 'by', 'cascade',
- 'case', 'check', 'checkpoint', 'close', 'clustered', 'coalesce',
- 'collate', 'column', 'commit', 'compute', 'constraint', 'contains',
- 'containstable', 'continue', 'convert', 'create', 'cross', 'current',
- 'current_date', 'current_time', 'current_timestamp', 'current_user',
- 'cursor', 'database', 'dbcc', 'deallocate', 'declare', 'default',
- 'delete', 'deny', 'desc', 'disk', 'distinct', 'distributed', 'double',
- 'drop', 'dump', 'else', 'end', 'errlvl', 'escape', 'except', 'exec',
- 'execute', 'exists', 'exit', 'external', 'fetch', 'file', 'fillfactor',
- 'for', 'foreign', 'freetext', 'freetexttable', 'from', 'full',
- 'function', 'goto', 'grant', 'group', 'having', 'holdlock', 'identity',
- 'identity_insert', 'identitycol', 'if', 'in', 'index', 'inner', 'insert',
- 'intersect', 'into', 'is', 'join', 'key', 'kill', 'left', 'like',
- 'lineno', 'load', 'merge', 'national', 'nocheck', 'nonclustered', 'not',
- 'null', 'nullif', 'of', 'off', 'offsets', 'on', 'open', 'opendatasource',
- 'openquery', 'openrowset', 'openxml', 'option', 'or', 'order', 'outer',
- 'over', 'percent', 'pivot', 'plan', 'precision', 'primary', 'print',
- 'proc', 'procedure', 'public', 'raiserror', 'read', 'readtext',
- 'reconfigure', 'references', 'replication', 'restore', 'restrict',
- 'return', 'revert', 'revoke', 'right', 'rollback', 'rowcount',
- 'rowguidcol', 'rule', 'save', 'schema', 'securityaudit', 'select',
- 'session_user', 'set', 'setuser', 'shutdown', 'some', 'statistics',
- 'system_user', 'table', 'tablesample', 'textsize', 'then', 'to', 'top',
- 'tran', 'transaction', 'trigger', 'truncate', 'tsequal', 'union',
- 'unique', 'unpivot', 'update', 'updatetext', 'use', 'user', 'values',
- 'varying', 'view', 'waitfor', 'when', 'where', 'while', 'with',
- 'writetext',
- ])
-
-
-class REAL(sqltypes.REAL):
- __visit_name__ = 'REAL'
-
- def __init__(self, **kw):
- # REAL is a synonym for FLOAT(24) on SQL server
- kw['precision'] = 24
- super(REAL, self).__init__(**kw)
-
-class TINYINT(sqltypes.Integer):
- __visit_name__ = 'TINYINT'
-
-
-# MSSQL DATE/TIME types have varied behavior, sometimes returning
-# strings. MSDate/TIME check for everything, and always
-# filter bind parameters into datetime objects (required by pyodbc,
-# not sure about other dialects).
-
-class _MSDate(sqltypes.Date):
- def bind_processor(self, dialect):
- def process(value):
- if type(value) == datetime.date:
- return datetime.datetime(value.year, value.month, value.day)
- else:
- return value
- return process
-
- _reg = re.compile(r"(\d+)-(\d+)-(\d+)")
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.date()
- elif isinstance(value, basestring):
- return datetime.date(*[
- int(x or 0)
- for x in self._reg.match(value).groups()
- ])
- else:
- return value
- return process
-
-class TIME(sqltypes.TIME):
- def __init__(self, precision=None, **kwargs):
- self.precision = precision
- super(TIME, self).__init__()
-
- __zero_date = datetime.date(1900, 1, 1)
-
- def bind_processor(self, dialect):
- def process(value):
- if isinstance(value, datetime.datetime):
- value = datetime.datetime.combine(
- self.__zero_date, value.time())
- elif isinstance(value, datetime.time):
- value = datetime.datetime.combine(self.__zero_date, value)
- return value
- return process
-
- _reg = re.compile(r"(\d+):(\d+):(\d+)(?:\.(\d+))?")
- def result_processor(self, dialect, coltype):
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
- elif isinstance(value, basestring):
- return datetime.time(*[
- int(x or 0)
- for x in self._reg.match(value).groups()])
- else:
- return value
- return process
-
-class _DateTimeBase(object):
- def bind_processor(self, dialect):
- def process(value):
- if type(value) == datetime.date:
- return datetime.datetime(value.year, value.month, value.day)
- else:
- return value
- return process
-
-class _MSDateTime(_DateTimeBase, sqltypes.DateTime):
- pass
-
-class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime):
- __visit_name__ = 'SMALLDATETIME'
-
-class DATETIME2(_DateTimeBase, sqltypes.DateTime):
- __visit_name__ = 'DATETIME2'
-
- def __init__(self, precision=None, **kw):
- super(DATETIME2, self).__init__(**kw)
- self.precision = precision
-
-
-# TODO: is this not an Interval ?
-class DATETIMEOFFSET(sqltypes.TypeEngine):
- __visit_name__ = 'DATETIMEOFFSET'
-
- def __init__(self, precision=None, **kwargs):
- self.precision = precision
-
-class _StringType(object):
- """Base for MSSQL string types."""
-
- def __init__(self, collation=None):
- self.collation = collation
-
-class TEXT(_StringType, sqltypes.TEXT):
- """MSSQL TEXT type, for variable-length text up to 2^31 characters."""
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct a TEXT.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.Text.__init__(self, length, **kw)
-
-class NTEXT(_StringType, sqltypes.UnicodeText):
- """MSSQL NTEXT type, for variable-length unicode text up to 2^30
- characters."""
-
- __visit_name__ = 'NTEXT'
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct a NTEXT.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.UnicodeText.__init__(self, length, **kw)
-
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
- """MSSQL VARCHAR type, for variable-length non-Unicode data with a maximum
- of 8,000 characters."""
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct a VARCHAR.
-
-        :param length: Optional, maximum data length, in characters.
-
- :param convert_unicode: defaults to False. If True, convert
- ``unicode`` data sent to the database to a ``str``
- bytestring, and convert bytestrings coming back from the
- database into ``unicode``.
-
- Bytestrings are encoded using the dialect's
- :attr:`~sqlalchemy.engine.base.Dialect.encoding`, which
- defaults to `utf-8`.
-
- If False, may be overridden by
- :attr:`sqlalchemy.engine.base.Dialect.convert_unicode`.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.VARCHAR.__init__(self, length, **kw)
-
-class NVARCHAR(_StringType, sqltypes.NVARCHAR):
- """MSSQL NVARCHAR type.
-
- For variable-length unicode character data up to 4,000 characters."""
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct a NVARCHAR.
-
-        :param length: Optional, maximum data length, in characters.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.NVARCHAR.__init__(self, length, **kw)
-
-class CHAR(_StringType, sqltypes.CHAR):
- """MSSQL CHAR type, for fixed-length non-Unicode data with a maximum
- of 8,000 characters."""
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct a CHAR.
-
-        :param length: Optional, maximum data length, in characters.
-
- :param convert_unicode: defaults to False. If True, convert
- ``unicode`` data sent to the database to a ``str``
- bytestring, and convert bytestrings coming back from the
- database into ``unicode``.
-
- Bytestrings are encoded using the dialect's
- :attr:`~sqlalchemy.engine.base.Dialect.encoding`, which
- defaults to `utf-8`.
-
- If False, may be overridden by
- :attr:`sqlalchemy.engine.base.Dialect.convert_unicode`.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.CHAR.__init__(self, length, **kw)
-
-class NCHAR(_StringType, sqltypes.NCHAR):
- """MSSQL NCHAR type.
-
- For fixed-length unicode character data up to 4,000 characters."""
-
- def __init__(self, length=None, collation=None, **kw):
- """Construct an NCHAR.
-
-        :param length: Optional, maximum data length, in characters.
-
- :param collation: Optional, a column-level collation for this string
- value. Accepts a Windows Collation Name or a SQL Collation Name.
-
- """
- _StringType.__init__(self, collation)
- sqltypes.NCHAR.__init__(self, length, **kw)
-
-class IMAGE(sqltypes.LargeBinary):
- __visit_name__ = 'IMAGE'
-
-class BIT(sqltypes.TypeEngine):
- __visit_name__ = 'BIT'
-
-
-class MONEY(sqltypes.TypeEngine):
- __visit_name__ = 'MONEY'
-
-class SMALLMONEY(sqltypes.TypeEngine):
- __visit_name__ = 'SMALLMONEY'
-
-class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
- __visit_name__ = "UNIQUEIDENTIFIER"
-
-class SQL_VARIANT(sqltypes.TypeEngine):
- __visit_name__ = 'SQL_VARIANT'
-
-# old names.
-MSDateTime = _MSDateTime
-MSDate = _MSDate
-MSReal = REAL
-MSTinyInteger = TINYINT
-MSTime = TIME
-MSSmallDateTime = SMALLDATETIME
-MSDateTime2 = DATETIME2
-MSDateTimeOffset = DATETIMEOFFSET
-MSText = TEXT
-MSNText = NTEXT
-MSString = VARCHAR
-MSNVarchar = NVARCHAR
-MSChar = CHAR
-MSNChar = NCHAR
-MSBinary = BINARY
-MSVarBinary = VARBINARY
-MSImage = IMAGE
-MSBit = BIT
-MSMoney = MONEY
-MSSmallMoney = SMALLMONEY
-MSUniqueIdentifier = UNIQUEIDENTIFIER
-MSVariant = SQL_VARIANT
-
-ischema_names = {
- 'int' : INTEGER,
- 'bigint': BIGINT,
- 'smallint' : SMALLINT,
- 'tinyint' : TINYINT,
- 'varchar' : VARCHAR,
- 'nvarchar' : NVARCHAR,
- 'char' : CHAR,
- 'nchar' : NCHAR,
- 'text' : TEXT,
- 'ntext' : NTEXT,
- 'decimal' : DECIMAL,
- 'numeric' : NUMERIC,
- 'float' : FLOAT,
- 'datetime' : DATETIME,
- 'datetime2' : DATETIME2,
- 'datetimeoffset' : DATETIMEOFFSET,
- 'date': DATE,
- 'time': TIME,
- 'smalldatetime' : SMALLDATETIME,
- 'binary' : BINARY,
- 'varbinary' : VARBINARY,
- 'bit': BIT,
- 'real' : REAL,
- 'image' : IMAGE,
- 'timestamp': TIMESTAMP,
- 'money': MONEY,
- 'smallmoney': SMALLMONEY,
- 'uniqueidentifier': UNIQUEIDENTIFIER,
- 'sql_variant': SQL_VARIANT,
-}
-
-
-class MSTypeCompiler(compiler.GenericTypeCompiler):
- def _extend(self, spec, type_, length=None):
- """Extend a string-type declaration with standard SQL
- COLLATE annotations.
-
- """
-
- if getattr(type_, 'collation', None):
- collation = 'COLLATE %s' % type_.collation
- else:
- collation = None
-
- if not length:
- length = type_.length
-
- if length:
- spec = spec + "(%s)" % length
-
- return ' '.join([c for c in (spec, collation)
- if c is not None])
-
- def visit_FLOAT(self, type_):
- precision = getattr(type_, 'precision', None)
- if precision is None:
- return "FLOAT"
- else:
- return "FLOAT(%(precision)s)" % {'precision': precision}
-
- def visit_TINYINT(self, type_):
- return "TINYINT"
-
- def visit_DATETIMEOFFSET(self, type_):
- if type_.precision:
- return "DATETIMEOFFSET(%s)" % type_.precision
- else:
- return "DATETIMEOFFSET"
-
- def visit_TIME(self, type_):
- precision = getattr(type_, 'precision', None)
- if precision:
- return "TIME(%s)" % precision
- else:
- return "TIME"
-
- def visit_DATETIME2(self, type_):
- precision = getattr(type_, 'precision', None)
- if precision:
- return "DATETIME2(%s)" % precision
- else:
- return "DATETIME2"
-
- def visit_SMALLDATETIME(self, type_):
- return "SMALLDATETIME"
-
- def visit_unicode(self, type_):
- return self.visit_NVARCHAR(type_)
-
- def visit_unicode_text(self, type_):
- return self.visit_NTEXT(type_)
-
- def visit_NTEXT(self, type_):
- return self._extend("NTEXT", type_)
-
- def visit_TEXT(self, type_):
- return self._extend("TEXT", type_)
-
- def visit_VARCHAR(self, type_):
- return self._extend("VARCHAR", type_,
- length = type_.length or 'max')
-
- def visit_CHAR(self, type_):
- return self._extend("CHAR", type_)
-
- def visit_NCHAR(self, type_):
- return self._extend("NCHAR", type_)
-
- def visit_NVARCHAR(self, type_):
- return self._extend("NVARCHAR", type_,
- length = type_.length or 'max')
-
- def visit_date(self, type_):
- if self.dialect.server_version_info < MS_2008_VERSION:
- return self.visit_DATETIME(type_)
- else:
- return self.visit_DATE(type_)
-
- def visit_time(self, type_):
- if self.dialect.server_version_info < MS_2008_VERSION:
- return self.visit_DATETIME(type_)
- else:
- return self.visit_TIME(type_)
-
- def visit_large_binary(self, type_):
- return self.visit_IMAGE(type_)
-
- def visit_IMAGE(self, type_):
- return "IMAGE"
-
- def visit_VARBINARY(self, type_):
- return self._extend(
- "VARBINARY",
- type_,
- length=type_.length or 'max')
-
- def visit_boolean(self, type_):
- return self.visit_BIT(type_)
-
- def visit_BIT(self, type_):
- return "BIT"
-
- def visit_MONEY(self, type_):
- return "MONEY"
-
- def visit_SMALLMONEY(self, type_):
- return 'SMALLMONEY'
-
- def visit_UNIQUEIDENTIFIER(self, type_):
- return "UNIQUEIDENTIFIER"
-
- def visit_SQL_VARIANT(self, type_):
- return 'SQL_VARIANT'
-
-class MSExecutionContext(default.DefaultExecutionContext):
- _enable_identity_insert = False
- _select_lastrowid = False
- _result_proxy = None
- _lastrowid = None
-
- def pre_exec(self):
- """Activate IDENTITY_INSERT if needed."""
-
- if self.isinsert:
- tbl = self.compiled.statement.table
- seq_column = tbl._autoincrement_column
- insert_has_sequence = seq_column is not None
-
- if insert_has_sequence:
- self._enable_identity_insert = \
- seq_column.key in self.compiled_parameters[0]
- else:
- self._enable_identity_insert = False
-
- self._select_lastrowid = insert_has_sequence and \
- not self.compiled.returning and \
- not self._enable_identity_insert and \
- not self.executemany
-
- if self._enable_identity_insert:
- self.cursor.execute("SET IDENTITY_INSERT %s ON" %
- self.dialect.identifier_preparer.format_table(tbl))
-
- def post_exec(self):
- """Disable IDENTITY_INSERT if enabled."""
-
- if self._select_lastrowid:
- if self.dialect.use_scope_identity:
- self.cursor.execute(
- "SELECT scope_identity() AS lastrowid", ())
- else:
- self.cursor.execute("SELECT @@identity AS lastrowid", ())
- # fetchall() ensures the cursor is consumed without closing it
- row = self.cursor.fetchall()[0]
- self._lastrowid = int(row[0])
-
- if (self.isinsert or self.isupdate or self.isdelete) and \
- self.compiled.returning:
- self._result_proxy = base.FullyBufferedResultProxy(self)
-
- if self._enable_identity_insert:
- self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.
- format_table(self.compiled.statement.table)
- )
-
- def get_lastrowid(self):
- return self._lastrowid
-
- def handle_dbapi_exception(self, e):
- if self._enable_identity_insert:
- try:
- self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.\
- format_table(self.compiled.statement.table)
- )
- except:
- pass
-
- def get_result_proxy(self):
- if self._result_proxy:
- return self._result_proxy
- else:
- return base.ResultProxy(self)
-
-class MSSQLCompiler(compiler.SQLCompiler):
- returning_precedes_values = True
-
- extract_map = util.update_copy(
- compiler.SQLCompiler.extract_map,
- {
- 'doy': 'dayofyear',
- 'dow': 'weekday',
- 'milliseconds': 'millisecond',
- 'microseconds': 'microsecond'
- })
-
- def __init__(self, *args, **kwargs):
- self.tablealiases = {}
- super(MSSQLCompiler, self).__init__(*args, **kwargs)
-
- def visit_now_func(self, fn, **kw):
- return "CURRENT_TIMESTAMP"
-
- def visit_current_date_func(self, fn, **kw):
- return "GETDATE()"
-
- def visit_length_func(self, fn, **kw):
- return "LEN%s" % self.function_argspec(fn, **kw)
-
- def visit_char_length_func(self, fn, **kw):
- return "LEN%s" % self.function_argspec(fn, **kw)
-
- def visit_concat_op(self, binary, **kw):
- return "%s + %s" % \
- (self.process(binary.left, **kw),
- self.process(binary.right, **kw))
-
- def visit_match_op(self, binary, **kw):
- return "CONTAINS (%s, %s)" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw))
-
- def get_select_precolumns(self, select):
- """ MS-SQL puts TOP, it's version of LIMIT here """
- if select._distinct or select._limit:
- s = select._distinct and "DISTINCT " or ""
-
- # ODBC drivers and possibly others
- # don't support bind params in the SELECT clause on SQL Server.
- # so have to use literal here.
- if select._limit:
- if not select._offset:
- s += "TOP %d " % select._limit
- return s
- return compiler.SQLCompiler.get_select_precolumns(self, select)
-
- def limit_clause(self, select):
- # Limit in mssql is after the select keyword
- return ""
-
- def visit_select(self, select, **kwargs):
- """Look for ``LIMIT`` and OFFSET in a select statement, and if
- so tries to wrap it in a subquery with ``row_number()`` criterion.
-
- """
- if not getattr(select, '_mssql_visit', None) and select._offset:
- # to use ROW_NUMBER(), an ORDER BY is required.
- orderby = self.process(select._order_by_clause)
- if not orderby:
- raise exc.InvalidRequestError('MSSQL requires an order_by when '
- 'using an offset.')
-
- _offset = select._offset
- _limit = select._limit
- select._mssql_visit = True
- select = select.column(
- sql.literal_column("ROW_NUMBER() OVER (ORDER BY %s)" \
- % orderby).label("mssql_rn")
- ).order_by(None).alias()
-
- mssql_rn = sql.column('mssql_rn')
- limitselect = sql.select([c for c in select.c if
- c.key!='mssql_rn'])
-            limitselect.append_whereclause(mssql_rn > _offset)
-            if _limit is not None:
-                limitselect.append_whereclause(mssql_rn <= (_limit + _offset))
- return self.process(limitselect, iswrapper=True, **kwargs)
- else:
- return compiler.SQLCompiler.visit_select(self, select, **kwargs)
-
- def _schema_aliased_table(self, table):
- if getattr(table, 'schema', None) is not None:
- if table not in self.tablealiases:
- self.tablealiases[table] = table.alias()
- return self.tablealiases[table]
- else:
- return None
-
- def visit_table(self, table, mssql_aliased=False, **kwargs):
- if mssql_aliased is table:
- return super(MSSQLCompiler, self).visit_table(table, **kwargs)
-
- # alias schema-qualified tables
- alias = self._schema_aliased_table(table)
- if alias is not None:
- return self.process(alias, mssql_aliased=table, **kwargs)
- else:
- return super(MSSQLCompiler, self).visit_table(table, **kwargs)
-
- def visit_alias(self, alias, **kwargs):
- # translate for schema-qualified table aliases
- kwargs['mssql_aliased'] = alias.original
- return super(MSSQLCompiler, self).visit_alias(alias, **kwargs)
-
- def visit_extract(self, extract, **kw):
- field = self.extract_map.get(extract.field, extract.field)
- return 'DATEPART("%s", %s)' % \
- (field, self.process(extract.expr, **kw))
-
- def visit_rollback_to_savepoint(self, savepoint_stmt):
- return ("ROLLBACK TRANSACTION %s"
- % self.preparer.format_savepoint(savepoint_stmt))
-
- def visit_column(self, column, result_map=None, **kwargs):
- if column.table is not None and \
- (not self.isupdate and not self.isdelete) or self.is_subquery():
- # translate for schema-qualified table aliases
- t = self._schema_aliased_table(column.table)
- if t is not None:
- converted = expression._corresponding_column_or_error(
- t, column)
-
- if result_map is not None:
- result_map[column.name.lower()] = \
- (column.name, (column, ),
- column.type)
-
- return super(MSSQLCompiler, self).\
- visit_column(converted,
- result_map=None, **kwargs)
-
- return super(MSSQLCompiler, self).visit_column(column,
- result_map=result_map,
- **kwargs)
-
- def visit_binary(self, binary, **kwargs):
- """Move bind parameters to the right-hand side of an operator, where
- possible.
-
- """
- if (
- isinstance(binary.left, expression._BindParamClause)
- and binary.operator == operator.eq
- and not isinstance(binary.right, expression._BindParamClause)
- ):
- return self.process(
- expression._BinaryExpression(binary.right,
- binary.left,
- binary.operator),
- **kwargs)
- else:
- if (
- (binary.operator is operator.eq or
- binary.operator is operator.ne)
- and (
- (isinstance(binary.left, expression._FromGrouping)
- and isinstance(binary.left.element,
- expression._ScalarSelect))
- or (isinstance(binary.right, expression._FromGrouping)
- and isinstance(binary.right.element,
- expression._ScalarSelect))
- or isinstance(binary.left, expression._ScalarSelect)
- or isinstance(binary.right, expression._ScalarSelect)
- )
- ):
- op = binary.operator == operator.eq and "IN" or "NOT IN"
- return self.process(
- expression._BinaryExpression(binary.left,
- binary.right, op),
- **kwargs)
- return super(MSSQLCompiler, self).visit_binary(binary, **kwargs)
-
- def returning_clause(self, stmt, returning_cols):
-
- if self.isinsert or self.isupdate:
- target = stmt.table.alias("inserted")
- else:
- target = stmt.table.alias("deleted")
-
- adapter = sql_util.ClauseAdapter(target)
- def col_label(col):
- adapted = adapter.traverse(col)
- if isinstance(col, expression._Label):
-                return adapted.label(col.key)
- else:
- return self.label_select_column(None, adapted, asfrom=False)
-
- columns = [
- self.process(
- col_label(c),
- within_columns_clause=True,
- result_map=self.result_map
- )
- for c in expression._select_iterables(returning_cols)
- ]
- return 'OUTPUT ' + ', '.join(columns)
-
- def label_select_column(self, select, column, asfrom):
- if isinstance(column, expression.Function):
- return column.label(None)
- else:
- return super(MSSQLCompiler, self).\
- label_select_column(select, column, asfrom)
-
- def for_update_clause(self, select):
- # "FOR UPDATE" is only allowed on "DECLARE CURSOR" which
- # SQLAlchemy doesn't use
- return ''
-
- def order_by_clause(self, select, **kw):
- order_by = self.process(select._order_by_clause, **kw)
-
- # MSSQL only allows ORDER BY in subqueries if there is a LIMIT
- if order_by and (not self.is_subquery() or select._limit):
- return " ORDER BY " + order_by
- else:
- return ""
-
-class MSSQLStrictCompiler(MSSQLCompiler):
- """A subclass of MSSQLCompiler which disables the usage of bind
- parameters where not allowed natively by MS-SQL.
-
- A dialect may use this compiler on a platform where native
- binds are used.
-
- """
- ansi_bind_rules = True
-
- def visit_in_op(self, binary, **kw):
- kw['literal_binds'] = True
- return "%s IN %s" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw)
- )
-
- def visit_notin_op(self, binary, **kw):
- kw['literal_binds'] = True
- return "%s NOT IN %s" % (
- self.process(binary.left, **kw),
- self.process(binary.right, **kw)
- )
-
- def visit_function(self, func, **kw):
- kw['literal_binds'] = True
- return super(MSSQLStrictCompiler, self).visit_function(func, **kw)
-
- def render_literal_value(self, value, type_):
- """
- For date and datetime values, convert to a string
- format acceptable to MSSQL. That seems to be the
- so-called ODBC canonical date format which looks
- like this:
-
- yyyy-mm-dd hh:mi:ss.mmm(24h)
-
- For other data types, call the base class implementation.
- """
- # datetime and date are both subclasses of datetime.date
- if issubclass(type(value), datetime.date):
- # SQL Server wants single quotes around the date string.
- return "'" + str(value) + "'"
- else:
- return super(MSSQLStrictCompiler, self).\
- render_literal_value(value, type_)
-
-class MSDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, **kwargs):
- colspec = (self.preparer.format_column(column) + " "
- + self.dialect.type_compiler.process(column.type))
-
- if column.nullable is not None:
- if not column.nullable or column.primary_key:
- colspec += " NOT NULL"
- else:
- colspec += " NULL"
-
- if column.table is None:
- raise exc.InvalidRequestError(
- "mssql requires Table-bound columns "
- "in order to generate DDL")
-
- seq_col = column.table._autoincrement_column
-
-        # install an IDENTITY Sequence if we have an implicit IDENTITY column
- if seq_col is column:
- sequence = isinstance(column.default, sa_schema.Sequence) and \
- column.default
- if sequence:
- start, increment = sequence.start or 1, \
- sequence.increment or 1
- else:
- start, increment = 1, 1
- colspec += " IDENTITY(%s,%s)" % (start, increment)
- else:
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- return colspec
-
- def visit_drop_index(self, drop):
- return "\nDROP INDEX %s.%s" % (
- self.preparer.quote_identifier(drop.element.table.name),
- self.preparer.quote(
- self._index_identifier(drop.element.name),
- drop.element.quote)
- )
-
-
-class MSIdentifierPreparer(compiler.IdentifierPreparer):
- reserved_words = RESERVED_WORDS
-
- def __init__(self, dialect):
- super(MSIdentifierPreparer, self).__init__(dialect, initial_quote='[',
- final_quote=']')
-
- def _escape_identifier(self, value):
- return value
-
- def quote_schema(self, schema, force=True):
- """Prepare a quoted table and schema name."""
- result = '.'.join([self.quote(x, force) for x in schema.split('.')])
- return result
-
-class MSDialect(default.DefaultDialect):
- name = 'mssql'
- supports_default_values = True
- supports_empty_insert = False
- execution_ctx_cls = MSExecutionContext
- use_scope_identity = True
- max_identifier_length = 128
- schema_name = "dbo"
-
- colspecs = {
- sqltypes.DateTime : _MSDateTime,
- sqltypes.Date : _MSDate,
- sqltypes.Time : TIME,
- }
-
- ischema_names = ischema_names
-
- supports_native_boolean = False
- supports_unicode_binds = True
- postfetch_lastrowid = True
-
- server_version_info = ()
-
- statement_compiler = MSSQLCompiler
- ddl_compiler = MSDDLCompiler
- type_compiler = MSTypeCompiler
- preparer = MSIdentifierPreparer
-
- def __init__(self,
- query_timeout=None,
- use_scope_identity=True,
- max_identifier_length=None,
- schema_name=u"dbo", **opts):
- self.query_timeout = int(query_timeout or 0)
- self.schema_name = schema_name
-
- self.use_scope_identity = use_scope_identity
- self.max_identifier_length = int(max_identifier_length or 0) or \
- self.max_identifier_length
- super(MSDialect, self).__init__(**opts)
-
- def do_savepoint(self, connection, name):
- util.warn("Savepoint support in mssql is experimental and "
- "may lead to data loss.")
- connection.execute("IF @@TRANCOUNT = 0 BEGIN TRANSACTION")
- connection.execute("SAVE TRANSACTION %s" % name)
-
- def do_release_savepoint(self, connection, name):
- pass
-
- def initialize(self, connection):
- super(MSDialect, self).initialize(connection)
- if self.server_version_info[0] not in range(8, 17):
-            # FreeTDS with version 4.2 seems to report a number
-            # like "95.10.255" here; its meaning is unknown, so
-            # emit a warning.
- util.warn(
- "Unrecognized server version info '%s'. Version specific "
- "behaviors may not function properly. If using ODBC "
- "with FreeTDS, ensure server version 7.0 or 8.0, not 4.2, "
- "is configured in the FreeTDS configuration." %
- ".".join(str(x) for x in self.server_version_info) )
- if self.server_version_info >= MS_2005_VERSION and \
- 'implicit_returning' not in self.__dict__:
- self.implicit_returning = True
-
- def _get_default_schema_name(self, connection):
- user_name = connection.scalar("SELECT user_name() as user_name;")
- if user_name is not None:
- # now, get the default schema
- query = sql.text("""
- SELECT default_schema_name FROM
- sys.database_principals
- WHERE name = :name
- AND type = 'S'
- """)
- try:
- default_schema_name = connection.scalar(query, name=user_name)
- if default_schema_name is not None:
- return unicode(default_schema_name)
- except:
- pass
- return self.schema_name
-
-
- def has_table(self, connection, tablename, schema=None):
- current_schema = schema or self.default_schema_name
- columns = ischema.columns
- if current_schema:
- whereclause = sql.and_(columns.c.table_name==tablename,
- columns.c.table_schema==current_schema)
- else:
- whereclause = columns.c.table_name==tablename
- s = sql.select([columns], whereclause)
- c = connection.execute(s)
- return c.first() is not None
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = sql.select([ischema.schemata.c.schema_name],
- order_by=[ischema.schemata.c.schema_name]
- )
- schema_names = [r[0] for r in connection.execute(s)]
- return schema_names
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- current_schema = schema or self.default_schema_name
- tables = ischema.tables
- s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == current_schema,
- tables.c.table_type == u'BASE TABLE'
- ),
- order_by=[tables.c.table_name]
- )
- table_names = [r[0] for r in connection.execute(s)]
- return table_names
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- current_schema = schema or self.default_schema_name
- tables = ischema.tables
- s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == current_schema,
- tables.c.table_type == u'VIEW'
- ),
- order_by=[tables.c.table_name]
- )
- view_names = [r[0] for r in connection.execute(s)]
- return view_names
-
- @reflection.cache
- def get_indexes(self, connection, tablename, schema=None, **kw):
-        # index reflection uses the system catalogs and is not
-        # supported below MS 2005
- if self.server_version_info < MS_2005_VERSION:
- return []
-
- current_schema = schema or self.default_schema_name
- full_tname = "%s.%s" % (current_schema, tablename)
-
- rp = connection.execute(
- sql.text("select ind.index_id, ind.is_unique, ind.name "
- "from sys.indexes as ind join sys.tables as tab on "
- "ind.object_id=tab.object_id "
- "join sys.schemas as sch on sch.schema_id=tab.schema_id "
- "where tab.name = :tabname "
- "and sch.name=:schname "
- "and ind.is_primary_key=0",
- bindparams=[
- sql.bindparam('tabname', tablename,
- sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
- sqltypes.String(convert_unicode=True))
- ]
- )
- )
- indexes = {}
- for row in rp:
- indexes[row['index_id']] = {
- 'name':row['name'],
- 'unique':row['is_unique'] == 1,
- 'column_names':[]
- }
- rp = connection.execute(
- sql.text(
- "select ind_col.index_id, ind_col.object_id, col.name "
- "from sys.columns as col "
- "join sys.tables as tab on tab.object_id=col.object_id "
- "join sys.index_columns as ind_col on "
- "(ind_col.column_id=col.column_id and "
- "ind_col.object_id=tab.object_id) "
- "join sys.schemas as sch on sch.schema_id=tab.schema_id "
- "where tab.name=:tabname "
- "and sch.name=:schname",
- bindparams=[
- sql.bindparam('tabname', tablename,
- sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
- sqltypes.String(convert_unicode=True))
- ]),
- )
- for row in rp:
- if row['index_id'] in indexes:
- indexes[row['index_id']]['column_names'].append(row['name'])
-
- return indexes.values()
-
- @reflection.cache
- def get_view_definition(self, connection, viewname, schema=None, **kw):
- current_schema = schema or self.default_schema_name
-
- rp = connection.execute(
- sql.text(
- "select definition from sys.sql_modules as mod, "
- "sys.views as views, "
- "sys.schemas as sch"
- " where "
- "mod.object_id=views.object_id and "
- "views.schema_id=sch.schema_id and "
- "views.name=:viewname and sch.name=:schname",
- bindparams=[
- sql.bindparam('viewname', viewname,
- sqltypes.String(convert_unicode=True)),
- sql.bindparam('schname', current_schema,
- sqltypes.String(convert_unicode=True))
- ]
- )
- )
-
- if rp:
- view_def = rp.scalar()
- return view_def
-
- @reflection.cache
- def get_columns(self, connection, tablename, schema=None, **kw):
- # Get base columns
- current_schema = schema or self.default_schema_name
- columns = ischema.columns
- if current_schema:
- whereclause = sql.and_(columns.c.table_name==tablename,
- columns.c.table_schema==current_schema)
- else:
- whereclause = columns.c.table_name==tablename
- s = sql.select([columns], whereclause,
- order_by=[columns.c.ordinal_position])
- c = connection.execute(s)
- cols = []
- while True:
- row = c.fetchone()
- if row is None:
- break
- (name, type, nullable, charlen,
- numericprec, numericscale, default, collation) = (
- row[columns.c.column_name],
- row[columns.c.data_type],
- row[columns.c.is_nullable] == 'YES',
- row[columns.c.character_maximum_length],
- row[columns.c.numeric_precision],
- row[columns.c.numeric_scale],
- row[columns.c.column_default],
- row[columns.c.collation_name]
- )
- coltype = self.ischema_names.get(type, None)
-
- kwargs = {}
- if coltype in (MSString, MSChar, MSNVarchar, MSNChar, MSText,
- MSNText, MSBinary, MSVarBinary,
- sqltypes.LargeBinary):
- kwargs['length'] = charlen
- if collation:
- kwargs['collation'] = collation
- if coltype == MSText or \
- (coltype in (MSString, MSNVarchar) and charlen == -1):
- kwargs.pop('length')
-
- if coltype is None:
- util.warn(
- "Did not recognize type '%s' of column '%s'" %
- (type, name))
- coltype = sqltypes.NULLTYPE
- else:
- if issubclass(coltype, sqltypes.Numeric) and \
- coltype is not MSReal:
- kwargs['scale'] = numericscale
- kwargs['precision'] = numericprec
-
- coltype = coltype(**kwargs)
- cdict = {
- 'name' : name,
- 'type' : coltype,
- 'nullable' : nullable,
- 'default' : default,
- 'autoincrement':False,
- }
- cols.append(cdict)
- # autoincrement and identity
- colmap = {}
- for col in cols:
- colmap[col['name']] = col
- # We also run an sp_columns to check for identity columns:
- cursor = connection.execute("sp_columns @table_name = '%s', "
- "@table_owner = '%s'"
- % (tablename, current_schema))
- ic = None
- while True:
- row = cursor.fetchone()
- if row is None:
- break
- (col_name, type_name) = row[3], row[5]
- if type_name.endswith("identity") and col_name in colmap:
- ic = col_name
- colmap[col_name]['autoincrement'] = True
- colmap[col_name]['sequence'] = dict(
- name='%s_identity' % col_name)
- break
- cursor.close()
-
- if ic is not None and self.server_version_info >= MS_2005_VERSION:
- table_fullname = "%s.%s" % (current_schema, tablename)
- cursor = connection.execute(
- "select ident_seed('%s'), ident_incr('%s')"
- % (table_fullname, table_fullname)
- )
-
- row = cursor.first()
- if row is not None and row[0] is not None:
- colmap[ic]['sequence'].update({
- 'start' : int(row[0]),
- 'increment' : int(row[1])
- })
- return cols
-
- @reflection.cache
- def get_primary_keys(self, connection, tablename, schema=None, **kw):
- current_schema = schema or self.default_schema_name
- pkeys = []
- # information_schema.referential_constraints
- RR = ischema.ref_constraints
- # information_schema.table_constraints
- TC = ischema.constraints
- # information_schema.constraint_column_usage:
- # the constrained column
- C = ischema.key_constraints.alias('C')
- # information_schema.constraint_column_usage:
- # the referenced column
- R = ischema.key_constraints.alias('R')
-
- # Primary key constraints
- s = sql.select([C.c.column_name, TC.c.constraint_type],
- sql.and_(TC.c.constraint_name == C.c.constraint_name,
- C.c.table_name == tablename,
- C.c.table_schema == current_schema)
- )
- c = connection.execute(s)
- for row in c:
- if 'PRIMARY' in row[TC.c.constraint_type.name]:
- pkeys.append(row[0])
- return pkeys
-
- @reflection.cache
- def get_foreign_keys(self, connection, tablename, schema=None, **kw):
- current_schema = schema or self.default_schema_name
- # Add constraints
- #information_schema.referential_constraints
- RR = ischema.ref_constraints
- # information_schema.table_constraints
- TC = ischema.constraints
- # information_schema.constraint_column_usage:
- # the constrained column
- C = ischema.key_constraints.alias('C')
- # information_schema.constraint_column_usage:
- # the referenced column
- R = ischema.key_constraints.alias('R')
-
- # Foreign key constraints
- s = sql.select([C.c.column_name,
- R.c.table_schema, R.c.table_name, R.c.column_name,
- RR.c.constraint_name, RR.c.match_option,
- RR.c.update_rule,
- RR.c.delete_rule],
- sql.and_(C.c.table_name == tablename,
- C.c.table_schema == current_schema,
- C.c.constraint_name == RR.c.constraint_name,
- R.c.constraint_name ==
- RR.c.unique_constraint_name,
- C.c.ordinal_position == R.c.ordinal_position
- ),
- order_by = [
- RR.c.constraint_name,
- R.c.ordinal_position])
-
-
- # group rows by constraint ID, to handle multi-column FKs
- fkeys = []
- fknm, scols, rcols = (None, [], [])
-
- def fkey_rec():
- return {
- 'name' : None,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : None,
- 'referred_columns' : []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- for r in connection.execute(s).fetchall():
- scol, rschema, rtbl, rcol, rfknm, fkmatch, fkuprule, fkdelrule = r
-
- rec = fkeys[rfknm]
- rec['name'] = rfknm
- if not rec['referred_table']:
- rec['referred_table'] = rtbl
-
- if schema is not None or current_schema != rschema:
- rec['referred_schema'] = rschema
-
- local_cols, remote_cols = \
- rec['constrained_columns'],\
- rec['referred_columns']
-
- local_cols.append(scol)
- remote_cols.append(rcol)
-
- return fkeys.values()
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/information_schema.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/information_schema.py
deleted file mode 100755
index 87dd0a16..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/information_schema.py
+++ /dev/null
@@ -1,96 +0,0 @@
-# mssql/information_schema.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-# TODO: should be using the sys. catalog with SQL Server, not information schema
-
-from sqlalchemy import Table, MetaData, Column
-from sqlalchemy.types import String, Unicode, Integer, TypeDecorator
-
-ischema = MetaData()
-
-class CoerceUnicode(TypeDecorator):
- impl = Unicode
-
- def process_bind_param(self, value, dialect):
- if isinstance(value, str):
- value = value.decode(dialect.encoding)
- return value
-
-schemata = Table("SCHEMATA", ischema,
- Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
- Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
- Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
- schema="INFORMATION_SCHEMA")
-
-tables = Table("TABLES", ischema,
- Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("TABLE_TYPE", String(convert_unicode=True), key="table_type"),
- schema="INFORMATION_SCHEMA")
-
-columns = Table("COLUMNS", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("IS_NULLABLE", Integer, key="is_nullable"),
- Column("DATA_TYPE", String, key="data_type"),
- Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
- Column("CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"),
- Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
- Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
- Column("COLUMN_DEFAULT", Integer, key="column_default"),
- Column("COLLATION_NAME", String, key="collation_name"),
- schema="INFORMATION_SCHEMA")
-
-constraints = Table("TABLE_CONSTRAINTS", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- Column("CONSTRAINT_TYPE", String(convert_unicode=True), key="constraint_type"),
- schema="INFORMATION_SCHEMA")
-
-column_constraints = Table("CONSTRAINT_COLUMN_USAGE", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- schema="INFORMATION_SCHEMA")
-
-key_constraints = Table("KEY_COLUMN_USAGE", ischema,
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
- schema="INFORMATION_SCHEMA")
-
-ref_constraints = Table("REFERENTIAL_CONSTRAINTS", ischema,
- Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"),
- Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
- Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
- # TODO: is CATLOG misspelled ?
- Column("UNIQUE_CONSTRAINT_CATLOG", CoerceUnicode,
- key="unique_constraint_catalog"),
-
- Column("UNIQUE_CONSTRAINT_SCHEMA", CoerceUnicode,
- key="unique_constraint_schema"),
- Column("UNIQUE_CONSTRAINT_NAME", CoerceUnicode,
- key="unique_constraint_name"),
- Column("MATCH_OPTION", String, key="match_option"),
- Column("UPDATE_RULE", String, key="update_rule"),
- Column("DELETE_RULE", String, key="delete_rule"),
- schema="INFORMATION_SCHEMA")
-
-views = Table("VIEWS", ischema,
- Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
- Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
- Column("TABLE_NAME", CoerceUnicode, key="table_name"),
- Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
- Column("CHECK_OPTION", String, key="check_option"),
- Column("IS_UPDATABLE", String, key="is_updatable"),
- schema="INFORMATION_SCHEMA")
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/mxodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/mxodbc.py
deleted file mode 100755
index 6a830509..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/mxodbc.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# mssql/mxodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for MS-SQL via mxODBC.
-
-mxODBC is available at:
-
- http://www.egenix.com/
-
-This was tested with mxODBC 3.1.2 and the SQL Server Native
-Client connected to MSSQL 2005 and 2008 Express Editions.
-
-Connecting
-~~~~~~~~~~
-
-Connection is via DSN::
-
- mssql+mxodbc://<username>:<password>@<dsnname>
-
-Execution Modes
-~~~~~~~~~~~~~~~
-
-mxODBC features two styles of statement execution, using the
-``cursor.execute()`` and ``cursor.executedirect()`` methods (the second being
-an extension to the DBAPI specification). The former makes use of a particular
-API call specific to the SQL Server Native Client ODBC driver known as
-SQLDescribeParam, while the latter does not.
-
-mxODBC apparently only makes repeated use of a single prepared statement
-when SQLDescribeParam is used. The advantage to prepared statement reuse is
-one of performance. The disadvantage is that SQLDescribeParam has a limited
-set of scenarios in which bind parameters are understood, including that they
-cannot be placed within the argument lists of function calls, anywhere outside
-the FROM, or even within subqueries within the FROM clause - making the usage
-of bind parameters within SELECT statements impossible for all but the most
-simplistic statements.
-
-For this reason, the mxODBC dialect uses the "native" mode by default only for
-INSERT, UPDATE, and DELETE statements, and uses the escaped string mode for
-all other statements.
-
-This behavior can be controlled via
-:meth:`~sqlalchemy.sql.expression.Executable.execution_options` using the
-``native_odbc_execute`` flag with a value of ``True`` or ``False``, where a
-value of ``True`` will unconditionally use native bind parameters and a value
-of ``False`` will unconditionally use string-escaped parameters.
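-
-For example, a sketch of setting the flag on a statement, where
-``stmt`` is assumed to be an existing executable construct::
-
-    stmt = stmt.execution_options(native_odbc_execute=False)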
-
-"""
-
-
-from sqlalchemy import types as sqltypes
-from sqlalchemy.connectors.mxodbc import MxODBCConnector
-from sqlalchemy.dialects.mssql.pyodbc import MSExecutionContext_pyodbc
-from sqlalchemy.dialects.mssql.base import (MSDialect,
- MSSQLStrictCompiler,
- _MSDateTime, _MSDate, TIME)
-
-
-
-class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc):
- """
- The pyodbc execution context is useful for enabling
-    SELECT SCOPE_IDENTITY in cases where the OUTPUT clause
- does not work (tables with insert triggers).
- """
- #todo - investigate whether the pyodbc execution context
- # is really only being used in cases where OUTPUT
- # won't work.
-
-class MSDialect_mxodbc(MxODBCConnector, MSDialect):
-
- # TODO: may want to use this only if FreeTDS is not in use,
- # since FreeTDS doesn't seem to use native binds.
- statement_compiler = MSSQLStrictCompiler
- execution_ctx_cls = MSExecutionContext_mxodbc
- colspecs = {
- #sqltypes.Numeric : _MSNumeric,
- sqltypes.DateTime : _MSDateTime,
- sqltypes.Date : _MSDate,
- sqltypes.Time : TIME,
- }
-
-
- def __init__(self, description_encoding='latin-1', **params):
- super(MSDialect_mxodbc, self).__init__(**params)
- self.description_encoding = description_encoding
-
-dialect = MSDialect_mxodbc
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pymssql.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pymssql.py
deleted file mode 100755
index 8bc0ad95..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pymssql.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# mssql/pymssql.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for the pymssql dialect.
-
-This dialect supports pymssql 1.0 and greater.
-
-pymssql is available at:
-
- http://pymssql.sourceforge.net/
-
-Connecting
-^^^^^^^^^^
-
-Sample connect string::
-
- mssql+pymssql://<username>:<password>@<freetds_name>
-
-Adding "?charset=utf8" or similar will cause pymssql to return
-strings as Python unicode objects. This can potentially improve
-performance in some scenarios as decoding of strings is
-handled natively.
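-
-For example::
-
-    mssql+pymssql://<username>:<password>@<freetds_name>/?charset=utf8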
-
-Limitations
-^^^^^^^^^^^
-
-pymssql inherits a lot of limitations from FreeTDS, including:
-
-* no support for multibyte schema identifiers
-* poor support for large decimals
-* poor support for binary fields
-* poor support for VARCHAR/CHAR fields over 255 characters
-
-Please consult the pymssql documentation for further information.
-
-"""
-from sqlalchemy.dialects.mssql.base import MSDialect
-from sqlalchemy import types as sqltypes, util, processors
-import re
-
-class _MSNumeric_pymssql(sqltypes.Numeric):
- def result_processor(self, dialect, type_):
- if not self.asdecimal:
- return processors.to_float
- else:
- return sqltypes.Numeric.result_processor(self, dialect, type_)
-
-class MSDialect_pymssql(MSDialect):
- supports_sane_rowcount = False
- max_identifier_length = 30
- driver = 'pymssql'
-
- colspecs = util.update_copy(
- MSDialect.colspecs,
- {
- sqltypes.Numeric:_MSNumeric_pymssql,
- sqltypes.Float:sqltypes.Float,
- }
- )
- @classmethod
- def dbapi(cls):
- module = __import__('pymssql')
-        # pymssql doesn't have a Binary method; we use str instead
- # TODO: monkeypatching here is less than ideal
- module.Binary = str
-
- client_ver = tuple(int(x) for x in module.__version__.split("."))
- if client_ver < (1, ):
- util.warn("The pymssql dialect expects at least "
- "the 1.0 series of the pymssql DBAPI.")
- return module
-
- def __init__(self, **params):
- super(MSDialect_pymssql, self).__init__(**params)
- self.use_scope_identity = True
-
- def _get_server_version_info(self, connection):
- vers = connection.scalar("select @@version")
- m = re.match(
- r"Microsoft SQL Server.*? - (\d+).(\d+).(\d+).(\d+)", vers)
- if m:
- return tuple(int(x) for x in m.group(1, 2, 3, 4))
- else:
- return None
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- opts.update(url.query)
- port = opts.pop('port', None)
- if port and 'host' in opts:
- opts['host'] = "%s:%s" % (opts['host'], port)
- return [[], opts]
-
- def is_disconnect(self, e, connection, cursor):
- for msg in (
- "Error 10054",
- "Not connected to any MS SQL server",
- "Connection is closed"
- ):
- if msg in str(e):
- return True
- else:
- return False
-
-dialect = MSDialect_pymssql
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pyodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pyodbc.py
deleted file mode 100755
index 9b88dce2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/pyodbc.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# mssql/pyodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for MS-SQL via pyodbc.
-
-pyodbc is available at:
-
- http://pypi.python.org/pypi/pyodbc/
-
-Connecting
-^^^^^^^^^^
-
-Examples of pyodbc connection string URLs:
-
-* ``mssql+pyodbc://mydsn`` - connects using the specified DSN named ``mydsn``.
- The connection string that is created will appear like::
-
- dsn=mydsn;Trusted_Connection=Yes
-
-* ``mssql+pyodbc://user:pass@mydsn`` - connects using the DSN named
- ``mydsn`` passing in the ``UID`` and ``PWD`` information. The
- connection string that is created will appear like::
-
- dsn=mydsn;UID=user;PWD=pass
-
-* ``mssql+pyodbc://user:pass@mydsn/?LANGUAGE=us_english`` - connects
- using the DSN named ``mydsn`` passing in the ``UID`` and ``PWD``
- information, plus the additional connection configuration option
- ``LANGUAGE``. The connection string that is created will appear
- like::
-
- dsn=mydsn;UID=user;PWD=pass;LANGUAGE=us_english
-
-* ``mssql+pyodbc://user:pass@host/db`` - connects using a connection string
- dynamically created that would appear like::
-
- DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass
-
-* ``mssql+pyodbc://user:pass@host:123/db`` - connects using a connection
- string that is dynamically created, which also includes the port
- information using the comma syntax. If your connection string
- requires the port information to be passed as a ``port`` keyword
- see the next example. This will create the following connection
- string::
-
- DRIVER={SQL Server};Server=host,123;Database=db;UID=user;PWD=pass
-
-* ``mssql+pyodbc://user:pass@host/db?port=123`` - connects using a connection
- string that is dynamically created that includes the port
- information as a separate ``port`` keyword. This will create the
- following connection string::
-
- DRIVER={SQL Server};Server=host;Database=db;UID=user;PWD=pass;port=123
-
-If you require a connection string that is outside the options
-presented above, use the ``odbc_connect`` keyword to pass in a
-urlencoded connection string. What gets passed in will be urldecoded
-and passed directly.
-
-For example::
-
- mssql+pyodbc:///?odbc_connect=dsn%3Dmydsn%3BDatabase%3Ddb
-
-would create the following connection string::
-
- dsn=mydsn;Database=db
-
-Encoding your connection string can be easily accomplished through
-the python shell. For example::
-
- >>> import urllib
- >>> urllib.quote_plus('dsn=mydsn;Database=db')
- 'dsn%3Dmydsn%3BDatabase%3Ddb'
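-
-A complete URL can then be assembled from the encoded string; as a
-sketch::
-
-    import urllib
-    params = urllib.quote_plus('dsn=mydsn;Database=db')
-    url = 'mssql+pyodbc:///?odbc_connect=' + params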
-
-
-"""
-
-from sqlalchemy.dialects.mssql.base import MSExecutionContext, MSDialect
-from sqlalchemy.connectors.pyodbc import PyODBCConnector
-from sqlalchemy import types as sqltypes, util
-import decimal
-
-class _MSNumeric_pyodbc(sqltypes.Numeric):
- """Turns Decimals with adjusted() < 0 or > 7 into strings.
-
- This is the only method that is proven to work with Pyodbc+MSSQL
- without crashing (floats can be used but seem to cause sporadic
- crashes).
-
- """
-
- def bind_processor(self, dialect):
-
- super_process = super(_MSNumeric_pyodbc, self).\
- bind_processor(dialect)
-
- if not dialect._need_decimal_fix:
- return super_process
-
- def process(value):
- if self.asdecimal and \
- isinstance(value, decimal.Decimal):
-
- adjusted = value.adjusted()
- if adjusted < 0:
- return self._small_dec_to_string(value)
- elif adjusted > 7:
- return self._large_dec_to_string(value)
-
- if super_process:
- return super_process(value)
- else:
- return value
- return process
-
- # these routines needed for older versions of pyodbc.
- # as of 2.1.8 this logic is integrated.
-
- def _small_dec_to_string(self, value):
- return "%s0.%s%s" % (
- (value < 0 and '-' or ''),
- '0' * (abs(value.adjusted()) - 1),
- "".join([str(nint) for nint in value.as_tuple()[1]]))
-
- def _large_dec_to_string(self, value):
- _int = value.as_tuple()[1]
- if 'E' in str(value):
- result = "%s%s%s" % (
- (value < 0 and '-' or ''),
- "".join([str(s) for s in _int]),
- "0" * (value.adjusted() - (len(_int)-1)))
- else:
- if (len(_int) - 1) > value.adjusted():
- result = "%s%s.%s" % (
- (value < 0 and '-' or ''),
- "".join(
- [str(s) for s in _int][0:value.adjusted() + 1]),
- "".join(
- [str(s) for s in _int][value.adjusted() + 1:]))
- else:
- result = "%s%s" % (
- (value < 0 and '-' or ''),
- "".join(
- [str(s) for s in _int][0:value.adjusted() + 1]))
- return result
-
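-    # Worked examples (illustrative, not part of the original source):
-    # Decimal("1E-6").adjusted() == -6, so _small_dec_to_string()
-    # yields "0.000001"; Decimal("1.23E+9").adjusted() == 9, so
-    # _large_dec_to_string() yields "1230000000".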
-
-class MSExecutionContext_pyodbc(MSExecutionContext):
- _embedded_scope_identity = False
-
- def pre_exec(self):
- """where appropriate, issue "select scope_identity()" in the same
- statement.
-
- Background on why "scope_identity()" is preferable to "@@identity":
- http://msdn.microsoft.com/en-us/library/ms190315.aspx
-
- Background on why we attempt to embed "scope_identity()" into the same
- statement as the INSERT:
- http://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?
-
- """
-
- super(MSExecutionContext_pyodbc, self).pre_exec()
-
- # don't embed the scope_identity select into an
- # "INSERT .. DEFAULT VALUES"
- if self._select_lastrowid and \
- self.dialect.use_scope_identity and \
- len(self.parameters[0]):
- self._embedded_scope_identity = True
-
- self.statement += "; select scope_identity()"
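-            # e.g. (illustrative only) an INSERT then reaches the
-            # driver as:
-            #   "INSERT INTO t (x) VALUES (?); select scope_identity()"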
-
- def post_exec(self):
- if self._embedded_scope_identity:
- # Fetch the last inserted id from the manipulated statement
- # We may have to skip over a number of result sets with
- # no data (due to triggers, etc.)
- while True:
- try:
- # fetchall() ensures the cursor is consumed
- # without closing it (FreeTDS particularly)
- row = self.cursor.fetchall()[0]
- break
- except self.dialect.dbapi.Error, e:
- # no way around this - nextset() consumes the previous set
- # so we need to just keep flipping
- self.cursor.nextset()
-
- self._lastrowid = int(row[0])
- else:
- super(MSExecutionContext_pyodbc, self).post_exec()
-
-
-class MSDialect_pyodbc(PyODBCConnector, MSDialect):
-
- execution_ctx_cls = MSExecutionContext_pyodbc
-
- pyodbc_driver_name = 'SQL Server'
-
- colspecs = util.update_copy(
- MSDialect.colspecs,
- {
- sqltypes.Numeric:_MSNumeric_pyodbc
- }
- )
-
- def __init__(self, description_encoding='latin-1', **params):
- super(MSDialect_pyodbc, self).__init__(**params)
- self.description_encoding = description_encoding
- self.use_scope_identity = self.dbapi and \
- hasattr(self.dbapi.Cursor, 'nextset')
-        # compare version components numerically; comparing the raw
-        # string components against ints never matches under Python 2
-        # (assumes a plain numeric "x.y.z" version string)
-        self._need_decimal_fix = self.dbapi and \
-                tuple(int(x) for x in
-                      self.dbapi.version.split(".")[:3]) < (2, 1, 8)
-
-dialect = MSDialect_pyodbc
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/zxjdbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/zxjdbc.py
deleted file mode 100755
index c293adbe..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mssql/zxjdbc.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# mssql/zxjdbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Microsoft SQL Server database via the zxjdbc JDBC
-connector.
-
-JDBC Driver
------------
-
-Requires the jTDS driver, available from: http://jtds.sourceforge.net/
-
-Connecting
-----------
-
-URLs are of the standard form of
-``mssql+zxjdbc://user:pass@host:port/dbname[?key=value&key=value...]``.
-
-Additional arguments which may be specified either as query string
-arguments on the URL, or as keyword arguments to
-:func:`~sqlalchemy.create_engine()` will be passed as Connection
-properties to the underlying JDBC driver.
-
-"""
-from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from sqlalchemy.dialects.mssql.base import MSDialect, MSExecutionContext
-from sqlalchemy.engine import base
-
-class MSExecutionContext_zxjdbc(MSExecutionContext):
-
- _embedded_scope_identity = False
-
- def pre_exec(self):
- super(MSExecutionContext_zxjdbc, self).pre_exec()
- # scope_identity after the fact returns null in jTDS so we must
- # embed it
- if self._select_lastrowid and self.dialect.use_scope_identity:
- self._embedded_scope_identity = True
- self.statement += "; SELECT scope_identity()"
-
- def post_exec(self):
- if self._embedded_scope_identity:
- while True:
- try:
- row = self.cursor.fetchall()[0]
- break
- except self.dialect.dbapi.Error, e:
- self.cursor.nextset()
- self._lastrowid = int(row[0])
-
- if (self.isinsert or self.isupdate or self.isdelete) and \
- self.compiled.returning:
- self._result_proxy = base.FullyBufferedResultProxy(self)
-
- if self._enable_identity_insert:
- table = self.dialect.identifier_preparer.format_table(
- self.compiled.statement.table)
- self.cursor.execute("SET IDENTITY_INSERT %s OFF" % table)
-
-
-class MSDialect_zxjdbc(ZxJDBCConnector, MSDialect):
- jdbc_db_name = 'jtds:sqlserver'
- jdbc_driver_name = 'net.sourceforge.jtds.jdbc.Driver'
-
- execution_ctx_cls = MSExecutionContext_zxjdbc
-
- def _get_server_version_info(self, connection):
- return tuple(
- int(x)
- for x in connection.connection.dbversion.split('.')
- )
-
-dialect = MSDialect_zxjdbc
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/__init__.py
deleted file mode 100755
index 7cab573e..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-# mysql/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.mysql import base, mysqldb, oursql, \
- pyodbc, zxjdbc, mysqlconnector, pymysql
-
-# default dialect
-base.dialect = mysqldb.dialect
-
-from sqlalchemy.dialects.mysql.base import \
- BIGINT, BINARY, BIT, BLOB, BOOLEAN, CHAR, DATE, DATETIME, \
-    DECIMAL, DOUBLE, ENUM, \
-    FLOAT, INTEGER, LONGBLOB, LONGTEXT, MEDIUMBLOB, \
- MEDIUMINT, MEDIUMTEXT, NCHAR, \
- NVARCHAR, NUMERIC, SET, SMALLINT, REAL, TEXT, TIME, TIMESTAMP, \
-    TINYBLOB, TINYINT, TINYTEXT, \
- VARBINARY, VARCHAR, YEAR, dialect
-
-__all__ = (
-'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'DOUBLE',
-'ENUM', 'FLOAT', 'INTEGER', 'LONGBLOB', 'LONGTEXT', 'MEDIUMBLOB', 'MEDIUMINT',
-'MEDIUMTEXT', 'NCHAR', 'NVARCHAR', 'NUMERIC', 'SET', 'SMALLINT', 'REAL', 'TEXT', 'TIME', 'TIMESTAMP',
-'TINYBLOB', 'TINYINT', 'TINYTEXT', 'VARBINARY', 'VARCHAR', 'YEAR', 'dialect'
-)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/base.py
deleted file mode 100755
index 33dc8a73..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/base.py
+++ /dev/null
@@ -1,2571 +0,0 @@
-# mysql/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database.
-
-Supported Versions and Features
--------------------------------
-
-SQLAlchemy supports 6 major MySQL versions: 3.23, 4.0, 4.1, 5.0, 5.1 and 6.0,
-with capabilities increasing with more modern servers.
-
-Versions 4.1 and higher support the basic SQL functionality that SQLAlchemy
-uses in the ORM and SQL expressions. These versions pass the applicable tests
-in the suite 100%. No heroic measures are taken to work around major missing
-SQL features; if your server version does not support sub-selects, for
-example, they won't work in SQLAlchemy either.
-
-Most available DBAPI drivers are supported; see below.
-
-===================================== ===============
-Feature Minimum Version
-===================================== ===============
-sqlalchemy.orm 4.1.1
-Table Reflection 3.23.x
-DDL Generation 4.1.1
-utf8/Full Unicode Connections 4.1.1
-Transactions 3.23.15
-Two-Phase Transactions 5.0.3
-Nested Transactions 5.0.3
-===================================== ===============
-
-See the official MySQL documentation for detailed information about features
-supported in any given server release.
-
-Connecting
-----------
-
-See the API documentation on individual drivers for details on connecting.
-
-Connection Timeouts
--------------------
-
-MySQL automatically closes connections that have been idle for eight hours
-or more. To avoid this issue, use the ``pool_recycle`` option, which
-controls the maximum age of any connection::
-
- engine = create_engine('mysql+mysqldb://...', pool_recycle=3600)
-
-Storage Engines
----------------
-
-Most MySQL server installations have a default table type of ``MyISAM``, a
-non-transactional table type. During a transaction, non-transactional storage
-engines do not participate and continue to store table changes in autocommit
-mode. For fully atomic transactions, all participating tables must use a
-transactional engine such as ``InnoDB``, ``Falcon``, ``SolidDB``, ``PBXT``, etc.
-
-Storage engines can be selected when creating tables in SQLAlchemy by
-supplying a ``mysql_engine='whatever'`` keyword argument to the
-``Table`` constructor. Any MySQL table
-creation option can be specified in this syntax::
-
- Table('mytable', metadata,
- Column('data', String(32)),
- mysql_engine='InnoDB',
- mysql_charset='utf8'
- )
-
-Keys
-----
-
-Not all MySQL storage engines support foreign keys. For ``MyISAM`` and
-similar engines, the information loaded by table reflection will not include
-foreign keys. For these tables, you may supply a
-:class:`~sqlalchemy.ForeignKeyConstraint` at reflection time::
-
- Table('mytable', metadata,
- ForeignKeyConstraint(['other_id'], ['othertable.other_id']),
- autoload=True
- )
-
-When creating tables, SQLAlchemy will automatically set ``AUTO_INCREMENT`` on
-an integer primary key column::
-
- >>> t = Table('mytable', metadata,
- ... Column('mytable_id', Integer, primary_key=True)
- ... )
- >>> t.create()
- CREATE TABLE mytable (
-            mytable_id INTEGER NOT NULL AUTO_INCREMENT,
-            PRIMARY KEY (mytable_id)
- )
-
-You can disable this behavior by supplying ``autoincrement=False`` to the
-:class:`~sqlalchemy.Column`. This flag can also be used to enable
-auto-increment on a secondary column in a multi-column key for some storage
-engines::
-
- Table('mytable', metadata,
- Column('gid', Integer, primary_key=True, autoincrement=False),
- Column('id', Integer, primary_key=True)
- )
-
-SQL Mode
---------
-
-MySQL SQL modes are supported. Modes that enable ``ANSI_QUOTES`` (such as
-``ANSI``) require an engine option to modify SQLAlchemy's quoting style.
-When using an ANSI-quoting mode, supply ``use_ansiquotes=True`` when
-creating your ``Engine``::
-
- create_engine('mysql://localhost/test', use_ansiquotes=True)
-
-This is an engine-wide option and is not toggleable on a per-connection basis.
-SQLAlchemy does not presume to ``SET sql_mode`` for you with this option. For
-the best performance, set the quoting style server-wide in ``my.cnf`` or by
-supplying ``--sql-mode`` to ``mysqld``. You can also use a
-:class:`sqlalchemy.pool.Pool` listener hook to issue a ``SET SESSION
-sql_mode='...'`` on connect to configure each connection.
-
-If you do not specify ``use_ansiquotes``, the regular MySQL quoting style is
-used by default.
-
-If you do issue a ``SET sql_mode`` through SQLAlchemy, the dialect must be
-updated if the quoting style is changed. Again, this change will affect all
-connections::
-
- connection.execute('SET sql_mode="ansi"')
- connection.dialect.use_ansiquotes = True
-
-MySQL SQL Extensions
---------------------
-
-Many of the MySQL SQL extensions are handled through SQLAlchemy's generic
-function and operator support::
-
- table.select(table.c.password==func.md5('plaintext'))
- table.select(table.c.username.op('regexp')('^[a-d]'))
-
-And of course any valid MySQL statement can be executed as a string as well.
-
-Some limited direct support for MySQL extensions to SQL is currently
-available.
-
-* SELECT pragma::
-
- select(..., prefixes=['HIGH_PRIORITY', 'SQL_SMALL_RESULT'])
-
-* UPDATE with LIMIT::
-
- update(..., mysql_limit=10)
-
-"""
-
-import datetime, inspect, re, sys
-
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import exc, log, sql, util
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy.sql import functions as sql_functions
-from sqlalchemy.sql import compiler
-from array import array as _array
-
-from sqlalchemy.engine import reflection
-from sqlalchemy.engine import base as engine_base, default
-from sqlalchemy import types as sqltypes
-
-from sqlalchemy.types import DATE, DATETIME, BOOLEAN, TIME, \
- BLOB, BINARY, VARBINARY
-
-RESERVED_WORDS = set(
-    ['accessible', 'add', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
- 'asensitive', 'before', 'between', 'bigint', 'binary', 'blob', 'both',
- 'by', 'call', 'cascade', 'case', 'change', 'char', 'character', 'check',
- 'collate', 'column', 'condition', 'constraint', 'continue', 'convert',
- 'create', 'cross', 'current_date', 'current_time', 'current_timestamp',
- 'current_user', 'cursor', 'database', 'databases', 'day_hour',
- 'day_microsecond', 'day_minute', 'day_second', 'dec', 'decimal',
- 'declare', 'default', 'delayed', 'delete', 'desc', 'describe',
- 'deterministic', 'distinct', 'distinctrow', 'div', 'double', 'drop',
- 'dual', 'each', 'else', 'elseif', 'enclosed', 'escaped', 'exists',
- 'exit', 'explain', 'false', 'fetch', 'float', 'float4', 'float8',
- 'for', 'force', 'foreign', 'from', 'fulltext', 'grant', 'group', 'having',
- 'high_priority', 'hour_microsecond', 'hour_minute', 'hour_second', 'if',
- 'ignore', 'in', 'index', 'infile', 'inner', 'inout', 'insensitive',
- 'insert', 'int', 'int1', 'int2', 'int3', 'int4', 'int8', 'integer',
- 'interval', 'into', 'is', 'iterate', 'join', 'key', 'keys', 'kill',
- 'leading', 'leave', 'left', 'like', 'limit', 'linear', 'lines', 'load',
- 'localtime', 'localtimestamp', 'lock', 'long', 'longblob', 'longtext',
- 'loop', 'low_priority', 'master_ssl_verify_server_cert', 'match',
- 'mediumblob', 'mediumint', 'mediumtext', 'middleint',
- 'minute_microsecond', 'minute_second', 'mod', 'modifies', 'natural',
- 'not', 'no_write_to_binlog', 'null', 'numeric', 'on', 'optimize',
- 'option', 'optionally', 'or', 'order', 'out', 'outer', 'outfile',
- 'precision', 'primary', 'procedure', 'purge', 'range', 'read', 'reads',
- 'read_only', 'read_write', 'real', 'references', 'regexp', 'release',
- 'rename', 'repeat', 'replace', 'require', 'restrict', 'return',
- 'revoke', 'right', 'rlike', 'schema', 'schemas', 'second_microsecond',
- 'select', 'sensitive', 'separator', 'set', 'show', 'smallint', 'spatial',
- 'specific', 'sql', 'sqlexception', 'sqlstate', 'sqlwarning',
- 'sql_big_result', 'sql_calc_found_rows', 'sql_small_result', 'ssl',
- 'starting', 'straight_join', 'table', 'terminated', 'then', 'tinyblob',
- 'tinyint', 'tinytext', 'to', 'trailing', 'trigger', 'true', 'undo',
- 'union', 'unique', 'unlock', 'unsigned', 'update', 'usage', 'use',
- 'using', 'utc_date', 'utc_time', 'utc_timestamp', 'values', 'varbinary',
- 'varchar', 'varcharacter', 'varying', 'when', 'where', 'while', 'with',
- 'write', 'x509', 'xor', 'year_month', 'zerofill', # 5.0
- 'columns', 'fields', 'privileges', 'soname', 'tables', # 4.1
- 'accessible', 'linear', 'master_ssl_verify_server_cert', 'range',
- 'read_only', 'read_write', # 5.1
- ])
-
-AUTOCOMMIT_RE = re.compile(
- r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER|LOAD +DATA|REPLACE)',
- re.I | re.UNICODE)
-SET_RE = re.compile(
- r'\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w',
- re.I | re.UNICODE)
-
-
-class _NumericType(object):
- """Base for MySQL numeric types."""
-
- def __init__(self, unsigned=False, zerofill=False, **kw):
- self.unsigned = unsigned
- self.zerofill = zerofill
- super(_NumericType, self).__init__(**kw)
-
-class _FloatType(_NumericType, sqltypes.Float):
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- if isinstance(self, (REAL, DOUBLE)) and \
- (
- (precision is None and scale is not None) or
- (precision is not None and scale is None)
- ):
- raise exc.ArgumentError(
- "You must specify both precision and scale or omit "
- "both altogether.")
-
- super(_FloatType, self).__init__(precision=precision, asdecimal=asdecimal, **kw)
- self.scale = scale
-
-class _IntegerType(_NumericType, sqltypes.Integer):
- def __init__(self, display_width=None, **kw):
- self.display_width = display_width
- super(_IntegerType, self).__init__(**kw)
-
-class _StringType(sqltypes.String):
- """Base for MySQL string types."""
-
- def __init__(self, charset=None, collation=None,
- ascii=False, binary=False,
- national=False, **kw):
- self.charset = charset
- # allow collate= or collation=
- self.collation = kw.pop('collate', collation)
- self.ascii = ascii
- # We have to munge the 'unicode' param strictly as a dict
- # otherwise 2to3 will turn it into str.
- self.__dict__['unicode'] = kw.get('unicode', False)
- # sqltypes.String does not accept the 'unicode' arg at all.
- if 'unicode' in kw:
- del kw['unicode']
- self.binary = binary
- self.national = national
- super(_StringType, self).__init__(**kw)
-
- def __repr__(self):
- attributes = inspect.getargspec(self.__init__)[0][1:]
- attributes.extend(inspect.getargspec(_StringType.__init__)[0][1:])
-
- params = {}
- for attr in attributes:
- val = getattr(self, attr)
- if val is not None and val is not False:
- params[attr] = val
-
- return "%s(%s)" % (self.__class__.__name__,
- ', '.join(['%s=%r' % (k, params[k]) for k in params]))
-
-
-class NUMERIC(_NumericType, sqltypes.NUMERIC):
- """MySQL NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a NUMERIC.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(NUMERIC, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal, **kw)
-
-
-class DECIMAL(_NumericType, sqltypes.DECIMAL):
- """MySQL DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DECIMAL.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(DECIMAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-
-class DOUBLE(_FloatType):
- """MySQL DOUBLE type."""
-
- __visit_name__ = 'DOUBLE'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a DOUBLE.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(DOUBLE, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-class REAL(_FloatType, sqltypes.REAL):
- """MySQL REAL type."""
-
- __visit_name__ = 'REAL'
-
- def __init__(self, precision=None, scale=None, asdecimal=True, **kw):
- """Construct a REAL.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(REAL, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
-class FLOAT(_FloatType, sqltypes.FLOAT):
- """MySQL FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
- def __init__(self, precision=None, scale=None, asdecimal=False, **kw):
- """Construct a FLOAT.
-
- :param precision: Total digits in this number. If scale and precision
- are both None, values are stored to limits allowed by the server.
-
- :param scale: The number of digits after the decimal point.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(FLOAT, self).__init__(precision=precision, scale=scale,
- asdecimal=asdecimal, **kw)
-
- def bind_processor(self, dialect):
- return None
-
-class INTEGER(_IntegerType, sqltypes.INTEGER):
- """MySQL INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-
- def __init__(self, display_width=None, **kw):
- """Construct an INTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(INTEGER, self).__init__(display_width=display_width, **kw)
-
-class BIGINT(_IntegerType, sqltypes.BIGINT):
- """MySQL BIGINTEGER type."""
-
- __visit_name__ = 'BIGINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a BIGINTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(BIGINT, self).__init__(display_width=display_width, **kw)
-
-class MEDIUMINT(_IntegerType):
- """MySQL MEDIUMINTEGER type."""
-
- __visit_name__ = 'MEDIUMINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a MEDIUMINTEGER
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(MEDIUMINT, self).__init__(display_width=display_width, **kw)
-
-class TINYINT(_IntegerType):
- """MySQL TINYINT type."""
-
- __visit_name__ = 'TINYINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a TINYINT.
-
- Note: following the usual MySQL conventions, TINYINT(1) columns
- reflected during Table(..., autoload=True) are treated as
- Boolean columns.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(TINYINT, self).__init__(display_width=display_width, **kw)
-
-class SMALLINT(_IntegerType, sqltypes.SMALLINT):
- """MySQL SMALLINTEGER type."""
-
- __visit_name__ = 'SMALLINT'
-
- def __init__(self, display_width=None, **kw):
- """Construct a SMALLINTEGER.
-
- :param display_width: Optional, maximum display width for this number.
-
- :param unsigned: a boolean, optional.
-
- :param zerofill: Optional. If true, values will be stored as strings
-      left-padded with zeros. Note that this does not affect the values
- returned by the underlying database API, which continue to be
- numeric.
-
- """
- super(SMALLINT, self).__init__(display_width=display_width, **kw)
-
-class BIT(sqltypes.TypeEngine):
- """MySQL BIT type.
-
-    This type is for MySQL 5.0.3 or greater for MyISAM, and 5.0.5 or greater
-    for MEMORY, InnoDB and BDB. For older versions, use a MSTinyInteger()
- type.
-
- """
-
- __visit_name__ = 'BIT'
-
- def __init__(self, length=None):
- """Construct a BIT.
-
- :param length: Optional, number of bits.
-
- """
- self.length = length
-
- def result_processor(self, dialect, coltype):
- """Convert a MySQL's 64 bit, variable length binary string to a long.
-
- TODO: this is MySQL-db, pyodbc specific. OurSQL and mysqlconnector
- already do this, so this logic should be moved to those dialects.
-
- """
-
- def process(value):
- if value is not None:
- v = 0L
- for i in map(ord, value):
- v = v << 8 | i
- return v
- return value
- return process
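-    # Illustrative example (not part of the original source): a packed
-    # driver value of '\x02\x01' decodes to (2 << 8) | 1 == 513.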
-
-class _MSTime(sqltypes.Time):
- """MySQL TIME type."""
-
- __visit_name__ = 'TIME'
-
- def result_processor(self, dialect, coltype):
- time = datetime.time
- def process(value):
- # convert from a timedelta value
- if value is not None:
- seconds = value.seconds
- minutes = seconds / 60
- return time(minutes / 60, minutes % 60, seconds - minutes * 60)
- else:
- return None
- return process
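-    # Illustrative example (not part of the original source): a driver
-    # timedelta of 3661 seconds becomes datetime.time(1, 1, 1).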
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
- """MySQL TIMESTAMP type."""
- __visit_name__ = 'TIMESTAMP'
-
-class YEAR(sqltypes.TypeEngine):
- """MySQL YEAR type, for single byte storage of years 1901-2155."""
-
- __visit_name__ = 'YEAR'
-
- def __init__(self, display_width=None):
- self.display_width = display_width
-
-class TEXT(_StringType, sqltypes.TEXT):
- """MySQL TEXT type, for text up to 2^16 characters."""
-
- __visit_name__ = 'TEXT'
-
- def __init__(self, length=None, **kw):
- """Construct a TEXT.
-
- :param length: Optional, if provided the server may optimize storage
- by substituting the smallest TEXT type sufficient to store
- ``length`` characters.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(TEXT, self).__init__(length=length, **kw)
-
-class TINYTEXT(_StringType):
- """MySQL TINYTEXT type, for text up to 2^8 characters."""
-
- __visit_name__ = 'TINYTEXT'
-
- def __init__(self, **kwargs):
- """Construct a TINYTEXT.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(TINYTEXT, self).__init__(**kwargs)
-
-class MEDIUMTEXT(_StringType):
- """MySQL MEDIUMTEXT type, for text up to 2^24 characters."""
-
- __visit_name__ = 'MEDIUMTEXT'
-
- def __init__(self, **kwargs):
- """Construct a MEDIUMTEXT.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(MEDIUMTEXT, self).__init__(**kwargs)
-
-class LONGTEXT(_StringType):
- """MySQL LONGTEXT type, for text up to 2^32 characters."""
-
- __visit_name__ = 'LONGTEXT'
-
- def __init__(self, **kwargs):
- """Construct a LONGTEXT.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(LONGTEXT, self).__init__(**kwargs)
-
-
-class VARCHAR(_StringType, sqltypes.VARCHAR):
- """MySQL VARCHAR type, for variable-length character data."""
-
- __visit_name__ = 'VARCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a VARCHAR.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param national: Optional. If true, use the server's configured
- national character set.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- super(VARCHAR, self).__init__(length=length, **kwargs)
-
-class CHAR(_StringType, sqltypes.CHAR):
- """MySQL CHAR type, for fixed-length character data."""
-
- __visit_name__ = 'CHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct a CHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- super(CHAR, self).__init__(length=length, **kwargs)
-
-class NVARCHAR(_StringType, sqltypes.NVARCHAR):
- """MySQL NVARCHAR type.
-
- For variable-length character data in the server's configured national
- character set.
- """
-
- __visit_name__ = 'NVARCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct an NVARCHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- kwargs['national'] = True
- super(NVARCHAR, self).__init__(length=length, **kwargs)
-
-
-class NCHAR(_StringType, sqltypes.NCHAR):
- """MySQL NCHAR type.
-
- For fixed-length character data in the server's configured national
- character set.
- """
-
- __visit_name__ = 'NCHAR'
-
- def __init__(self, length=None, **kwargs):
- """Construct an NCHAR.
-
- :param length: Maximum data length, in characters.
-
- :param binary: Optional, use the default binary collation for the
- national character set. This does not affect the type of data
- stored, use a BINARY type for binary data.
-
- :param collation: Optional, request a particular collation. Must be
- compatible with the national character set.
-
- """
- kwargs['national'] = True
- super(NCHAR, self).__init__(length=length, **kwargs)
-
-
-
-
-class TINYBLOB(sqltypes._Binary):
- """MySQL TINYBLOB type, for binary data up to 2^8 bytes."""
-
- __visit_name__ = 'TINYBLOB'
-
-class MEDIUMBLOB(sqltypes._Binary):
- """MySQL MEDIUMBLOB type, for binary data up to 2^24 bytes."""
-
- __visit_name__ = 'MEDIUMBLOB'
-
-class LONGBLOB(sqltypes._Binary):
- """MySQL LONGBLOB type, for binary data up to 2^32 bytes."""
-
- __visit_name__ = 'LONGBLOB'
-
-class ENUM(sqltypes.Enum, _StringType):
- """MySQL ENUM type."""
-
- __visit_name__ = 'ENUM'
-
- def __init__(self, *enums, **kw):
- """Construct an ENUM.
-
-        Example::
-
- Column('myenum', MSEnum("foo", "bar", "baz"))
-
- :param enums: The range of valid values for this ENUM. Values will be
- quoted when generating the schema according to the quoting flag (see
- below).
-
- :param strict: Defaults to False: ensure that a given value is in this
- ENUM's range of permissible values when inserting or updating rows.
- Note that MySQL will not raise a fatal error if you attempt to store
-      an out-of-range value; an alternate value will be stored instead.
- (See MySQL ENUM documentation.)
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- :param quoting: Defaults to 'auto': automatically determine enum value
- quoting. If all enum values are surrounded by the same quoting
- character, then use 'quoted' mode. Otherwise, use 'unquoted' mode.
-
- 'quoted': values in enums are already quoted, they will be used
- directly when generating the schema - this usage is deprecated.
-
- 'unquoted': values in enums are not quoted, they will be escaped and
- surrounded by single quotes when generating the schema.
-
- Previous versions of this type always required manually quoted
- values to be supplied; future versions will always quote the string
- literals for you. This is a transitional option.
-
- """
- self.quoting = kw.pop('quoting', 'auto')
-
- if self.quoting == 'auto' and len(enums):
- # What quoting character are we using?
- q = None
- for e in enums:
- if len(e) == 0:
- self.quoting = 'unquoted'
- break
- elif q is None:
- q = e[0]
-
- if e[0] != q or e[-1] != q:
- self.quoting = 'unquoted'
- break
- else:
- self.quoting = 'quoted'
-
- if self.quoting == 'quoted':
- util.warn_deprecated(
- 'Manually quoting ENUM value literals is deprecated. Supply '
- 'unquoted values and use the quoting= option in cases of '
- 'ambiguity.')
- enums = self._strip_enums(enums)
-
- self.strict = kw.pop('strict', False)
- length = max([len(v) for v in enums] + [0])
- kw.pop('metadata', None)
- kw.pop('schema', None)
- kw.pop('name', None)
- kw.pop('quote', None)
- kw.pop('native_enum', None)
- _StringType.__init__(self, length=length, **kw)
- sqltypes.Enum.__init__(self, *enums)
-
- @classmethod
- def _strip_enums(cls, enums):
- strip_enums = []
- for a in enums:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_enums.append(a)
- return strip_enums
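-    # Illustrative examples (not part of the original source):
-    #   ENUM("'a'", "'b'") -> quoting='quoted', stored values 'a', 'b'
-    #   ENUM("a", "b")     -> quoting='unquoted'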
-
- def bind_processor(self, dialect):
- super_convert = super(ENUM, self).bind_processor(dialect)
- def process(value):
- if self.strict and value is not None and value not in self.enums:
- raise exc.InvalidRequestError('"%s" not a valid value for '
- 'this enum' % value)
- if super_convert:
- return super_convert(value)
- else:
- return value
- return process
-
- def adapt(self, impltype, **kw):
- kw['strict'] = self.strict
- return sqltypes.Enum.adapt(self, impltype, **kw)
-
-class SET(_StringType):
- """MySQL SET type."""
-
- __visit_name__ = 'SET'
-
- def __init__(self, *values, **kw):
- """Construct a SET.
-
- Example::
-
- Column('myset', MSSet("'foo'", "'bar'", "'baz'"))
-
- :param values: The range of valid values for this SET. Values will be
- used exactly as they appear when generating schemas. Strings must
- be quoted, as in the example above. Single-quotes are suggested for
- ANSI compatibility and are required for portability to servers with
- ANSI_QUOTES enabled.
-
- :param charset: Optional, a column-level character set for this string
-      value. Takes precedence over 'ascii' or 'unicode' short-hand.
-
-    :param collation: Optional, a column-level collation for this string
-      value. Takes precedence over 'binary' short-hand.
-
- :param ascii: Defaults to False: short-hand for the ``latin1``
- character set, generates ASCII in schema.
-
- :param unicode: Defaults to False: short-hand for the ``ucs2``
- character set, generates UNICODE in schema.
-
- :param binary: Defaults to False: short-hand, pick the binary
- collation type that matches the column's character set. Generates
- BINARY in schema. This does not affect the type of data stored,
- only the collation of character data.
-
- """
- self._ddl_values = values
-
- strip_values = []
- for a in values:
- if a[0:1] == '"' or a[0:1] == "'":
- # strip enclosing quotes and unquote interior
- a = a[1:-1].replace(a[0] * 2, a[0])
- strip_values.append(a)
-
- self.values = strip_values
- kw.setdefault('length', max([len(v) for v in strip_values] + [0]))
- super(SET, self).__init__(**kw)
-
- def result_processor(self, dialect, coltype):
- def process(value):
- # The good news:
-            # No ',' quoting issues: commas aren't allowed in SET values
- # The bad news:
- # Plenty of driver inconsistencies here.
- if isinstance(value, util.set_types):
- # ..some versions convert '' to an empty set
- if not value:
- value.add('')
- # ..some return sets.Set, even for pythons that have __builtin__.set
- if not isinstance(value, set):
- value = set(value)
- return value
- # ...and some versions return strings
- if value is not None:
- return set(value.split(','))
- else:
- return value
- return process
-
- def bind_processor(self, dialect):
- super_convert = super(SET, self).bind_processor(dialect)
- def process(value):
- if value is None or isinstance(value, (int, long, basestring)):
- pass
- else:
- if None in value:
- value = set(value)
- value.remove(None)
- value.add('')
- value = ','.join(value)
- if super_convert:
- return super_convert(value)
- else:
- return value
- return process
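-    # Illustrative round trip (not part of the original source): the
-    # driver string "foo,bar" comes back as set(['foo', 'bar']); a
-    # bound set(['foo', 'bar']) is re-joined to "foo,bar" (element
-    # order not guaranteed).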
-
-# old names
-MSTime = _MSTime
-MSSet = SET
-MSEnum = ENUM
-MSLongBlob = LONGBLOB
-MSMediumBlob = MEDIUMBLOB
-MSTinyBlob = TINYBLOB
-MSBlob = BLOB
-MSBinary = BINARY
-MSVarBinary = VARBINARY
-MSNChar = NCHAR
-MSNVarChar = NVARCHAR
-MSChar = CHAR
-MSString = VARCHAR
-MSLongText = LONGTEXT
-MSMediumText = MEDIUMTEXT
-MSTinyText = TINYTEXT
-MSText = TEXT
-MSYear = YEAR
-MSTimeStamp = TIMESTAMP
-MSBit = BIT
-MSSmallInteger = SMALLINT
-MSTinyInteger = TINYINT
-MSMediumInteger = MEDIUMINT
-MSBigInteger = BIGINT
-MSNumeric = NUMERIC
-MSDecimal = DECIMAL
-MSDouble = DOUBLE
-MSReal = REAL
-MSFloat = FLOAT
-MSInteger = INTEGER
-
-colspecs = {
- sqltypes.Numeric: NUMERIC,
- sqltypes.Float: FLOAT,
- sqltypes.Time: _MSTime,
- sqltypes.Enum: ENUM,
-}
-
-# Everything 3.23 through 5.1 excepting OpenGIS types.
-ischema_names = {
- 'bigint': BIGINT,
- 'binary': BINARY,
- 'bit': BIT,
- 'blob': BLOB,
- 'boolean': BOOLEAN,
- 'char': CHAR,
- 'date': DATE,
- 'datetime': DATETIME,
- 'decimal': DECIMAL,
- 'double': DOUBLE,
- 'enum': ENUM,
- 'fixed': DECIMAL,
- 'float': FLOAT,
- 'int': INTEGER,
- 'integer': INTEGER,
- 'longblob': LONGBLOB,
- 'longtext': LONGTEXT,
- 'mediumblob': MEDIUMBLOB,
- 'mediumint': MEDIUMINT,
- 'mediumtext': MEDIUMTEXT,
- 'nchar': NCHAR,
- 'nvarchar': NVARCHAR,
- 'numeric': NUMERIC,
- 'set': SET,
- 'smallint': SMALLINT,
- 'text': TEXT,
- 'time': TIME,
- 'timestamp': TIMESTAMP,
- 'tinyblob': TINYBLOB,
- 'tinyint': TINYINT,
- 'tinytext': TINYTEXT,
- 'varbinary': VARBINARY,
- 'varchar': VARCHAR,
- 'year': YEAR,
-}
-
-class MySQLExecutionContext(default.DefaultExecutionContext):
-
- def should_autocommit_text(self, statement):
- return AUTOCOMMIT_RE.match(statement)
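-    # Illustrative behavior (not part of the original source):
-    # "UPDATE t SET x=1" matches AUTOCOMMIT_RE, so the statement is
-    # autocommitted; "SELECT * FROM t" does not match.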
-
-class MySQLCompiler(compiler.SQLCompiler):
-
- extract_map = compiler.SQLCompiler.extract_map.copy()
-    extract_map.update({
- 'milliseconds': 'millisecond',
- })
-
- def visit_random_func(self, fn, **kw):
- return "rand%s" % self.function_argspec(fn)
-
- def visit_utc_timestamp_func(self, fn, **kw):
- return "UTC_TIMESTAMP"
-
- def visit_sysdate_func(self, fn, **kw):
- return "SYSDATE()"
-
- def visit_concat_op(self, binary, **kw):
- return "concat(%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
- def visit_match_op(self, binary, **kw):
- return "MATCH (%s) AGAINST (%s IN BOOLEAN MODE)" % (self.process(binary.left), self.process(binary.right))
-
- def get_from_hint_text(self, table, text):
- return text
-
- def visit_typeclause(self, typeclause):
- type_ = typeclause.type.dialect_impl(self.dialect)
- if isinstance(type_, sqltypes.Integer):
- if getattr(type_, 'unsigned', False):
- return 'UNSIGNED INTEGER'
- else:
- return 'SIGNED INTEGER'
- elif isinstance(type_, sqltypes.TIMESTAMP):
- return 'DATETIME'
- elif isinstance(type_, (sqltypes.DECIMAL, sqltypes.DateTime, sqltypes.Date, sqltypes.Time)):
- return self.dialect.type_compiler.process(type_)
- elif isinstance(type_, sqltypes.Text):
- return 'CHAR'
- elif (isinstance(type_, sqltypes.String) and not
- isinstance(type_, (ENUM, SET))):
- if getattr(type_, 'length'):
- return 'CHAR(%s)' % type_.length
- else:
- return 'CHAR'
- elif isinstance(type_, sqltypes._Binary):
- return 'BINARY'
- elif isinstance(type_, sqltypes.NUMERIC):
- return self.dialect.type_compiler.process(type_).replace('NUMERIC', 'DECIMAL')
- else:
- return None
-
- def visit_cast(self, cast, **kwargs):
- # No cast until 4, no decimals until 5.
- if not self.dialect._supports_cast:
- return self.process(cast.clause)
-
- type_ = self.process(cast.typeclause)
- if type_ is None:
- return self.process(cast.clause)
-
- return 'CAST(%s AS %s)' % (self.process(cast.clause), type_)
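-    # Illustrative example (not part of the original source):
-    # cast(t.c.x, Integer) renders "CAST(x AS SIGNED INTEGER)" on
-    # servers that support CAST; on older servers only the inner
-    # expression is rendered.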
-
- def render_literal_value(self, value, type_):
- value = super(MySQLCompiler, self).render_literal_value(value, type_)
- if self.dialect._backslash_escapes:
- value = value.replace('\\', '\\\\')
- return value
-
- def get_select_precolumns(self, select):
- """Add special MySQL keywords in place of DISTINCT.
-
- .. note:: this usage is deprecated. :meth:`.Select.prefix_with`
- should be used for special keywords at the start
- of a SELECT.
-
- """
- if isinstance(select._distinct, basestring):
- return select._distinct.upper() + " "
- elif select._distinct:
- return "DISTINCT "
- else:
- return ""
-
- def visit_join(self, join, asfrom=False, **kwargs):
- # 'JOIN ... ON ...' for inner joins isn't available until 4.0.
- # Apparently < 3.23.17 requires theta joins for inner joins
- # (but not outer). Not generating these currently, but
- # support can be added, preferably after dialects are
- # refactored to be version-sensitive.
- return ''.join(
- (self.process(join.left, asfrom=True, **kwargs),
- (join.isouter and " LEFT OUTER JOIN " or " INNER JOIN "),
- self.process(join.right, asfrom=True, **kwargs),
- " ON ",
- self.process(join.onclause, **kwargs)))
-
- def for_update_clause(self, select):
- if select.for_update == 'read':
- return ' LOCK IN SHARE MODE'
- else:
- return super(MySQLCompiler, self).for_update_clause(select)
-
- def limit_clause(self, select):
- # MySQL supports:
- # LIMIT <limit>
- # LIMIT <offset>, <limit>
- # and in server versions > 3.3:
- # LIMIT <limit> OFFSET <offset>
- # The latter is more readable for offsets but we're stuck with the
- # former until we can refine dialects by server revision.
-
- limit, offset = select._limit, select._offset
-
- if (limit, offset) == (None, None):
- return ''
- elif offset is not None:
- # As suggested by the MySQL docs, need to apply an
- # artificial limit if one wasn't provided
- # http://dev.mysql.com/doc/refman/5.0/en/select.html
- if limit is None:
- # hardwire the upper limit. Currently
- # needed by OurSQL with Python 3
- # (https://bugs.launchpad.net/oursql/+bug/686232),
- # but also is consistent with the usage of the upper
- # bound as part of MySQL's "syntax" for OFFSET with
- # no LIMIT
- return ' \n LIMIT %s, %s' % (
- self.process(sql.literal(offset)),
- "18446744073709551615")
- else:
- return ' \n LIMIT %s, %s' % (
- self.process(sql.literal(offset)),
- self.process(sql.literal(limit)))
- else:
- # No offset provided, so just use the limit
- return ' \n LIMIT %s' % (self.process(sql.literal(limit)),)
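-    # Illustrative renderings (not part of the original source):
-    #   limit=10, offset=5 -> "LIMIT 5, 10" (values as bound params)
-    #   offset=5, no limit -> "LIMIT 5, 18446744073709551615"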
-
- def visit_update(self, update_stmt):
- self.stack.append({'from': set([update_stmt.table])})
-
- self.isupdate = True
- colparams = self._get_colparams(update_stmt)
-
- text = "UPDATE " + self.preparer.format_table(update_stmt.table) + \
- " SET " + ', '.join(["%s=%s" % (self.preparer.format_column(c[0]), c[1]) for c in colparams])
-
- if update_stmt._whereclause is not None:
- text += " WHERE " + self.process(update_stmt._whereclause)
-
- limit = update_stmt.kwargs.get('%s_limit' % self.dialect.name, None)
- if limit:
- text += " LIMIT %s" % limit
-
- self.stack.pop(-1)
-
- return text
-
-# ug. "InnoDB needs indexes on foreign keys and referenced keys [...].
-# Starting with MySQL 4.1.2, these indexes are created automatically.
-# In older versions, the indexes must be created explicitly or the
-# creation of foreign key constraints fails."
-
-class MySQLDDLCompiler(compiler.DDLCompiler):
- def create_table_constraints(self, table):
- """Get table constraints."""
- constraint_string = super(MySQLDDLCompiler, self).create_table_constraints(table)
-
- engine_key = '%s_engine' % self.dialect.name
-        is_innodb = engine_key in table.kwargs and \
- table.kwargs[engine_key].lower() == 'innodb'
-
- auto_inc_column = table._autoincrement_column
-
- if is_innodb and \
- auto_inc_column is not None and \
- auto_inc_column is not list(table.primary_key)[0]:
- if constraint_string:
- constraint_string += ", \n\t"
- constraint_string += "KEY `idx_autoinc_%s`(`%s`)" % (auto_inc_column.name, \
- self.preparer.format_column(auto_inc_column))
-
- return constraint_string
-
-
- def get_column_specification(self, column, **kw):
- """Builds column DDL."""
-
- colspec = [self.preparer.format_column(column),
- self.dialect.type_compiler.process(column.type)
- ]
-
- default = self.get_column_default_string(column)
- if default is not None:
- colspec.append('DEFAULT ' + default)
-
- is_timestamp = isinstance(column.type, sqltypes.TIMESTAMP)
- if not column.nullable and not is_timestamp:
- colspec.append('NOT NULL')
-
- elif column.nullable and is_timestamp and default is None:
- colspec.append('NULL')
-
- if column is column.table._autoincrement_column and column.server_default is None:
- colspec.append('AUTO_INCREMENT')
-
- return ' '.join(colspec)
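-    # Illustrative output (not part of the original source): an Integer
-    # primary key column named 'id' with no server default builds
-    # "id INTEGER NOT NULL AUTO_INCREMENT".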
-
- def post_create_table(self, table):
- """Build table-level CREATE options like ENGINE and COLLATE."""
-
- table_opts = []
- for k in table.kwargs:
- if k.startswith('%s_' % self.dialect.name):
- opt = k[len(self.dialect.name)+1:].upper()
-
- arg = table.kwargs[k]
- if opt in _options_of_type_string:
- arg = "'%s'" % arg.replace("\\", "\\\\").replace("'", "''")
-
- if opt in ('DATA_DIRECTORY', 'INDEX_DIRECTORY',
- 'DEFAULT_CHARACTER_SET', 'CHARACTER_SET',
- 'DEFAULT_CHARSET',
- 'DEFAULT_COLLATE'):
- opt = opt.replace('_', ' ')
-
- joiner = '='
- if opt in ('TABLESPACE', 'DEFAULT CHARACTER SET',
- 'CHARACTER SET', 'COLLATE'):
- joiner = ' '
-
- table_opts.append(joiner.join((opt, arg)))
- return ' '.join(table_opts)
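-    # Illustrative output (not part of the original source): a table
-    # created with mysql_engine='InnoDB', mysql_charset='utf8' yields
-    # roughly "ENGINE=InnoDB CHARSET=utf8" (option order follows
-    # dict iteration order).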
-
- def visit_drop_index(self, drop):
- index = drop.element
-
- return "\nDROP INDEX %s ON %s" % \
- (self.preparer.quote(self._index_identifier(index.name), index.quote),
- self.preparer.format_table(index.table))
-
- def visit_drop_constraint(self, drop):
- constraint = drop.element
- if isinstance(constraint, sa_schema.ForeignKeyConstraint):
- qual = "FOREIGN KEY "
- const = self.preparer.format_constraint(constraint)
- elif isinstance(constraint, sa_schema.PrimaryKeyConstraint):
- qual = "PRIMARY KEY "
- const = ""
- elif isinstance(constraint, sa_schema.UniqueConstraint):
- qual = "INDEX "
- const = self.preparer.format_constraint(constraint)
- else:
- qual = ""
- const = self.preparer.format_constraint(constraint)
- return "ALTER TABLE %s DROP %s%s" % \
- (self.preparer.format_table(constraint.table),
- qual, const)
-
-class MySQLTypeCompiler(compiler.GenericTypeCompiler):
- def _extend_numeric(self, type_, spec):
- "Extend a numeric-type declaration with MySQL specific extensions."
-
- if not self._mysql_type(type_):
- return spec
-
- if type_.unsigned:
- spec += ' UNSIGNED'
- if type_.zerofill:
- spec += ' ZEROFILL'
- return spec
-
- def _extend_string(self, type_, defaults, spec):
- """Extend a string-type declaration with standard SQL CHARACTER SET /
-        COLLATE annotations and MySQL-specific extensions.
-
- """
-
- def attr(name):
- return getattr(type_, name, defaults.get(name))
-
- if attr('charset'):
- charset = 'CHARACTER SET %s' % attr('charset')
- elif attr('ascii'):
- charset = 'ASCII'
- elif attr('unicode'):
- charset = 'UNICODE'
- else:
- charset = None
-
- if attr('collation'):
- collation = 'COLLATE %s' % type_.collation
- elif attr('binary'):
- collation = 'BINARY'
- else:
- collation = None
-
- if attr('national'):
- # NATIONAL (aka NCHAR/NVARCHAR) trumps charsets.
- return ' '.join([c for c in ('NATIONAL', spec, collation)
- if c is not None])
- return ' '.join([c for c in (spec, charset, collation)
- if c is not None])
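-    # Illustrative example (not part of the original source): a MySQL
-    # VARCHAR(30) with charset='utf8' and binary=True renders as
-    # "VARCHAR(30) CHARACTER SET utf8 BINARY".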
-
- def _mysql_type(self, type_):
- return isinstance(type_, (_StringType, _NumericType))
-
- def visit_NUMERIC(self, type_):
- if type_.precision is None:
- return self._extend_numeric(type_, "NUMERIC")
- elif type_.scale is None:
- return self._extend_numeric(type_,
- "NUMERIC(%(precision)s)" %
- {'precision': type_.precision})
- else:
- return self._extend_numeric(type_,
- "NUMERIC(%(precision)s, %(scale)s)" %
- {'precision': type_.precision, 'scale' : type_.scale})
-
- def visit_DECIMAL(self, type_):
- if type_.precision is None:
- return self._extend_numeric(type_, "DECIMAL")
- elif type_.scale is None:
- return self._extend_numeric(type_,
- "DECIMAL(%(precision)s)" %
- {'precision': type_.precision})
- else:
- return self._extend_numeric(type_,
- "DECIMAL(%(precision)s, %(scale)s)" %
- {'precision': type_.precision, 'scale' : type_.scale})
-
- def visit_DOUBLE(self, type_):
- if type_.precision is not None and type_.scale is not None:
- return self._extend_numeric(type_, "DOUBLE(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale' : type_.scale})
- else:
- return self._extend_numeric(type_, 'DOUBLE')
-
- def visit_REAL(self, type_):
- if type_.precision is not None and type_.scale is not None:
- return self._extend_numeric(type_, "REAL(%(precision)s, %(scale)s)" %
- {'precision': type_.precision,
- 'scale' : type_.scale})
- else:
- return self._extend_numeric(type_, 'REAL')
-
- def visit_FLOAT(self, type_):
- if self._mysql_type(type_) and \
- type_.scale is not None and \
- type_.precision is not None:
- return self._extend_numeric(type_,
- "FLOAT(%s, %s)" % (type_.precision, type_.scale))
- elif type_.precision is not None:
- return self._extend_numeric(type_, "FLOAT(%s)" % (type_.precision,))
- else:
- return self._extend_numeric(type_, "FLOAT")
-
- def visit_INTEGER(self, type_):
- if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "INTEGER(%(display_width)s)" %
- {'display_width': type_.display_width})
- else:
- return self._extend_numeric(type_, "INTEGER")
-
- def visit_BIGINT(self, type_):
- if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "BIGINT(%(display_width)s)" %
- {'display_width': type_.display_width})
- else:
- return self._extend_numeric(type_, "BIGINT")
-
- def visit_MEDIUMINT(self, type_):
- if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "MEDIUMINT(%(display_width)s)" %
- {'display_width': type_.display_width})
- else:
- return self._extend_numeric(type_, "MEDIUMINT")
-
- def visit_TINYINT(self, type_):
- if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_, "TINYINT(%s)" % type_.display_width)
- else:
- return self._extend_numeric(type_, "TINYINT")
-
- def visit_SMALLINT(self, type_):
- if self._mysql_type(type_) and type_.display_width is not None:
- return self._extend_numeric(type_,
- "SMALLINT(%(display_width)s)" %
- {'display_width': type_.display_width}
- )
- else:
- return self._extend_numeric(type_, "SMALLINT")
-
- def visit_BIT(self, type_):
- if type_.length is not None:
- return "BIT(%s)" % type_.length
- else:
- return "BIT"
-
- def visit_DATETIME(self, type_):
- return "DATETIME"
-
- def visit_DATE(self, type_):
- return "DATE"
-
- def visit_TIME(self, type_):
- return "TIME"
-
- def visit_TIMESTAMP(self, type_):
- return 'TIMESTAMP'
-
- def visit_YEAR(self, type_):
- if type_.display_width is None:
- return "YEAR"
- else:
- return "YEAR(%s)" % type_.display_width
-
- def visit_TEXT(self, type_):
- if type_.length:
- return self._extend_string(type_, {}, "TEXT(%d)" % type_.length)
- else:
- return self._extend_string(type_, {}, "TEXT")
-
- def visit_TINYTEXT(self, type_):
- return self._extend_string(type_, {}, "TINYTEXT")
-
- def visit_MEDIUMTEXT(self, type_):
- return self._extend_string(type_, {}, "MEDIUMTEXT")
-
- def visit_LONGTEXT(self, type_):
- return self._extend_string(type_, {}, "LONGTEXT")
-
- def visit_VARCHAR(self, type_):
- if type_.length:
- return self._extend_string(type_, {}, "VARCHAR(%d)" % type_.length)
- else:
- raise exc.InvalidRequestError(
- "VARCHAR requires a length on dialect %s" %
- self.dialect.name)
-
- def visit_CHAR(self, type_):
- if type_.length:
- return self._extend_string(type_, {}, "CHAR(%(length)s)" % {'length' : type_.length})
- else:
- return self._extend_string(type_, {}, "CHAR")
-
- def visit_NVARCHAR(self, type_):
- # We'll actually generate the equiv. "NATIONAL VARCHAR" instead
- # of "NVARCHAR".
- if type_.length:
- return self._extend_string(type_, {'national':True}, "VARCHAR(%(length)s)" % {'length': type_.length})
- else:
- raise exc.InvalidRequestError(
- "NVARCHAR requires a length on dialect %s" %
- self.dialect.name)
-
- def visit_NCHAR(self, type_):
- # We'll actually generate the equiv. "NATIONAL CHAR" instead of "NCHAR".
- if type_.length:
- return self._extend_string(type_, {'national':True}, "CHAR(%(length)s)" % {'length': type_.length})
- else:
- return self._extend_string(type_, {'national':True}, "CHAR")
-
- def visit_VARBINARY(self, type_):
- return "VARBINARY(%d)" % type_.length
-
- def visit_large_binary(self, type_):
- return self.visit_BLOB(type_)
-
- def visit_enum(self, type_):
- if not type_.native_enum:
- return super(MySQLTypeCompiler, self).visit_enum(type_)
- else:
- return self.visit_ENUM(type_)
-
- def visit_BLOB(self, type_):
- if type_.length:
- return "BLOB(%d)" % type_.length
- else:
- return "BLOB"
-
- def visit_TINYBLOB(self, type_):
- return "TINYBLOB"
-
- def visit_MEDIUMBLOB(self, type_):
- return "MEDIUMBLOB"
-
- def visit_LONGBLOB(self, type_):
- return "LONGBLOB"
-
- def visit_ENUM(self, type_):
- quoted_enums = []
- for e in type_.enums:
- quoted_enums.append("'%s'" % e.replace("'", "''"))
- return self._extend_string(type_, {}, "ENUM(%s)" % ",".join(quoted_enums))
-
- def visit_SET(self, type_):
- return self._extend_string(type_, {}, "SET(%s)" % ",".join(type_._ddl_values))
-
- def visit_BOOLEAN(self, type_):
- return "BOOL"
-
-
-class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
- def __init__(self, dialect, server_ansiquotes=False, **kw):
- if not server_ansiquotes:
- quote = "`"
- else:
- quote = '"'
-
- super(MySQLIdentifierPreparer, self).__init__(
- dialect,
- initial_quote=quote,
- escape_quote=quote)
-
- def _quote_free_identifiers(self, *ids):
- """Unilaterally identifier-quote any number of strings."""
-
- return tuple([self.quote_identifier(i) for i in ids if i is not None])
-
-class MySQLDialect(default.DefaultDialect):
- """Details of the MySQL dialect. Not used directly in application code."""
-
- name = 'mysql'
- supports_alter = True
-
- # identifiers are 64, however aliases can be 255...
- max_identifier_length = 255
- max_index_name_length = 64
-
- supports_native_enum = True
-
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = False
-
- default_paramstyle = 'format'
- colspecs = colspecs
-
- statement_compiler = MySQLCompiler
- ddl_compiler = MySQLDDLCompiler
- type_compiler = MySQLTypeCompiler
- ischema_names = ischema_names
- preparer = MySQLIdentifierPreparer
-
- # default SQL compilation settings -
- # these are modified upon initialize(),
- # i.e. first connect
- _backslash_escapes = True
- _server_ansiquotes = False
-
- def __init__(self, use_ansiquotes=None, **kwargs):
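- # 'use_ansiquotes' is accepted for backwards compatibility and
- # ignored; the active quoting mode is detected from the server's
- # sql_mode during initialize().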
- default.DefaultDialect.__init__(self, **kwargs)
-
- def do_commit(self, connection):
- """Execute a COMMIT."""
-
- # COMMIT/ROLLBACK were introduced in 3.23.15.
- # Yes, we have at least one user who has to talk to these old versions!
- #
- # Ignore commit/rollback if support isn't present, otherwise even basic
- # operations via autocommit fail.
- try:
- connection.commit()
- except:
- if self.server_version_info < (3, 23, 15):
- args = sys.exc_info()[1].args
- if args and args[0] == 1064:
- return
- raise
-
- def do_rollback(self, connection):
- """Execute a ROLLBACK."""
-
- try:
- connection.rollback()
- except:
- if self.server_version_info < (3, 23, 15):
- args = sys.exc_info()[1].args
- if args and args[0] == 1064:
- return
- raise
-
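- # Two-phase support is implemented with MySQL's XA statements:
- # XA BEGIN -> XA END -> XA PREPARE, then XA COMMIT or XA ROLLBACK.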
- def do_begin_twophase(self, connection, xid):
- connection.execute(sql.text("XA BEGIN :xid"), xid=xid)
-
- def do_prepare_twophase(self, connection, xid):
- connection.execute(sql.text("XA END :xid"), xid=xid)
- connection.execute(sql.text("XA PREPARE :xid"), xid=xid)
-
- def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- if not is_prepared:
- connection.execute(sql.text("XA END :xid"), xid=xid)
- connection.execute(sql.text("XA ROLLBACK :xid"), xid=xid)
-
- def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- if not is_prepared:
- self.do_prepare_twophase(connection, xid)
- connection.execute(sql.text("XA COMMIT :xid"), xid=xid)
-
- def do_recover_twophase(self, connection):
- resultset = connection.execute("XA RECOVER")
- return [row['data'][0:row['gtrid_length']] for row in resultset]
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- return self._extract_error_code(e) in \
- (2006, 2013, 2014, 2045, 2055)
- elif isinstance(e, self.dbapi.InterfaceError):
- # if underlying connection is closed,
- # this is the error you get
- return "(0, '')" in str(e)
- else:
- return False
-
- def _compat_fetchall(self, rp, charset=None):
- """Proxy result rows to smooth over MySQL-Python driver inconsistencies."""
-
- return [_DecodingRowProxy(row, charset) for row in rp.fetchall()]
-
- def _compat_fetchone(self, rp, charset=None):
- """Proxy a result row to smooth over MySQL-Python driver inconsistencies."""
-
- return _DecodingRowProxy(rp.fetchone(), charset)
-
- def _compat_first(self, rp, charset=None):
- """Proxy a result row to smooth over MySQL-Python driver inconsistencies."""
-
- return _DecodingRowProxy(rp.first(), charset)
-
- def _extract_error_code(self, exception):
- raise NotImplementedError()
-
- def _get_default_schema_name(self, connection):
- return connection.execute('SELECT DATABASE()').scalar()
-
-
- def has_table(self, connection, table_name, schema=None):
- # SHOW TABLE STATUS LIKE and SHOW TABLES LIKE do not function properly
- # on macosx (and maybe win?) with multibyte table names.
- #
- # TODO: if this is not a problem on win, make the strategy swappable
- # based on platform. DESCRIBE is slower.
-
- # [ticket:726]
- # full_name = self.identifier_preparer.format_table(table,
- # use_schema=True)
-
-
- full_name = '.'.join(self.identifier_preparer._quote_free_identifiers(
- schema, table_name))
-
- st = "DESCRIBE %s" % full_name
- rs = None
- try:
- try:
- rs = connection.execute(st)
- return rs.rowcount > 0
- except exc.DBAPIError, e:
- if self._extract_error_code(e.orig) == 1146:
- return False
- raise
- finally:
- if rs:
- rs.close()
-
- def initialize(self, connection):
- default.DefaultDialect.initialize(self, connection)
- self._connection_charset = self._detect_charset(connection)
- self._server_casing = self._detect_casing(connection)
- self._server_collations = self._detect_collations(connection)
- self._detect_ansiquotes(connection)
- if self._server_ansiquotes:
- # if ansiquotes == True, build a new IdentifierPreparer
- # with the new setting
- self.identifier_preparer = self.preparer(self,
- server_ansiquotes=self._server_ansiquotes)
-
- @property
- def _supports_cast(self):
- return self.server_version_info is None or \
- self.server_version_info >= (4, 0, 2)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- rp = connection.execute("SHOW schemas")
- return [r[0] for r in rp]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- """Return a Unicode SHOW TABLES from a given schema."""
- if schema is not None:
- current_schema = schema
- else:
- current_schema = self.default_schema_name
-
- charset = self._connection_charset
- if self.server_version_info < (5, 0, 2):
- rp = connection.execute("SHOW TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(current_schema))
- return [row[0] for row in self._compat_fetchall(rp, charset=charset)]
- else:
- rp = connection.execute("SHOW FULL TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(current_schema))
-
- return [row[0] for row in self._compat_fetchall(rp, charset=charset)\
- if row[1] == 'BASE TABLE']
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- charset = self._connection_charset
- if self.server_version_info < (5, 0, 2):
- raise NotImplementedError
- if schema is None:
- schema = self.default_schema_name
- rp = connection.execute("SHOW FULL TABLES FROM %s" %
- self.identifier_preparer.quote_identifier(schema))
- return [row[0] for row in self._compat_fetchall(rp, charset=charset)\
- if row[1] == 'VIEW']
-
- @reflection.cache
- def get_table_options(self, connection, table_name, schema=None, **kw):
-
- parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
- return parsed_state.table_options
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
- return parsed_state.columns
-
- @reflection.cache
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
- for key in parsed_state.keys:
- if key['type'] == 'PRIMARY':
- # There can be only one.
- return [s[0] for s in key['columns']]
- return []
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
-
- parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
- default_schema = None
-
- fkeys = []
-
- for spec in parsed_state.constraints:
- # only FOREIGN KEYs
- ref_name = spec['table'][-1]
- ref_schema = len(spec['table']) > 1 and spec['table'][-2] or schema
-
- if not ref_schema:
- if default_schema is None:
- default_schema = \
- connection.dialect.default_schema_name
- if schema == default_schema:
- ref_schema = schema
-
- loc_names = spec['local']
- ref_names = spec['foreign']
-
- con_kw = {}
- for opt in ('name', 'onupdate', 'ondelete'):
- if spec.get(opt, False):
- con_kw[opt] = spec[opt]
-
- fkey_d = {
- 'name' : spec['name'],
- 'constrained_columns' : loc_names,
- 'referred_schema' : ref_schema,
- 'referred_table' : ref_name,
- 'referred_columns' : ref_names,
- 'options' : con_kw
- }
- fkeys.append(fkey_d)
- return fkeys
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema=None, **kw):
-
- parsed_state = self._parsed_state_or_create(connection, table_name, schema, **kw)
-
- indexes = []
- for spec in parsed_state.keys:
- unique = False
- flavor = spec['type']
- if flavor == 'PRIMARY':
- continue
- if flavor == 'UNIQUE':
- unique = True
- elif flavor in (None, 'FULLTEXT', 'SPATIAL'):
- pass
- else:
- self.logger.info(
- "Converting unknown KEY type %s to a plain KEY" % flavor)
- pass
- index_d = {}
- index_d['name'] = spec['name']
- index_d['column_names'] = [s[0] for s in spec['columns']]
- index_d['unique'] = unique
- index_d['type'] = flavor
- indexes.append(index_d)
- return indexes
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
-
- charset = self._connection_charset
- full_name = '.'.join(self.identifier_preparer._quote_free_identifiers(
- schema, view_name))
- sql = self._show_create_table(connection, None, charset,
- full_name=full_name)
- return sql
-
- def _parsed_state_or_create(self, connection, table_name, schema=None, **kw):
- return self._setup_parser(
- connection,
- table_name,
- schema,
- info_cache=kw.get('info_cache', None)
- )
-
- @util.memoized_property
- def _tabledef_parser(self):
- """return the MySQLTableDefinitionParser, generate if needed.
-
- The deferred creation ensures that the dialect has
- retrieved server version information first.
-
- """
- if (self.server_version_info < (4, 1) and self._server_ansiquotes):
- # ANSI_QUOTES doesn't affect SHOW CREATE TABLE on < 4.1
- preparer = self.preparer(self, server_ansiquotes=False)
- else:
- preparer = self.identifier_preparer
- return MySQLTableDefinitionParser(self, preparer)
-
- @reflection.cache
- def _setup_parser(self, connection, table_name, schema=None, **kw):
- charset = self._connection_charset
- parser = self._tabledef_parser
- full_name = '.'.join(self.identifier_preparer._quote_free_identifiers(
- schema, table_name))
- sql = self._show_create_table(connection, None, charset,
- full_name=full_name)
- if sql.startswith('CREATE ALGORITHM'):
- # Adapt views to something table-like.
- columns = self._describe_table(connection, None, charset,
- full_name=full_name)
- sql = parser._describe_to_create(table_name, columns)
- return parser.parse(sql, charset)
-
- def _adjust_casing(self, table, charset=None):
- """Adjust Table name to the server case sensitivity, if needed."""
-
- casing = self._server_casing
-
- # For winxx database hosts. TODO: is this really needed?
- if casing == 1 and table.name != table.name.lower():
- table.name = table.name.lower()
- lc_alias = sa_schema._get_table_key(table.name, table.schema)
- table.metadata.tables[lc_alias] = table
-
- def _detect_charset(self, connection):
- raise NotImplementedError()
-
- def _detect_casing(self, connection):
- """Sniff out identifier case sensitivity.
-
- Cached per-connection. This value can not change without a server
- restart.
-
- """
- # http://dev.mysql.com/doc/refman/5.0/en/name-case-sensitivity.html
-
- charset = self._connection_charset
- row = self._compat_first(connection.execute(
- "SHOW VARIABLES LIKE 'lower_case_table_names'"),
- charset=charset)
- if not row:
- cs = 0
- else:
- # 4.0.15 returns OFF or ON according to [ticket:489]
- # 3.23 and 4.0.27 don't.
- if row[1] == 'OFF':
- cs = 0
- elif row[1] == 'ON':
- cs = 1
- else:
- cs = int(row[1])
- return cs
-
- def _detect_collations(self, connection):
- """Pull the active COLLATIONS list from the server.
-
- Cached per-connection.
- """
-
- collations = {}
- if self.server_version_info < (4, 1, 0):
- pass
- else:
- charset = self._connection_charset
- rs = connection.execute('SHOW COLLATION')
- for row in self._compat_fetchall(rs, charset):
- collations[row[0]] = row[1]
- return collations
-
- def _detect_ansiquotes(self, connection):
- """Detect and adjust for the ANSI_QUOTES sql mode."""
-
- row = self._compat_first(
- connection.execute("SHOW VARIABLES LIKE 'sql_mode'"),
- charset=self._connection_charset)
-
- if not row:
- mode = ''
- else:
- mode = row[1] or ''
- # 4.0
- if mode.isdigit():
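- # numeric sql_mode (MySQL 4.0): bit value 4 is taken
- # to mean ANSI_QUOTES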
- mode_no = int(mode)
- mode = (mode_no | 4 == mode_no) and 'ANSI_QUOTES' or ''
-
- self._server_ansiquotes = 'ANSI_QUOTES' in mode
-
- # as of MySQL 5.0.1
- self._backslash_escapes = 'NO_BACKSLASH_ESCAPES' not in mode
-
- def _show_create_table(self, connection, table, charset=None,
- full_name=None):
- """Run SHOW CREATE TABLE for a ``Table``."""
-
- if full_name is None:
- full_name = self.identifier_preparer.format_table(table)
- st = "SHOW CREATE TABLE %s" % full_name
-
- rp = None
- try:
- rp = connection.execute(st)
- except exc.DBAPIError, e:
- if self._extract_error_code(e.orig) == 1146:
- raise exc.NoSuchTableError(full_name)
- else:
- raise
- row = self._compat_first(rp, charset=charset)
- if not row:
- raise exc.NoSuchTableError(full_name)
- return row[1].strip()
-
- def _describe_table(self, connection, table, charset=None,
- full_name=None):
- """Run DESCRIBE for a ``Table`` and return processed rows."""
-
- if full_name is None:
- full_name = self.identifier_preparer.format_table(table)
- st = "DESCRIBE %s" % full_name
-
- rp, rows = None, None
- try:
- try:
- rp = connection.execute(st)
- except exc.DBAPIError, e:
- if self._extract_error_code(e.orig) == 1146:
- raise exc.NoSuchTableError(full_name)
- else:
- raise
- rows = self._compat_fetchall(rp, charset=charset)
- finally:
- if rp:
- rp.close()
- return rows
-
-class ReflectedState(object):
- """Stores raw information about a SHOW CREATE TABLE statement."""
-
- def __init__(self):
- self.columns = []
- self.table_options = {}
- self.table_name = None
- self.keys = []
- self.constraints = []
-
-class MySQLTableDefinitionParser(object):
- """Parses the results of a SHOW CREATE TABLE statement."""
-
- def __init__(self, dialect, preparer):
- self.dialect = dialect
- self.preparer = preparer
- self._prep_regexes()
-
- def parse(self, show_create, charset):
- state = ReflectedState()
- state.charset = charset
- for line in re.split(r'\r?\n', show_create):
- if line.startswith(' ' + self.preparer.initial_quote):
- self._parse_column(line, state)
- # a regular table options line
- elif line.startswith(') '):
- self._parse_table_options(line, state)
- # an ANSI-mode table options line
- elif line == ')':
- pass
- elif line.startswith('CREATE '):
- self._parse_table_name(line, state)
- # Not present in real reflection, but may be if loading from a file.
- elif not line:
- pass
- else:
- type_, spec = self._parse_constraints(line)
- if type_ is None:
- util.warn("Unknown schema content: %r" % line)
- elif type_ == 'key':
- state.keys.append(spec)
- elif type_ == 'constraint':
- state.constraints.append(spec)
- else:
- pass
-
- return state
-
- def _parse_constraints(self, line):
- """Parse a KEY or CONSTRAINT line.
-
- :param line: A line of SHOW CREATE TABLE output
- """
-
- # KEY
- m = self._re_key.match(line)
- if m:
- spec = m.groupdict()
- # convert columns into name, length pairs
- spec['columns'] = self._parse_keyexprs(spec['columns'])
- return 'key', spec
-
- # CONSTRAINT
- m = self._re_constraint.match(line)
- if m:
- spec = m.groupdict()
- spec['table'] = \
- self.preparer.unformat_identifiers(spec['table'])
- spec['local'] = [c[0]
- for c in self._parse_keyexprs(spec['local'])]
- spec['foreign'] = [c[0]
- for c in self._parse_keyexprs(spec['foreign'])]
- return 'constraint', spec
-
- # PARTITION and SUBPARTITION
- m = self._re_partition.match(line)
- if m:
- # Punt!
- return 'partition', line
-
- # No match.
- return (None, line)
-
- def _parse_table_name(self, line, state):
- """Extract the table name.
-
- :param line: The first line of SHOW CREATE TABLE
- """
-
- regex, cleanup = self._pr_name
- m = regex.match(line)
- if m:
- state.table_name = cleanup(m.group('name'))
-
- def _parse_table_options(self, line, state):
- """Build a dictionary of all reflected table-level options.
-
- :param line: The final line of SHOW CREATE TABLE output.
- """
-
- options = {}
-
- if not line or line == ')':
- pass
-
- else:
- rest_of_line = line[:]
- for regex, cleanup in self._pr_options:
- m = regex.search(rest_of_line)
- if not m:
- continue
- directive, value = m.group('directive'), m.group('val')
- if cleanup:
- value = cleanup(value)
- options[directive.lower()] = value
- rest_of_line = regex.sub('', rest_of_line)
-
- for nope in ('auto_increment', 'data directory', 'index directory'):
- options.pop(nope, None)
-
- for opt, val in options.items():
- state.table_options['%s_%s' % (self.dialect.name, opt)] = val
-
- def _parse_column(self, line, state):
- """Extract column details.
-
- Falls back to a 'minimal support' variant if full parse fails.
-
- :param line: Any column-bearing line from SHOW CREATE TABLE
- """
-
- spec = None
- m = self._re_column.match(line)
- if m:
- spec = m.groupdict()
- spec['full'] = True
- else:
- m = self._re_column_loose.match(line)
- if m:
- spec = m.groupdict()
- spec['full'] = False
- if not spec:
- util.warn("Unknown column definition %r" % line)
- return
- if not spec['full']:
- util.warn("Incomplete reflection of column definition %r" % line)
-
- name, type_, args, notnull = \
- spec['name'], spec['coltype'], spec['arg'], spec['notnull']
-
- try:
- col_type = self.dialect.ischema_names[type_]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (type_, name))
- col_type = sqltypes.NullType
-
- # Column type positional arguments eg. varchar(32)
- if args is None or args == '':
- type_args = []
- elif args[0] == "'" and args[-1] == "'":
- type_args = self._re_csv_str.findall(args)
- else:
- type_args = [int(v) for v in self._re_csv_int.findall(args)]
-
- # Column type keyword options
- type_kw = {}
- for kw in ('unsigned', 'zerofill'):
- if spec.get(kw, False):
- type_kw[kw] = True
- for kw in ('charset', 'collate'):
- if spec.get(kw, False):
- type_kw[kw] = spec[kw]
-
- if type_ == 'enum':
- type_args = ENUM._strip_enums(type_args)
-
- type_instance = col_type(*type_args, **type_kw)
-
- col_args, col_kw = [], {}
-
- # NOT NULL
- col_kw['nullable'] = True
- if spec.get('notnull', False):
- col_kw['nullable'] = False
-
- # AUTO_INCREMENT
- if spec.get('autoincr', False):
- col_kw['autoincrement'] = True
- elif issubclass(col_type, sqltypes.Integer):
- col_kw['autoincrement'] = False
-
- # DEFAULT
- default = spec.get('default', None)
-
- if default == 'NULL':
- # eliminates the need to deal with this later.
- default = None
-
- col_d = dict(name=name, type=type_instance, default=default)
- col_d.update(col_kw)
- state.columns.append(col_d)
-
- def _describe_to_create(self, table_name, columns):
- """Re-format DESCRIBE output as a SHOW CREATE TABLE string.
-
- DESCRIBE is a much simpler reflection and is sufficient for
- reflecting views for runtime use. This method formats DDL
- for columns only; keys are omitted.
-
- :param columns: A sequence of DESCRIBE or SHOW COLUMNS 6-tuples.
- SHOW FULL COLUMNS FROM rows must be rearranged for use with
- this function.
- """
-
- buffer = []
- for row in columns:
- (name, col_type, nullable, default, extra) = \
- [row[i] for i in (0, 1, 2, 4, 5)]
-
- line = [' ']
- line.append(self.preparer.quote_identifier(name))
- line.append(col_type)
- if not nullable:
- line.append('NOT NULL')
- if default:
- if 'auto_increment' in default:
- pass
- elif (col_type.startswith('timestamp') and
- default.startswith('C')):
- line.append('DEFAULT')
- line.append(default)
- elif default == 'NULL':
- line.append('DEFAULT')
- line.append(default)
- else:
- line.append('DEFAULT')
- line.append("'%s'" % default.replace("'", "''"))
- if extra:
- line.append(extra)
-
- buffer.append(' '.join(line))
-
- return ''.join([('CREATE TABLE %s (\n' %
- self.preparer.quote_identifier(table_name)),
- ',\n'.join(buffer),
- '\n) '])
-
- def _parse_keyexprs(self, identifiers):
- """Unpack '"col"(2),"col" ASC'-ish strings into components."""
-
- return self._re_keyexprs.findall(identifiers)
-
- def _prep_regexes(self):
- """Pre-compile regular expressions."""
-
- self._re_columns = []
- self._pr_options = []
-
- _final = self.preparer.final_quote
-
- quotes = dict(zip(('iq', 'fq', 'esc_fq'),
- [re.escape(s) for s in
- (self.preparer.initial_quote,
- _final,
- self.preparer._escape_identifier(_final))]))
-
- self._pr_name = _pr_compile(
- r'^CREATE (?:\w+ +)?TABLE +'
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +\($' % quotes,
- self.preparer._unescape_identifier)
-
- # `col`,`col2`(32),`col3`(15) DESC
- #
- # Note: ASC and DESC aren't reflected, so we'll punt...
- self._re_keyexprs = _re_compile(
- r'(?:'
- r'(?:%(iq)s((?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)'
- r'(?:\((\d+)\))?(?=\,|$))+' % quotes)
-
- # 'foo' or 'foo','bar' or 'fo,o','ba''a''r'
- self._re_csv_str = _re_compile(r'\x27(?:\x27\x27|[^\x27])*\x27')
-
- # 123 or 123,456
- self._re_csv_int = _re_compile(r'\d+')
-
-
- # `colname` <type> [type opts]
- # (NOT NULL | NULL)
- # DEFAULT ('value' | CURRENT_TIMESTAMP...)
- # COMMENT 'comment'
- # COLUMN_FORMAT (FIXED|DYNAMIC|DEFAULT)
- # STORAGE (DISK|MEMORY)
- self._re_column = _re_compile(
- r' '
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'(?P<coltype>\w+)'
- r'(?:\((?P<arg>(?:\d+|\d+,\d+|'
- r'(?:\x27(?:\x27\x27|[^\x27])*\x27,?)+))\))?'
- r'(?: +(?P<unsigned>UNSIGNED))?'
- r'(?: +(?P<zerofill>ZEROFILL))?'
- r'(?: +CHARACTER SET +(?P<charset>[\w_]+))?'
- r'(?: +COLLATE +(?P<collate>[\w_]+))?'
- r'(?: +(?P<notnull>NOT NULL))?'
- r'(?: +DEFAULT +(?P<default>'
- r'(?:NULL|\x27(?:\x27\x27|[^\x27])*\x27|\w+'
- r'(?: +ON UPDATE \w+)?)'
- r'))?'
- r'(?: +(?P<autoincr>AUTO_INCREMENT))?'
- r'(?: +COMMENT +(?P<comment>(?:\x27\x27|[^\x27])+))?'
- r'(?: +COLUMN_FORMAT +(?P<colfmt>\w+))?'
- r'(?: +STORAGE +(?P<storage>\w+))?'
- r'(?: +(?P<extra>.*))?'
- r',?$'
- % quotes
- )
-
- # Fallback, try to parse as little as possible
- self._re_column_loose = _re_compile(
- r' '
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'(?P<coltype>\w+)'
- r'(?:\((?P<arg>(?:\d+|\d+,\d+|\x27(?:\x27\x27|[^\x27])+\x27))\))?'
- r'.*?(?P<notnull>NOT NULL)?'
- % quotes
- )
-
- # (PRIMARY|UNIQUE|FULLTEXT|SPATIAL) INDEX `name` (USING (BTREE|HASH))?
- # (`col` (ASC|DESC)?, `col` (ASC|DESC)?)
- # KEY_BLOCK_SIZE size | WITH PARSER name
- self._re_key = _re_compile(
- r' '
- r'(?:(?P<type>\S+) )?KEY'
- r'(?: +%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s)?'
- r'(?: +USING +(?P<using_pre>\S+))?'
- r' +\((?P<columns>.+?)\)'
- r'(?: +USING +(?P<using_post>\S+))?'
- r'(?: +KEY_BLOCK_SIZE +(?P<keyblock>\S+))?'
- r'(?: +WITH PARSER +(?P<parser>\S+))?'
- r',?$'
- % quotes
- )
-
- # CONSTRAINT `name` FOREIGN KEY (`local_col`)
- # REFERENCES `remote` (`remote_col`)
- # MATCH FULL | MATCH PARTIAL | MATCH SIMPLE
- # ON DELETE CASCADE ON UPDATE RESTRICT
- #
- # unique constraints come back as KEYs
- kw = quotes.copy()
- kw['on'] = 'RESTRICT|CASCADE|SET NULL|NO ACTION'
- self._re_constraint = _re_compile(
- r' '
- r'CONSTRAINT +'
- r'%(iq)s(?P<name>(?:%(esc_fq)s|[^%(fq)s])+)%(fq)s +'
- r'FOREIGN KEY +'
- r'\((?P<local>[^\)]+?)\) REFERENCES +'
- r'(?P<table>%(iq)s[^%(fq)s]+%(fq)s(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +'
- r'\((?P<foreign>[^\)]+?)\)'
- r'(?: +(?P<match>MATCH \w+))?'
- r'(?: +ON DELETE (?P<ondelete>%(on)s))?'
- r'(?: +ON UPDATE (?P<onupdate>%(on)s))?'
- % kw
- )
-
- # PARTITION
- #
- # punt!
- self._re_partition = _re_compile(
- r' '
- r'(?:SUB)?PARTITION')
-
- # Table-level options (COLLATE, ENGINE, etc.)
- # Do the string options first, since they have quoted strings we need to get rid of.
- for option in _options_of_type_string:
- self._add_option_string(option)
-
- for option in ('ENGINE', 'TYPE', 'AUTO_INCREMENT',
- 'AVG_ROW_LENGTH', 'CHARACTER SET',
- 'DEFAULT CHARSET', 'CHECKSUM',
- 'COLLATE', 'DELAY_KEY_WRITE', 'INSERT_METHOD',
- 'MAX_ROWS', 'MIN_ROWS', 'PACK_KEYS', 'ROW_FORMAT',
- 'KEY_BLOCK_SIZE'):
- self._add_option_word(option)
-
- self._add_option_regex('UNION', r'\([^\)]+\)')
- self._add_option_regex('TABLESPACE', r'.*? STORAGE DISK')
- self._add_option_regex('RAID_TYPE',
- r'\w+\s+RAID_CHUNKS\s*\=\s*\w+RAID_CHUNKSIZE\s*=\s*\w+')
-
- _optional_equals = r'(?:\s*(?:=\s*)|\s+)'
-
- def _add_option_string(self, directive):
- regex = (r'(?P<directive>%s)%s'
- r"'(?P<val>(?:[^']|'')*?)'(?!')" %
- (re.escape(directive), self._optional_equals))
- self._pr_options.append(
- _pr_compile(regex, lambda v: v.replace("\\\\","\\").replace("''", "'")))
-
- def _add_option_word(self, directive):
- regex = (r'(?P<directive>%s)%s'
- r'(?P<val>\w+)' %
- (re.escape(directive), self._optional_equals))
- self._pr_options.append(_pr_compile(regex))
-
- def _add_option_regex(self, directive, regex):
- regex = (r'(?P<directive>%s)%s'
- r'(?P<val>%s)' %
- (re.escape(directive), self._optional_equals, regex))
- self._pr_options.append(_pr_compile(regex))
-
-_options_of_type_string = ('COMMENT', 'DATA DIRECTORY', 'INDEX DIRECTORY',
- 'PASSWORD', 'CONNECTION')
-
-log.class_logger(MySQLTableDefinitionParser)
-log.class_logger(MySQLDialect)
-
-
-class _DecodingRowProxy(object):
- """Return unicode-decoded values based on type inspection.
-
- Smooth over data type issues (esp. with alpha driver versions) and
- normalize strings as Unicode regardless of user-configured driver
- encoding settings.
-
- """
-
- # Some MySQL-python versions can return some columns as
- # sets.Set(['value']) (seriously) but thankfully that doesn't
- # seem to come up in DDL queries.
-
- def __init__(self, rowproxy, charset):
- self.rowproxy = rowproxy
- self.charset = charset
-
- def __getitem__(self, index):
- item = self.rowproxy[index]
- if isinstance(item, _array):
- item = item.tostring()
- # Py2K
- if self.charset and isinstance(item, str):
- # end Py2K
- # Py3K
- #if self.charset and isinstance(item, bytes):
- return item.decode(self.charset)
- else:
- return item
-
- def __getattr__(self, attr):
- item = getattr(self.rowproxy, attr)
- if isinstance(item, _array):
- item = item.tostring()
- # Py2K
- if self.charset and isinstance(item, str):
- # end Py2K
- # Py3K
- #if self.charset and isinstance(item, bytes):
- return item.decode(self.charset)
- else:
- return item
-
-
-def _pr_compile(regex, cleanup=None):
- """Prepare a 2-tuple of compiled regex and callable."""
-
- return (_re_compile(regex), cleanup)
-
-def _re_compile(regex):
- """Compile a string to regex, I and UNICODE."""
-
- return re.compile(regex, re.I | re.UNICODE)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqlconnector.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqlconnector.py
deleted file mode 100755
index 035ebe45..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqlconnector.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# mysql/mysqlconnector.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via the MySQL Connector/Python adapter.
-
-MySQL Connector/Python is available at:
-
- https://launchpad.net/myconnpy
-
-Connecting
------------
-
-Connect string format::
-
- mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
-
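-For example (credentials here are hypothetical)::
-
- create_engine('mysql+mysqlconnector://scott:tiger@localhost/test')
-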
-"""
-
-import re
-
-from sqlalchemy.dialects.mysql.base import (MySQLDialect,
- MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer,
- BIT)
-
-from sqlalchemy.engine import base as engine_base, default
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
-from sqlalchemy import processors
-
-class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
-
- def get_lastrowid(self):
- return self.cursor.lastrowid
-
-
-class MySQLCompiler_mysqlconnector(MySQLCompiler):
- def visit_mod(self, binary, **kw):
- return self.process(binary.left) + " %% " + self.process(binary.right)
-
- def post_process_text(self, text):
- return text.replace('%', '%%')
-
-class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
-
- def _escape_identifier(self, value):
- value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace("%", "%%")
-
-class _myconnpyBIT(BIT):
- def result_processor(self, dialect, coltype):
- """MySQL-connector already converts mysql bits, so."""
-
- return None
-
-class MySQLDialect_mysqlconnector(MySQLDialect):
- driver = 'mysqlconnector'
- supports_unicode_statements = True
- supports_unicode_binds = True
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = True
-
- supports_native_decimal = True
-
- default_paramstyle = 'format'
- execution_ctx_cls = MySQLExecutionContext_mysqlconnector
- statement_compiler = MySQLCompiler_mysqlconnector
-
- preparer = MySQLIdentifierPreparer_mysqlconnector
-
- colspecs = util.update_copy(
- MySQLDialect.colspecs,
- {
- BIT: _myconnpyBIT,
- }
- )
-
- @classmethod
- def dbapi(cls):
- from mysql import connector
- return connector
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- opts.update(url.query)
-
- util.coerce_kw_type(opts, 'buffered', bool)
- util.coerce_kw_type(opts, 'raise_on_warnings', bool)
- opts['buffered'] = True
- opts['raise_on_warnings'] = True
-
- # FOUND_ROWS must be set in ClientFlag to enable
- # supports_sane_rowcount.
- if self.dbapi is not None:
- try:
- from mysql.connector.constants import ClientFlag
- client_flags = opts.get('client_flags', ClientFlag.get_default())
- client_flags |= ClientFlag.FOUND_ROWS
- opts['client_flags'] = client_flags
- except:
- pass
- return [[], opts]
-
- def _get_server_version_info(self, connection):
- dbapi_con = connection.connection
-
- from mysql.connector.constants import ClientFlag
- dbapi_con.set_client_flag(ClientFlag.FOUND_ROWS)
-
- version = dbapi_con.get_server_version()
- return tuple(version)
-
- def _detect_charset(self, connection):
- return connection.connection.get_characterset_info()
-
- def _extract_error_code(self, exception):
- return exception.errno
-
- def is_disconnect(self, e, connection, cursor):
- errnos = (2006, 2013, 2014, 2045, 2055, 2048)
- exceptions = (self.dbapi.OperationalError,self.dbapi.InterfaceError)
- if isinstance(e, exceptions):
- return e.errno in errnos
- else:
- return False
-
- def _compat_fetchall(self, rp, charset=None):
- return rp.fetchall()
-
- def _compat_fetchone(self, rp, charset=None):
- return rp.fetchone()
-
-dialect = MySQLDialect_mysqlconnector
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqldb.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqldb.py
deleted file mode 100755
index 1b0ea85c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/mysqldb.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# mysql/mysqldb.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via the MySQL-python adapter.
-
-MySQL-Python is available at:
-
- http://sourceforge.net/projects/mysql-python
-
-At least version 1.2.1 or 1.2.2 should be used.
-
-Connecting
------------
-
-Connect string format::
-
- mysql+mysqldb://<user>:<password>@<host>[:<port>]/<dbname>
-
-Character Sets
---------------
-
-Many MySQL server installations default to a ``latin1`` encoding for client
-connections. All data sent through the connection will be converted into
-``latin1``, even if you have ``utf8`` or another character set on your tables
-and columns. With versions 4.1 and higher, you can change the connection
-character set either through server configuration or by including the
-``charset`` parameter in the URL used for ``create_engine``. The ``charset``
-option is passed through to MySQL-Python and has the side-effect of also
-enabling ``use_unicode`` in the driver by default. For regular encoded
-strings, also pass ``use_unicode=0`` in the connection arguments::
-
- # set client encoding to utf8; all strings come back as unicode
- create_engine('mysql+mysqldb:///mydb?charset=utf8')
-
- # set client encoding to utf8; all strings come back as utf8 str
- create_engine('mysql+mysqldb:///mydb?charset=utf8&use_unicode=0')
-
-Known Issues
--------------
-
-MySQL-python version 1.2.2 has a serious memory leak related
-to unicode conversion, a feature which is disabled via ``use_unicode=0``.
-Using a more recent version of MySQL-python is recommended. The
-recommended connection form with SQLAlchemy is::
-
- engine = create_engine('mysql://scott:tiger@localhost/test?charset=utf8&use_unicode=0', pool_recycle=3600)
-
-
-"""
-
-from sqlalchemy.dialects.mysql.base import (MySQLDialect, MySQLExecutionContext,
- MySQLCompiler, MySQLIdentifierPreparer)
-from sqlalchemy.connectors.mysqldb import (
- MySQLDBExecutionContext,
- MySQLDBCompiler,
- MySQLDBIdentifierPreparer,
- MySQLDBConnector
- )
-
-class MySQLExecutionContext_mysqldb(MySQLDBExecutionContext, MySQLExecutionContext):
- pass
-
-
-class MySQLCompiler_mysqldb(MySQLDBCompiler, MySQLCompiler):
- pass
-
-
-class MySQLIdentifierPreparer_mysqldb(MySQLDBIdentifierPreparer, MySQLIdentifierPreparer):
- pass
-
-class MySQLDialect_mysqldb(MySQLDBConnector, MySQLDialect):
- execution_ctx_cls = MySQLExecutionContext_mysqldb
- statement_compiler = MySQLCompiler_mysqldb
- preparer = MySQLIdentifierPreparer_mysqldb
-
-dialect = MySQLDialect_mysqldb
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/oursql.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/oursql.py
deleted file mode 100755
index 4ea4f56b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/oursql.py
+++ /dev/null
@@ -1,266 +0,0 @@
-# mysql/oursql.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via the oursql adapter.
-
-OurSQL is available at:
-
- http://packages.python.org/oursql/
-
-Connecting
------------
-
-Connect string format::
-
- mysql+oursql://<user>:<password>@<host>[:<port>]/<dbname>
-
-Character Sets
---------------
-
-oursql defaults to using ``utf8`` as the connection charset, but other
-encodings may be used instead. Like the MySQL-Python driver, unicode support
-can be completely disabled::
-
- # oursql sets the connection charset to utf8 automatically; all strings come
- # back as utf8 str
- create_engine('mysql+oursql:///mydb?use_unicode=0')
-
-To not automatically use ``utf8`` and instead use whatever the connection
-defaults to, there is a separate parameter::
-
- # use the default connection charset; all strings come back as unicode
- create_engine('mysql+oursql:///mydb?default_charset=1')
-
- # use latin1 as the connection charset; all strings come back as unicode
- create_engine('mysql+oursql:///mydb?charset=latin1')
-"""
-
-import re
-
-from sqlalchemy.dialects.mysql.base import (BIT, MySQLDialect, MySQLExecutionContext,
- MySQLCompiler, MySQLIdentifierPreparer)
-from sqlalchemy.engine import base as engine_base, default
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
-from sqlalchemy import processors
-
-
-
-class _oursqlBIT(BIT):
- def result_processor(self, dialect, coltype):
- """oursql already converts mysql bits, so."""
-
- return None
-
-
-class MySQLExecutionContext_oursql(MySQLExecutionContext):
-
- @property
- def plain_query(self):
- return self.execution_options.get('_oursql_plain_query', False)
-
-class MySQLDialect_oursql(MySQLDialect):
- driver = 'oursql'
-# Py2K
- supports_unicode_binds = True
- supports_unicode_statements = True
-# end Py2K
-
- supports_native_decimal = True
-
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = True
- execution_ctx_cls = MySQLExecutionContext_oursql
-
- colspecs = util.update_copy(
- MySQLDialect.colspecs,
- {
- sqltypes.Time: sqltypes.Time,
- BIT: _oursqlBIT,
- }
- )
-
- @classmethod
- def dbapi(cls):
- return __import__('oursql')
-
- def do_execute(self, cursor, statement, parameters, context=None):
- """Provide an implementation of *cursor.execute(statement, parameters)*."""
-
- if context and context.plain_query:
- cursor.execute(statement, plain_query=True)
- else:
- cursor.execute(statement, parameters)
-
- def do_begin(self, connection):
- connection.cursor().execute('BEGIN', plain_query=True)
-
- def _xa_query(self, connection, query, xid):
-# Py2K
- arg = connection.connection._escape_string(xid)
-# end Py2K
-# Py3K
-# charset = self._connection_charset
-# arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
- connection.execution_options(_oursql_plain_query=True).execute(query % arg)
-
- # Because mysql is bad, these methods have to be
- # reimplemented to use _PlainQuery. Basically, some queries
- # refuse to return any data if they're run through
- # the parameterized query API, or refuse to be parameterized
- # in the first place.
- def do_begin_twophase(self, connection, xid):
- self._xa_query(connection, 'XA BEGIN "%s"', xid)
-
- def do_prepare_twophase(self, connection, xid):
- self._xa_query(connection, 'XA END "%s"', xid)
- self._xa_query(connection, 'XA PREPARE "%s"', xid)
-
- def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- if not is_prepared:
- self._xa_query(connection, 'XA END "%s"', xid)
- self._xa_query(connection, 'XA ROLLBACK "%s"', xid)
-
- def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- if not is_prepared:
- self.do_prepare_twophase(connection, xid)
- self._xa_query(connection, 'XA COMMIT "%s"', xid)
-
- # Q: why didn't we need all these "plain_query" overrides earlier ?
- # am i on a newer/older version of OurSQL ?
- def has_table(self, connection, table_name, schema=None):
- return MySQLDialect.has_table(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- table_name, schema)
-
- def get_table_options(self, connection, table_name, schema=None, **kw):
- return MySQLDialect.get_table_options(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- table_name,
- schema = schema,
- **kw
- )
-
-
- def get_columns(self, connection, table_name, schema=None, **kw):
- return MySQLDialect.get_columns(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- table_name,
- schema=schema,
- **kw
- )
-
- def get_view_names(self, connection, schema=None, **kw):
- return MySQLDialect.get_view_names(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- schema=schema,
- **kw
- )
-
- def get_table_names(self, connection, schema=None, **kw):
- return MySQLDialect.get_table_names(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- schema
- )
-
- def get_schema_names(self, connection, **kw):
- return MySQLDialect.get_schema_names(self,
- connection.connect().\
- execution_options(_oursql_plain_query=True),
- **kw
- )
-
- def initialize(self, connection):
- return MySQLDialect.initialize(
- self,
- connection.execution_options(_oursql_plain_query=True)
- )
-
- def _show_create_table(self, connection, table, charset=None,
- full_name=None):
- return MySQLDialect._show_create_table(self,
- connection.contextual_connect(close_with_result=True).
- execution_options(_oursql_plain_query=True),
- table, charset, full_name)
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.ProgrammingError):
- return e.errno is None and 'cursor' not in e.args[1] and e.args[1].endswith('closed')
- else:
- return e.errno in (2006, 2013, 2014, 2045, 2055)
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(database='db', username='user',
- password='passwd')
- opts.update(url.query)
-
- util.coerce_kw_type(opts, 'port', int)
- util.coerce_kw_type(opts, 'compress', bool)
- util.coerce_kw_type(opts, 'autoping', bool)
-
- util.coerce_kw_type(opts, 'default_charset', bool)
- if opts.pop('default_charset', False):
- opts['charset'] = None
- else:
- util.coerce_kw_type(opts, 'charset', str)
- opts['use_unicode'] = opts.get('use_unicode', True)
- util.coerce_kw_type(opts, 'use_unicode', bool)
-
- # FOUND_ROWS must be set in CLIENT_FLAGS to enable
- # supports_sane_rowcount.
- opts.setdefault('found_rows', True)
-
- ssl = {}
- for key in ['ssl_ca', 'ssl_key', 'ssl_cert',
- 'ssl_capath', 'ssl_cipher']:
- if key in opts:
- ssl[key[4:]] = opts[key]
- util.coerce_kw_type(ssl, key[4:], str)
- del opts[key]
- if ssl:
- opts['ssl'] = ssl
-
- return [[], opts]
-
- def _get_server_version_info(self, connection):
- dbapi_con = connection.connection
- version = []
- r = re.compile('[.\-]')
- for n in r.split(dbapi_con.server_info):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
-
- def _extract_error_code(self, exception):
- return exception.errno
-
- def _detect_charset(self, connection):
- """Sniff out the character set in use for connection results."""
-
- return connection.connection.charset
-
- def _compat_fetchall(self, rp, charset=None):
- """oursql isn't super-broken like MySQLdb, yaaay."""
- return rp.fetchall()
-
- def _compat_fetchone(self, rp, charset=None):
- """oursql isn't super-broken like MySQLdb, yaaay."""
- return rp.fetchone()
-
- def _compat_first(self, rp, charset=None):
- return rp.first()
-
-
-dialect = MySQLDialect_oursql
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pymysql.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pymysql.py
deleted file mode 100755
index dee3dfea..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pymysql.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# mysql/pymysql.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via the pymysql adapter.
-
-pymysql is available at:
-
- http://code.google.com/p/pymysql/
-
-Connecting
-----------
-
-Connect string::
-
- mysql+pymysql://<username>:<password>@<host>/<dbname>[?<options>]
-
-MySQL-Python Compatibility
---------------------------
-
-The pymysql DBAPI is a pure Python port of the MySQL-python (MySQLdb) driver,
-and targets 100% compatibility. Most behavioral notes for MySQL-python apply to
-the pymysql driver as well.
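-
-A minimal engine using this dialect (credentials are hypothetical)::
-
- create_engine('mysql+pymysql://scott:tiger@localhost/test')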
-
-"""
-
-from sqlalchemy.dialects.mysql.mysqldb import MySQLDialect_mysqldb
-
-class MySQLDialect_pymysql(MySQLDialect_mysqldb):
- driver = 'pymysql'
-
- @classmethod
- def dbapi(cls):
- return __import__('pymysql')
-
-dialect = MySQLDialect_pymysql \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pyodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pyodbc.py
deleted file mode 100755
index 84d43cf2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/pyodbc.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# mysql/pyodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via the pyodbc adapter.
-
-pyodbc is available at:
-
- http://pypi.python.org/pypi/pyodbc/
-
-Connecting
-----------
-
-Connect string::
-
- mysql+pyodbc://<username>:<password>@<dsnname>
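-
-For example, against a pre-configured ODBC data source (the DSN name is
-hypothetical)::
-
- create_engine('mysql+pyodbc://scott:tiger@some_dsn')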
-
-Limitations
------------
-
-The mysql-pyodbc dialect is subject to unresolved character encoding issues
-which exist within the current ODBC drivers available.
-(see http://code.google.com/p/pyodbc/issues/detail?id=25). Consider usage
-of OurSQL, MySQLdb, or MySQL-connector/Python.
-
-"""
-
-from sqlalchemy.dialects.mysql.base import MySQLDialect, MySQLExecutionContext
-from sqlalchemy.connectors.pyodbc import PyODBCConnector
-from sqlalchemy.engine import base as engine_base
-from sqlalchemy import util
-import re
-
-class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
-
- def get_lastrowid(self):
- cursor = self.create_cursor()
- cursor.execute("SELECT LAST_INSERT_ID()")
- lastrowid = cursor.fetchone()[0]
- cursor.close()
- return lastrowid
-
-class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
- supports_unicode_statements = False
- execution_ctx_cls = MySQLExecutionContext_pyodbc
-
- pyodbc_driver_name = "MySQL"
-
- def __init__(self, **kw):
- # deal with http://code.google.com/p/pyodbc/issues/detail?id=25
- kw.setdefault('convert_unicode', True)
- super(MySQLDialect_pyodbc, self).__init__(**kw)
-
- def _detect_charset(self, connection):
- """Sniff out the character set in use for connection results."""
-
- # Prefer 'character_set_results' for the current connection over the
- # value in the driver. SET NAMES or individual variable SETs will
- # change the charset without updating the driver's view of the world.
- #
- # If it's decided that issuing that sort of SQL leaves you SOL, then
- # this can prefer the driver value.
- rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
- opts = dict([(row[0], row[1]) for row in self._compat_fetchall(rs)])
- for key in ('character_set_connection', 'character_set'):
- if opts.get(key, None):
- return opts[key]
-
- util.warn("Could not detect the connection character set. Assuming latin1.")
- return 'latin1'
-
- def _extract_error_code(self, exception):
- # the search may fail; guard before calling group()
- m = re.compile(r"\((\d+)\)").search(str(exception.args))
- if m:
- return int(m.group(1))
- else:
- return None
-
-dialect = MySQLDialect_pyodbc
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/zxjdbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/zxjdbc.py
deleted file mode 100755
index a56fb6a5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/mysql/zxjdbc.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# mysql/zxjdbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the MySQL database via Jython's zxjdbc JDBC connector.
-
-JDBC Driver
------------
-
-The official MySQL JDBC driver is at
-http://dev.mysql.com/downloads/connector/j/.
-
-Connecting
-----------
-
-Connect string format::
-
- mysql+zxjdbc://<user>:<password>@<hostname>[:<port>]/<database>
-
-Character Sets
---------------
-
-SQLAlchemy zxjdbc dialects pass unicode straight through to the
-zxjdbc/JDBC layer. To allow multiple character sets to be sent from the
-MySQL Connector/J JDBC driver, by default SQLAlchemy sets its
-``characterEncoding`` connection property to ``UTF-8``. It may be
-overridden via a ``create_engine`` URL parameter.
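-
-For example (credentials are hypothetical)::
-
- create_engine('mysql+zxjdbc://scott:tiger@localhost/test?characterEncoding=ISO-8859-1')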
-
-"""
-import re
-
-from sqlalchemy import types as sqltypes, util
-from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from sqlalchemy.dialects.mysql.base import BIT, MySQLDialect, MySQLExecutionContext
-
-class _ZxJDBCBit(BIT):
- def result_processor(self, dialect, coltype):
- """Converts boolean or byte arrays from MySQL Connector/J to longs."""
- def process(value):
- if value is None:
- return value
- if isinstance(value, bool):
- return int(value)
- v = 0L
- for i in value:
- v = v << 8 | (i & 0xff)
- value = v
- return value
- return process
-
-
-class MySQLExecutionContext_zxjdbc(MySQLExecutionContext):
- def get_lastrowid(self):
- cursor = self.create_cursor()
- cursor.execute("SELECT LAST_INSERT_ID()")
- lastrowid = cursor.fetchone()[0]
- cursor.close()
- return lastrowid
-
-
-class MySQLDialect_zxjdbc(ZxJDBCConnector, MySQLDialect):
- jdbc_db_name = 'mysql'
- jdbc_driver_name = 'com.mysql.jdbc.Driver'
-
- execution_ctx_cls = MySQLExecutionContext_zxjdbc
-
- colspecs = util.update_copy(
- MySQLDialect.colspecs,
- {
- sqltypes.Time: sqltypes.Time,
- BIT: _ZxJDBCBit
- }
- )
-
- def _detect_charset(self, connection):
- """Sniff out the character set in use for connection results."""
- # Prefer 'character_set_results' for the current connection over the
- # value in the driver. SET NAMES or individual variable SETs will
- # change the charset without updating the driver's view of the world.
- #
- # If it's decided that issuing that sort of SQL leaves you SOL, then
- # this can prefer the driver value.
- rs = connection.execute("SHOW VARIABLES LIKE 'character_set%%'")
- opts = dict((row[0], row[1]) for row in self._compat_fetchall(rs))
- for key in ('character_set_connection', 'character_set'):
- if opts.get(key, None):
- return opts[key]
-
- util.warn("Could not detect the connection character set. Assuming latin1.")
- return 'latin1'
-
- def _driver_kwargs(self):
- """return kw arg dict to be sent to connect()."""
- return dict(characterEncoding='UTF-8', yearIsDateType='false')
-
- def _extract_error_code(self, exception):
- # e.g.: DBAPIError: (Error) Table 'test.u2' doesn't exist
- # [SQLCode: 1146], [SQLState: 42S02] 'DESCRIBE `u2`' ()
- m = re.compile(r"\[SQLCode\: (\d+)\]").search(str(exception.args))
- if m:
- return int(m.group(1))
-
- def _get_server_version_info(self,connection):
- dbapi_con = connection.connection
- version = []
- r = re.compile('[.\-]')
- for n in r.split(dbapi_con.dbversion):
- try:
- version.append(int(n))
- except ValueError:
- version.append(n)
- return tuple(version)
-
-dialect = MySQLDialect_zxjdbc
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/__init__.py
deleted file mode 100755
index 59267c5e..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/__init__.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# oracle/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.oracle import base, cx_oracle, zxjdbc
-
-base.dialect = cx_oracle.dialect
-
-from sqlalchemy.dialects.oracle.base import \
- VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, NUMBER,\
- BLOB, BFILE, CLOB, NCLOB, TIMESTAMP, RAW,\
- FLOAT, DOUBLE_PRECISION, LONG, dialect, INTERVAL,\
- VARCHAR2, NVARCHAR2
-
-
-__all__ = (
-'VARCHAR', 'NVARCHAR', 'CHAR', 'DATE', 'DATETIME', 'NUMBER',
-'BLOB', 'BFILE', 'CLOB', 'NCLOB', 'TIMESTAMP', 'RAW',
-'FLOAT', 'DOUBLE_PRECISION', 'LONG', 'dialect', 'INTERVAL',
-'VARCHAR2', 'NVARCHAR2', 'ROWID'
-)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/base.py
deleted file mode 100755
index 14e6309c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/base.py
+++ /dev/null
@@ -1,1139 +0,0 @@
-# oracle/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Oracle database.
-
-Oracle versions 8 through current (11g at the time of this writing) are supported.
-
-For information on connecting via specific drivers, see the documentation
-for that driver.
-
-Connect Arguments
------------------
-
-The dialect supports several :func:`~sqlalchemy.create_engine()` arguments which
-affect the behavior of the dialect regardless of driver in use.
-
-* *use_ansi* - Use ANSI JOIN constructs (see the section on Oracle 8). Defaults
- to ``True``. If ``False``, Oracle-8 compatible constructs are used for joins.
-
-* *optimize_limits* - defaults to ``False``. See the section on LIMIT/OFFSET.
-
-* *use_binds_for_limits* - defaults to ``True``. See the section on LIMIT/OFFSET,
- and the combined example below.
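-
-A sketch combining these flags (the connect string is hypothetical)::
-
- create_engine('oracle://scott:tiger@dsn',
- use_ansi=True,
- optimize_limits=True,
- use_binds_for_limits=False)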
-
-Auto Increment Behavior
------------------------
-
-SQLAlchemy Table objects which include integer primary keys are usually assumed to have
-"autoincrementing" behavior, meaning they can generate their own primary key values upon
-INSERT. Since Oracle has no "autoincrement" feature, SQLAlchemy relies upon sequences
-to produce these values. With the Oracle dialect, *a sequence must always be explicitly
-specified to enable autoincrement*. This diverges from the majority of documentation
-examples which assume the usage of an autoincrement-capable database. To specify sequences,
-use the sqlalchemy.schema.Sequence object which is passed to a Column construct::
-
- t = Table('mytable', metadata,
- Column('id', Integer, Sequence('id_seq'), primary_key=True),
- Column(...), ...
- )
-
-This step is also required when using table reflection, i.e. autoload=True::
-
- t = Table('mytable', metadata,
- Column('id', Integer, Sequence('id_seq'), primary_key=True),
- autoload=True
- )
-
-Identifier Casing
------------------
-
-In Oracle, the data dictionary represents all case-insensitive identifier names
-using UPPERCASE text. SQLAlchemy, on the other hand, considers an all-lowercase identifier
-name to be case insensitive. The Oracle dialect converts all case-insensitive identifiers
-to and from those two formats during schema-level communication, such as reflection of
-tables and indexes. Using an UPPERCASE name on the SQLAlchemy side indicates a
-case-sensitive identifier, and SQLAlchemy will quote the name - this will cause mismatches
-against data dictionary data received from Oracle, so unless identifier names have been
-truly created as case sensitive (i.e. using quoted names), all-lowercase names should be
-used on the SQLAlchemy side.
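-
-For example, assuming a table created in Oracle as MY_TABLE (unquoted, hence
-case insensitive) and a bound MetaData, it should be referred to with an
-all-lowercase name on the SQLAlchemy side::
-
-    t = Table('my_table', metadata, autoload=True)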
-
-Unicode
--------
-
-As of SQLAlchemy 0.6, the dialect uses the "native unicode" mode provided by cx_oracle 5.
-cx_oracle 5.0.2 or greater is recommended for support of NCLOB. If not using cx_oracle 5,
-the NLS_LANG environment variable needs to be set in order for the Oracle client library
-to use the proper encoding, such as "AMERICAN_AMERICA.UTF8".
-
-Also note that Oracle supports unicode data through the NVARCHAR and NCLOB data types.
-When using the SQLAlchemy Unicode and UnicodeText types, these DDL types will be used
-within CREATE TABLE statements. Usage of VARCHAR2 and CLOB with unicode text still
-requires NLS_LANG to be set.
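-
-For example, the variable can be set from Python, before the Oracle client
-library is first initialized (illustrative only)::
-
-    import os
-
-    os.environ['NLS_LANG'] = 'AMERICAN_AMERICA.UTF8'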
-
-LIMIT/OFFSET Support
---------------------
-
-Oracle has no support for the LIMIT or OFFSET keywords. SQLAlchemy uses
-a wrapped subquery approach in conjunction with ROWNUM. The exact methodology
-is taken from
-http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html .
-
-There are two options which affect its behavior:
-
-* the "FIRST_ROWS()" optimization keyword is not used by default. To enable
-  this optimization directive, specify ``optimize_limits=True`` to :func:`.create_engine`.
-* the values passed for the limit/offset are sent as bound parameters. Some users have observed
- that Oracle produces a poor query plan when the values are sent as binds and not
- rendered literally. To render the limit/offset values literally within the SQL
- statement, specify ``use_binds_for_limits=False`` to :func:`.create_engine`.
-
-Some users have reported better performance when the entirely different approach of a
-window query is used, i.e. ROW_NUMBER() OVER (ORDER BY), to provide LIMIT/OFFSET (note
-that the majority of users don't observe this). To suit this case the
-method used for LIMIT/OFFSET can be replaced entirely. See the recipe at
-http://www.sqlalchemy.org/trac/wiki/UsageRecipes/WindowFunctionsByDefault
-which installs a select compiler that overrides the generation of limit/offset with
-a window function.
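-
-For example, with a hypothetical table ``t``, a statement such as::
-
-    select([t]).limit(10).offset(5)
-
-is compiled into nested subqueries filtered on ``ROWNUM`` rather than into
-LIMIT/OFFSET keywords.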
-
-ON UPDATE CASCADE
------------------
-
-Oracle doesn't have native ON UPDATE CASCADE functionality. A trigger-based solution
-is available at http://asktom.oracle.com/tkyte/update_cascade/index.html .
-
-When using the SQLAlchemy ORM, the ORM has limited ability to manually issue
-cascading updates - specify ForeignKey objects using the
-``deferrable=True, initially='deferred'`` keyword arguments,
-and specify ``passive_updates=False`` on each relationship(), as sketched below.
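-
-A minimal sketch (the table, column and class names are illustrative only)::
-
-    Table('child', metadata,
-        Column('parent_id', Integer,
-               ForeignKey('parent.id', deferrable=True,
-                          initially='deferred'))
-    )
-
-    relationship(Parent, passive_updates=False)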
-
-Oracle 8 Compatibility
-----------------------
-
-When Oracle 8 is detected, the dialect internally configures itself to the following
-behaviors:
-
-* the use_ansi flag is set to False. This has the effect of converting all
- JOIN phrases into the WHERE clause, and in the case of LEFT OUTER JOIN
- makes use of Oracle's (+) operator.
-
-* the NVARCHAR2 and NCLOB datatypes are no longer generated as DDL when
- the :class:`~sqlalchemy.types.Unicode` is used - VARCHAR2 and CLOB are issued
-  instead. This is because these types don't seem to work correctly on Oracle 8
- even though they are available. The :class:`~sqlalchemy.types.NVARCHAR`
- and :class:`~sqlalchemy.dialects.oracle.NCLOB` types will always generate NVARCHAR2 and NCLOB.
-
-* the "native unicode" mode is disabled when using cx_oracle, i.e. SQLAlchemy
- encodes all Python unicode objects to "string" before passing in as bind parameters.
-
-Synonym/DBLINK Reflection
--------------------------
-
-When using reflection with Table objects, the dialect can optionally search for tables
-indicated by synonyms that reference DBLINK-ed tables by passing the flag
-oracle_resolve_synonyms=True as a keyword argument to the Table construct. If DBLINK
-is not in use, this flag should be left off.
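-
-For example (assuming a bound MetaData)::
-
-    t = Table('mytable', metadata, autoload=True,
-              oracle_resolve_synonyms=True)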
-
-"""
-
-import random, re
-
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import util, sql, log
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy.sql import compiler, visitors, expression
-from sqlalchemy.sql import operators as sql_operators, functions as sql_functions
-from sqlalchemy import types as sqltypes
-from sqlalchemy.types import VARCHAR, NVARCHAR, CHAR, DATE, DATETIME, \
- BLOB, CLOB, TIMESTAMP, FLOAT
-
-RESERVED_WORDS = \
- set('SHARE RAW DROP BETWEEN FROM DESC OPTION PRIOR LONG THEN '\
- 'DEFAULT ALTER IS INTO MINUS INTEGER NUMBER GRANT IDENTIFIED '\
- 'ALL TO ORDER ON FLOAT DATE HAVING CLUSTER NOWAIT RESOURCE '\
- 'ANY TABLE INDEX FOR UPDATE WHERE CHECK SMALLINT WITH DELETE '\
- 'BY ASC REVOKE LIKE SIZE RENAME NOCOMPRESS NULL GROUP VALUES '\
- 'AS IN VIEW EXCLUSIVE COMPRESS SYNONYM SELECT INSERT EXISTS '\
- 'NOT TRIGGER ELSE CREATE INTERSECT PCTFREE DISTINCT USER '\
- 'CONNECT SET MODE OF UNIQUE VARCHAR2 VARCHAR LOCK OR CHAR '\
- 'DECIMAL UNION PUBLIC AND START UID COMMENT'.split())
-
-NO_ARG_FNS = set('UID CURRENT_DATE SYSDATE USER '
- 'CURRENT_TIME CURRENT_TIMESTAMP'.split())
-
-class RAW(sqltypes.LargeBinary):
- pass
-OracleRaw = RAW
-
-class NCLOB(sqltypes.Text):
- __visit_name__ = 'NCLOB'
-
-VARCHAR2 = VARCHAR
-NVARCHAR2 = NVARCHAR
-
-class NUMBER(sqltypes.Numeric, sqltypes.Integer):
- __visit_name__ = 'NUMBER'
-
- def __init__(self, precision=None, scale=None, asdecimal=None):
- if asdecimal is None:
- asdecimal = bool(scale and scale > 0)
-
- super(NUMBER, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
-
- def adapt(self, impltype):
- ret = super(NUMBER, self).adapt(impltype)
- # leave a hint for the DBAPI handler
- ret._is_oracle_number = True
- return ret
-
- @property
- def _type_affinity(self):
- if bool(self.scale and self.scale > 0):
- return sqltypes.Numeric
- else:
- return sqltypes.Integer
-
-
-class DOUBLE_PRECISION(sqltypes.Numeric):
- __visit_name__ = 'DOUBLE_PRECISION'
- def __init__(self, precision=None, scale=None, asdecimal=None):
- if asdecimal is None:
- asdecimal = False
-
- super(DOUBLE_PRECISION, self).__init__(precision=precision, scale=scale, asdecimal=asdecimal)
-
-class BFILE(sqltypes.LargeBinary):
- __visit_name__ = 'BFILE'
-
-class LONG(sqltypes.Text):
- __visit_name__ = 'LONG'
-
-class INTERVAL(sqltypes.TypeEngine):
- __visit_name__ = 'INTERVAL'
-
- def __init__(self,
- day_precision=None,
- second_precision=None):
- """Construct an INTERVAL.
-
- Note that only DAY TO SECOND intervals are currently supported.
- This is due to a lack of support for YEAR TO MONTH intervals
- within available DBAPIs (cx_oracle and zxjdbc).
-
- :param day_precision: the day precision value. this is the number of digits
- to store for the day field. Defaults to "2"
- :param second_precision: the second precision value. this is the number of digits
- to store for the fractional seconds field. Defaults to "6".
-
- """
- self.day_precision = day_precision
- self.second_precision = second_precision
-
- @classmethod
- def _adapt_from_generic_interval(cls, interval):
- return INTERVAL(day_precision=interval.day_precision,
- second_precision=interval.second_precision)
-
- @property
- def _type_affinity(self):
- return sqltypes.Interval
-
-class ROWID(sqltypes.TypeEngine):
- """Oracle ROWID type.
-
- When used in a cast() or similar, generates ROWID.
-
- """
- __visit_name__ = 'ROWID'
-
-
-
-class _OracleBoolean(sqltypes.Boolean):
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
-colspecs = {
- sqltypes.Boolean : _OracleBoolean,
- sqltypes.Interval : INTERVAL,
-}
-
-ischema_names = {
- 'VARCHAR2' : VARCHAR,
- 'NVARCHAR2' : NVARCHAR,
- 'CHAR' : CHAR,
- 'DATE' : DATE,
- 'NUMBER' : NUMBER,
- 'BLOB' : BLOB,
- 'BFILE' : BFILE,
- 'CLOB' : CLOB,
- 'NCLOB' : NCLOB,
- 'TIMESTAMP' : TIMESTAMP,
- 'TIMESTAMP WITH TIME ZONE' : TIMESTAMP,
- 'INTERVAL DAY TO SECOND' : INTERVAL,
- 'RAW' : RAW,
- 'FLOAT' : FLOAT,
- 'DOUBLE PRECISION' : DOUBLE_PRECISION,
- 'LONG' : LONG,
-}
-
-
-class OracleTypeCompiler(compiler.GenericTypeCompiler):
- # Note:
- # Oracle DATE == DATETIME
- # Oracle does not allow milliseconds in DATE
- # Oracle does not support TIME columns
-
- def visit_datetime(self, type_):
- return self.visit_DATE(type_)
-
- def visit_float(self, type_):
- return self.visit_FLOAT(type_)
-
- def visit_unicode(self, type_):
- if self.dialect._supports_nchar:
- return self.visit_NVARCHAR(type_)
- else:
- return self.visit_VARCHAR(type_)
-
- def visit_INTERVAL(self, type_):
- return "INTERVAL DAY%s TO SECOND%s" % (
- type_.day_precision is not None and
- "(%d)" % type_.day_precision or
- "",
- type_.second_precision is not None and
- "(%d)" % type_.second_precision or
- "",
- )
-
- def visit_TIMESTAMP(self, type_):
- if type_.timezone:
- return "TIMESTAMP WITH TIME ZONE"
- else:
- return "TIMESTAMP"
-
- def visit_DOUBLE_PRECISION(self, type_):
- return self._generate_numeric(type_, "DOUBLE PRECISION")
-
- def visit_NUMBER(self, type_, **kw):
- return self._generate_numeric(type_, "NUMBER", **kw)
-
- def _generate_numeric(self, type_, name, precision=None, scale=None):
- if precision is None:
- precision = type_.precision
-
- if scale is None:
- scale = getattr(type_, 'scale', None)
-
- if precision is None:
- return name
- elif scale is None:
- return "%(name)s(%(precision)s)" % {'name':name,'precision': precision}
- else:
- return "%(name)s(%(precision)s, %(scale)s)" % {'name':name,'precision': precision, 'scale' : scale}
-
- def visit_VARCHAR(self, type_):
- if self.dialect._supports_char_length:
- return "VARCHAR(%(length)s CHAR)" % {'length' : type_.length}
- else:
- return "VARCHAR(%(length)s)" % {'length' : type_.length}
-
- def visit_NVARCHAR(self, type_):
- return "NVARCHAR2(%(length)s)" % {'length' : type_.length}
-
- def visit_text(self, type_):
- return self.visit_CLOB(type_)
-
- def visit_unicode_text(self, type_):
- if self.dialect._supports_nchar:
- return self.visit_NCLOB(type_)
- else:
- return self.visit_CLOB(type_)
-
- def visit_large_binary(self, type_):
- return self.visit_BLOB(type_)
-
- def visit_big_integer(self, type_):
- return self.visit_NUMBER(type_, precision=19)
-
- def visit_boolean(self, type_):
- return self.visit_SMALLINT(type_)
-
- def visit_RAW(self, type_):
- return "RAW(%(length)s)" % {'length' : type_.length}
-
- def visit_ROWID(self, type_):
- return "ROWID"
-
-class OracleCompiler(compiler.SQLCompiler):
- """Oracle compiler modifies the lexical structure of Select
- statements to work under non-ANSI configured Oracle databases, if
- the use_ansi flag is False.
- """
-
- compound_keywords = util.update_copy(
- compiler.SQLCompiler.compound_keywords,
- {
- expression.CompoundSelect.EXCEPT : 'MINUS'
- }
- )
-
- def __init__(self, *args, **kwargs):
- self.__wheres = {}
- self._quoted_bind_names = {}
- super(OracleCompiler, self).__init__(*args, **kwargs)
-
- def visit_mod(self, binary, **kw):
- return "mod(%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
- def visit_now_func(self, fn, **kw):
- return "CURRENT_TIMESTAMP"
-
- def visit_char_length_func(self, fn, **kw):
- return "LENGTH" + self.function_argspec(fn, **kw)
-
- def visit_match_op(self, binary, **kw):
- return "CONTAINS (%s, %s)" % (self.process(binary.left), self.process(binary.right))
-
- def get_select_hint_text(self, byfroms):
- return " ".join(
- "/*+ %s */" % text for table, text in byfroms.items()
- )
-
- def function_argspec(self, fn, **kw):
- if len(fn.clauses) > 0 or fn.name.upper() not in NO_ARG_FNS:
- return compiler.SQLCompiler.function_argspec(self, fn, **kw)
- else:
- return ""
-
- def default_from(self):
- """Called when a ``SELECT`` statement has no froms,
- and no ``FROM`` clause is to be appended.
-
- The Oracle compiler tacks a "FROM DUAL" to the statement.
- """
-
- return " FROM DUAL"
-
- def visit_join(self, join, **kwargs):
- if self.dialect.use_ansi:
- return compiler.SQLCompiler.visit_join(self, join, **kwargs)
- else:
- kwargs['asfrom'] = True
- return self.process(join.left, **kwargs) + \
- ", " + self.process(join.right, **kwargs)
-
- def _get_nonansi_join_whereclause(self, froms):
- clauses = []
-
- def visit_join(join):
- if join.isouter:
- def visit_binary(binary):
- if binary.operator == sql_operators.eq:
- if binary.left.table is join.right:
- binary.left = _OuterJoinColumn(binary.left)
- elif binary.right.table is join.right:
- binary.right = _OuterJoinColumn(binary.right)
- clauses.append(visitors.cloned_traverse(join.onclause, {},
- {'binary':visit_binary}))
- else:
- clauses.append(join.onclause)
-
- for j in join.left, join.right:
- if isinstance(j, expression.Join):
- visit_join(j)
-
- for f in froms:
- if isinstance(f, expression.Join):
- visit_join(f)
-
- if not clauses:
- return None
- else:
- return sql.and_(*clauses)
-
- def visit_outer_join_column(self, vc):
- return self.process(vc.column) + "(+)"
-
- def visit_sequence(self, seq):
- return self.dialect.identifier_preparer.format_sequence(seq) + ".nextval"
-
- def visit_alias(self, alias, asfrom=False, ashint=False, **kwargs):
-        """Oracle doesn't like ``FROM table AS alias``; the ``AS`` keyword is omitted."""
-
- if asfrom or ashint:
- alias_name = isinstance(alias.name, expression._generated_label) and \
- self._truncated_identifier("alias", alias.name) or alias.name
-
- if ashint:
- return alias_name
- elif asfrom:
- return self.process(alias.original, asfrom=asfrom, **kwargs) + \
- " " + self.preparer.format_alias(alias, alias_name)
- else:
-                    # resolve the bind name first, as it is needed by the
-                    # error message below as well as by the var() call
-                    name = self.compiled.bind_names[bindparam]
-                    if dbtype is None:
-                        raise exc.InvalidRequestError("Cannot create out parameter for parameter "
-                                "%r - its type %r is not supported by"
-                                " cx_oracle" %
-                                (name, bindparam.type)
-                                )
-                    self.out_parameters[name] = self.cursor.var(dbtype)
-
- columnlist = list(expression._select_iterables(returning_cols))
-
- # within_columns_clause =False so that labels (foo AS bar) don't render
- columns = [self.process(c, within_columns_clause=False, result_map=self.result_map) for c in columnlist]
-
- binds = [create_out_param(c, i) for i, c in enumerate(columnlist)]
-
- return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
-
- def _TODO_visit_compound_select(self, select):
- """Need to determine how to get ``LIMIT``/``OFFSET`` into a ``UNION`` for Oracle."""
- pass
-
- def visit_select(self, select, **kwargs):
-        """Look for ``LIMIT`` and ``OFFSET`` in a select statement, and if
-        found, wrap the statement in a subquery with a ``ROWNUM`` criterion.
-        """
-
- if not getattr(select, '_oracle_visit', None):
- if not self.dialect.use_ansi:
- if self.stack and 'from' in self.stack[-1]:
- existingfroms = self.stack[-1]['from']
- else:
- existingfroms = None
-
- froms = select._get_display_froms(existingfroms)
- whereclause = self._get_nonansi_join_whereclause(froms)
- if whereclause is not None:
- select = select.where(whereclause)
- select._oracle_visit = True
-
- if select._limit is not None or select._offset is not None:
- # See http://www.oracle.com/technology/oramag/oracle/06-sep/o56asktom.html
- #
- # Generalized form of an Oracle pagination query:
- # select ... from (
- # select /*+ FIRST_ROWS(N) */ ...., rownum as ora_rn from (
- # select distinct ... where ... order by ...
- # ) where ROWNUM <= :limit+:offset
- # ) where ora_rn > :offset
- # Outer select and "ROWNUM as ora_rn" can be dropped if limit=0
-
- # TODO: use annotations instead of clone + attr set ?
- select = select._generate()
- select._oracle_visit = True
-
- # Wrap the middle select and add the hint
- limitselect = sql.select([c for c in select.c])
- if select._limit and self.dialect.optimize_limits:
- limitselect = limitselect.prefix_with("/*+ FIRST_ROWS(%d) */" % select._limit)
-
- limitselect._oracle_visit = True
- limitselect._is_wrapper = True
-
- # If needed, add the limiting clause
- if select._limit is not None:
- max_row = select._limit
- if select._offset is not None:
- max_row += select._offset
- if not self.dialect.use_binds_for_limits:
- max_row = sql.literal_column("%d" % max_row)
- limitselect.append_whereclause(
- sql.literal_column("ROWNUM")<=max_row)
-
- # If needed, add the ora_rn, and wrap again with offset.
- if select._offset is None:
- limitselect.for_update = select.for_update
- select = limitselect
- else:
- limitselect = limitselect.column(
- sql.literal_column("ROWNUM").label("ora_rn"))
- limitselect._oracle_visit = True
- limitselect._is_wrapper = True
-
- offsetselect = sql.select(
- [c for c in limitselect.c if c.key!='ora_rn'])
- offsetselect._oracle_visit = True
- offsetselect._is_wrapper = True
-
- offset_value = select._offset
- if not self.dialect.use_binds_for_limits:
- offset_value = sql.literal_column("%d" % offset_value)
- offsetselect.append_whereclause(
- sql.literal_column("ora_rn")>offset_value)
-
- offsetselect.for_update = select.for_update
- select = offsetselect
-
- kwargs['iswrapper'] = getattr(select, '_is_wrapper', False)
- return compiler.SQLCompiler.visit_select(self, select, **kwargs)
-
- def limit_clause(self, select):
- return ""
-
- def for_update_clause(self, select):
- if self.is_subquery():
- return ""
- elif select.for_update == "nowait":
- return " FOR UPDATE NOWAIT"
- else:
- return super(OracleCompiler, self).for_update_clause(select)
-
-class OracleDDLCompiler(compiler.DDLCompiler):
-
- def define_constraint_cascades(self, constraint):
- text = ""
- if constraint.ondelete is not None:
- text += " ON DELETE %s" % constraint.ondelete
-
- # oracle has no ON UPDATE CASCADE -
- # its only available via triggers http://asktom.oracle.com/tkyte/update_cascade/index.html
- if constraint.onupdate is not None:
-            util.warn(
-                "Oracle does not have native ON UPDATE CASCADE "
-                "functionality - onupdates will not be rendered for foreign keys. "
-                "Consider using deferrable=True, initially='deferred' or triggers.")
-
- return text
-
-class OracleIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = set([x.lower() for x in RESERVED_WORDS])
-    # the digits are stored as strings, since they are compared against
-    # the first character of identifier names
-    illegal_initial_characters = set([str(x) for x in xrange(0, 10)]).union(["_", "$"])
-
- def _bindparam_requires_quotes(self, value):
- """Return True if the given identifier requires quoting."""
- lc_value = value.lower()
- return (lc_value in self.reserved_words
- or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(unicode(value))
- )
-
- def format_savepoint(self, savepoint):
- name = re.sub(r'^_+', '', savepoint.ident)
- return super(OracleIdentifierPreparer, self).format_savepoint(savepoint, name)
-
-
-class OracleExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, type_):
- return self._execute_scalar("SELECT " +
- self.dialect.identifier_preparer.format_sequence(seq) +
- ".nextval FROM DUAL", type_)
-
-class OracleDialect(default.DefaultDialect):
- name = 'oracle'
- supports_alter = True
- supports_unicode_statements = False
- supports_unicode_binds = False
- max_identifier_length = 30
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = False
-
- supports_sequences = True
- sequences_optional = False
- postfetch_lastrowid = False
-
- default_paramstyle = 'named'
- colspecs = colspecs
- ischema_names = ischema_names
- requires_name_normalize = True
-
- supports_default_values = False
- supports_empty_insert = False
-
- statement_compiler = OracleCompiler
- ddl_compiler = OracleDDLCompiler
- type_compiler = OracleTypeCompiler
- preparer = OracleIdentifierPreparer
- execution_ctx_cls = OracleExecutionContext
-
- reflection_options = ('oracle_resolve_synonyms', )
-
- def __init__(self,
- use_ansi=True,
- optimize_limits=False,
- use_binds_for_limits=True,
- **kwargs):
- default.DefaultDialect.__init__(self, **kwargs)
- self.use_ansi = use_ansi
- self.optimize_limits = optimize_limits
- self.use_binds_for_limits = use_binds_for_limits
-
- def initialize(self, connection):
- super(OracleDialect, self).initialize(connection)
- self.implicit_returning = self.__dict__.get(
- 'implicit_returning',
- self.server_version_info > (10, )
- )
-
- if self._is_oracle_8:
- self.colspecs = self.colspecs.copy()
- self.colspecs.pop(sqltypes.Interval)
- self.use_ansi = False
-
- @property
- def _is_oracle_8(self):
- return self.server_version_info and \
- self.server_version_info < (9, )
-
- @property
- def _supports_char_length(self):
- return not self._is_oracle_8
-
- @property
- def _supports_nchar(self):
- return not self._is_oracle_8
-
- def do_release_savepoint(self, connection, name):
- # Oracle does not support RELEASE SAVEPOINT
- pass
-
- def has_table(self, connection, table_name, schema=None):
- if not schema:
- schema = self.default_schema_name
- cursor = connection.execute(
- sql.text("SELECT table_name FROM all_tables "
- "WHERE table_name = :name AND owner = :schema_name"),
- name=self.denormalize_name(table_name), schema_name=self.denormalize_name(schema))
- return cursor.first() is not None
-
- def has_sequence(self, connection, sequence_name, schema=None):
- if not schema:
- schema = self.default_schema_name
- cursor = connection.execute(
- sql.text("SELECT sequence_name FROM all_sequences "
- "WHERE sequence_name = :name AND sequence_owner = :schema_name"),
- name=self.denormalize_name(sequence_name), schema_name=self.denormalize_name(schema))
- return cursor.first() is not None
-
- def normalize_name(self, name):
- if name is None:
- return None
- # Py2K
- if isinstance(name, str):
- name = name.decode(self.encoding)
- # end Py2K
- if name.upper() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
- return name.lower()
- else:
- return name
-
- def denormalize_name(self, name):
- if name is None:
- return None
- elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()):
- name = name.upper()
- # Py2K
- if not self.supports_unicode_binds:
- name = name.encode(self.encoding)
- else:
- name = unicode(name)
- # end Py2K
- return name
-
- def _get_default_schema_name(self, connection):
- return self.normalize_name(connection.execute(u'SELECT USER FROM DUAL').scalar())
-
- def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
- """search for a local synonym matching the given desired owner/name.
-
- if desired_owner is None, attempts to locate a distinct owner.
-
- returns the actual name, owner, dblink name, and synonym name if found.
- """
-
- q = "SELECT owner, table_owner, table_name, db_link, synonym_name FROM all_synonyms WHERE "
- clauses = []
- params = {}
- if desired_synonym:
- clauses.append("synonym_name = :synonym_name")
- params['synonym_name'] = desired_synonym
- if desired_owner:
- clauses.append("table_owner = :desired_owner")
- params['desired_owner'] = desired_owner
- if desired_table:
- clauses.append("table_name = :tname")
- params['tname'] = desired_table
-
- q += " AND ".join(clauses)
-
- result = connection.execute(sql.text(q), **params)
- if desired_owner:
- row = result.first()
- if row:
- return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name']
- else:
- return None, None, None, None
- else:
- rows = result.fetchall()
- if len(rows) > 1:
-                raise AssertionError("There are multiple tables visible to the schema; you must specify an owner")
- elif len(rows) == 1:
- row = rows[0]
- return row['table_name'], row['table_owner'], row['db_link'], row['synonym_name']
- else:
- return None, None, None, None
-
- @reflection.cache
- def _prepare_reflection_args(self, connection, table_name, schema=None,
- resolve_synonyms=False, dblink='', **kw):
-
- if resolve_synonyms:
- actual_name, owner, dblink, synonym = self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(schema),
- desired_synonym=self.denormalize_name(table_name)
- )
- else:
- actual_name, owner, dblink, synonym = None, None, None, None
- if not actual_name:
- actual_name = self.denormalize_name(table_name)
- if not dblink:
- dblink = ''
- if not owner:
- owner = self.denormalize_name(schema or self.default_schema_name)
- return (actual_name, owner, dblink, synonym)
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = "SELECT username FROM all_users ORDER BY username"
-        cursor = connection.execute(s)
- return [self.normalize_name(row[0]) for row in cursor]
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- schema = self.denormalize_name(schema or self.default_schema_name)
-
-        # note that table_names() isn't loading DBLINKed or synonym'ed tables
- if schema is None:
- schema = self.default_schema_name
- s = sql.text(
- "SELECT table_name FROM all_tables "
- "WHERE nvl(tablespace_name, 'no tablespace') NOT IN ('SYSTEM', 'SYSAUX') "
- "AND OWNER = :owner "
- "AND IOT_NAME IS NULL")
- cursor = connection.execute(s, owner=schema)
- return [self.normalize_name(row[0]) for row in cursor]
-
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- schema = self.denormalize_name(schema or self.default_schema_name)
- s = sql.text("SELECT view_name FROM all_views WHERE owner = :owner")
- cursor = connection.execute(s, owner=self.denormalize_name(schema))
- return [self.normalize_name(row[0]) for row in cursor]
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- """
-
- kw arguments can be:
-
- oracle_resolve_synonyms
-
- dblink
-
- """
-
- resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
- dblink = kw.get('dblink', '')
- info_cache = kw.get('info_cache')
-
- (table_name, schema, dblink, synonym) = \
- self._prepare_reflection_args(connection, table_name, schema,
- resolve_synonyms, dblink,
- info_cache=info_cache)
- columns = []
- if self._supports_char_length:
- char_length_col = 'char_length'
- else:
- char_length_col = 'data_length'
-
- c = connection.execute(sql.text(
- "SELECT column_name, data_type, %(char_length_col)s, data_precision, data_scale, "
- "nullable, data_default FROM ALL_TAB_COLUMNS%(dblink)s "
- "WHERE table_name = :table_name AND owner = :owner "
- "ORDER BY column_id" % {'dblink': dblink, 'char_length_col':char_length_col}),
- table_name=table_name, owner=schema)
-
- for row in c:
- (colname, orig_colname, coltype, length, precision, scale, nullable, default) = \
- (self.normalize_name(row[0]), row[0], row[1], row[2], row[3], row[4], row[5]=='Y', row[6])
-
- if coltype == 'NUMBER' :
- coltype = NUMBER(precision, scale)
- elif coltype in ('VARCHAR2', 'NVARCHAR2', 'CHAR'):
- coltype = self.ischema_names.get(coltype)(length)
- elif 'WITH TIME ZONE' in coltype:
- coltype = TIMESTAMP(timezone=True)
- else:
- coltype = re.sub(r'\(\d+\)', '', coltype)
- try:
- coltype = self.ischema_names[coltype]
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, colname))
- coltype = sqltypes.NULLTYPE
-
- cdict = {
- 'name': colname,
- 'type': coltype,
- 'nullable': nullable,
- 'default': default,
- 'autoincrement':default is None
- }
- if orig_colname.lower() == orig_colname:
- cdict['quote'] = True
-
- columns.append(cdict)
- return columns
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema=None,
- resolve_synonyms=False, dblink='', **kw):
-
- info_cache = kw.get('info_cache')
- (table_name, schema, dblink, synonym) = \
- self._prepare_reflection_args(connection, table_name, schema,
- resolve_synonyms, dblink,
- info_cache=info_cache)
- q = sql.text("""
- SELECT a.index_name, a.column_name, b.uniqueness
- FROM ALL_IND_COLUMNS%(dblink)s a,
- ALL_INDEXES%(dblink)s b
- WHERE
- a.index_name = b.index_name
- AND a.table_owner = b.table_owner
- AND a.table_name = b.table_name
-
- AND a.table_name = :table_name
- AND a.table_owner = :schema
- ORDER BY a.index_name, a.column_position""" % {'dblink': dblink})
- rp = connection.execute(q, table_name=self.denormalize_name(table_name),
- schema=self.denormalize_name(schema))
- indexes = []
- last_index_name = None
- pkeys = self.get_primary_keys(connection, table_name, schema,
- resolve_synonyms=resolve_synonyms,
- dblink=dblink,
- info_cache=kw.get('info_cache'))
- uniqueness = dict(NONUNIQUE=False, UNIQUE=True)
-
- oracle_sys_col = re.compile(r'SYS_NC\d+\$', re.IGNORECASE)
-
- def upper_name_set(names):
- return set([i.upper() for i in names])
-
- pk_names = upper_name_set(pkeys)
-
- def remove_if_primary_key(index):
- # don't include the primary key index
- if index is not None and \
- upper_name_set(index['column_names']) == pk_names:
- indexes.pop()
-
- index = None
- for rset in rp:
- if rset.index_name != last_index_name:
- remove_if_primary_key(index)
- index = dict(name=self.normalize_name(rset.index_name), column_names=[])
- indexes.append(index)
- index['unique'] = uniqueness.get(rset.uniqueness, False)
-
- # filter out Oracle SYS_NC names. could also do an outer join
- # to the all_tab_columns table and check for real col names there.
- if not oracle_sys_col.match(rset.column_name):
- index['column_names'].append(self.normalize_name(rset.column_name))
- last_index_name = rset.index_name
- remove_if_primary_key(index)
- return indexes
-
- @reflection.cache
- def _get_constraint_data(self, connection, table_name, schema=None,
- dblink='', **kw):
-
- rp = connection.execute(
- sql.text("""SELECT
- ac.constraint_name,
- ac.constraint_type,
- loc.column_name AS local_column,
- rem.table_name AS remote_table,
- rem.column_name AS remote_column,
- rem.owner AS remote_owner,
- loc.position as loc_pos,
- rem.position as rem_pos
- FROM all_constraints%(dblink)s ac,
- all_cons_columns%(dblink)s loc,
- all_cons_columns%(dblink)s rem
- WHERE ac.table_name = :table_name
- AND ac.constraint_type IN ('R','P')
- AND ac.owner = :owner
- AND ac.owner = loc.owner
- AND ac.constraint_name = loc.constraint_name
- AND ac.r_owner = rem.owner(+)
- AND ac.r_constraint_name = rem.constraint_name(+)
- AND (rem.position IS NULL or loc.position=rem.position)
- ORDER BY ac.constraint_name, loc.position""" % {'dblink': dblink}),
- table_name=table_name, owner=schema)
- constraint_data = rp.fetchall()
- return constraint_data
-
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- """
-
- kw arguments can be:
-
- oracle_resolve_synonyms
-
- dblink
-
- """
- return self._get_primary_keys(connection, table_name, schema, **kw)[0]
-
- @reflection.cache
- def _get_primary_keys(self, connection, table_name, schema=None, **kw):
- resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
- dblink = kw.get('dblink', '')
- info_cache = kw.get('info_cache')
-
- (table_name, schema, dblink, synonym) = \
- self._prepare_reflection_args(connection, table_name, schema,
- resolve_synonyms, dblink,
- info_cache=info_cache)
- pkeys = []
- constraint_name = None
- constraint_data = self._get_constraint_data(connection, table_name,
- schema, dblink,
- info_cache=kw.get('info_cache'))
-
- for row in constraint_data:
- (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
- row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
- if cons_type == 'P':
- if constraint_name is None:
- constraint_name = self.normalize_name(cons_name)
- pkeys.append(local_column)
- return pkeys, constraint_name
-
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- cols, name = self._get_primary_keys(connection, table_name, schema=schema, **kw)
-
- return {
- 'constrained_columns':cols,
- 'name':name
- }
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- """
-
- kw arguments can be:
-
- oracle_resolve_synonyms
-
- dblink
-
- """
-
- requested_schema = schema # to check later on
- resolve_synonyms = kw.get('oracle_resolve_synonyms', False)
- dblink = kw.get('dblink', '')
- info_cache = kw.get('info_cache')
-
- (table_name, schema, dblink, synonym) = \
- self._prepare_reflection_args(connection, table_name, schema,
- resolve_synonyms, dblink,
- info_cache=info_cache)
-
- constraint_data = self._get_constraint_data(connection, table_name,
- schema, dblink,
- info_cache=kw.get('info_cache'))
-
- def fkey_rec():
- return {
- 'name' : None,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : None,
- 'referred_columns' : []
- }
-
- fkeys = util.defaultdict(fkey_rec)
-
- for row in constraint_data:
- (cons_name, cons_type, local_column, remote_table, remote_column, remote_owner) = \
- row[0:2] + tuple([self.normalize_name(x) for x in row[2:6]])
-
- if cons_type == 'R':
- if remote_table is None:
- # ticket 363
- util.warn(
- ("Got 'None' querying 'table_name' from "
- "all_cons_columns%(dblink)s - does the user have "
- "proper rights to the table?") % {'dblink':dblink})
- continue
-
- rec = fkeys[cons_name]
- rec['name'] = cons_name
- local_cols, remote_cols = rec['constrained_columns'], rec['referred_columns']
-
- if not rec['referred_table']:
- if resolve_synonyms:
- ref_remote_name, ref_remote_owner, ref_dblink, ref_synonym = \
- self._resolve_synonym(
- connection,
- desired_owner=self.denormalize_name(remote_owner),
- desired_table=self.denormalize_name(remote_table)
- )
- if ref_synonym:
- remote_table = self.normalize_name(ref_synonym)
- remote_owner = self.normalize_name(ref_remote_owner)
-
- rec['referred_table'] = remote_table
-
- if requested_schema is not None or self.denormalize_name(remote_owner) != schema:
- rec['referred_schema'] = remote_owner
-
- local_cols.append(local_column)
- remote_cols.append(remote_column)
-
- return fkeys.values()
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None,
- resolve_synonyms=False, dblink='', **kw):
- info_cache = kw.get('info_cache')
- (view_name, schema, dblink, synonym) = \
- self._prepare_reflection_args(connection, view_name, schema,
- resolve_synonyms, dblink,
- info_cache=info_cache)
- s = sql.text("""
- SELECT text FROM all_views
- WHERE owner = :schema
- AND view_name = :view_name
- """)
- rp = connection.execute(s,
- view_name=view_name, schema=schema).scalar()
- if rp:
- return rp.decode(self.encoding)
- else:
- return None
-
-
-
-class _OuterJoinColumn(sql.ClauseElement):
- __visit_name__ = 'outer_join_column'
-
- def __init__(self, column):
- self.column = column
-
-
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/cx_oracle.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/cx_oracle.py
deleted file mode 100755
index a917aac0..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/cx_oracle.py
+++ /dev/null
@@ -1,718 +0,0 @@
-# oracle/cx_oracle.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Oracle database via the cx_oracle driver.
-
-Driver
-------
-
-The Oracle dialect uses the cx_oracle driver, available at
-http://cx-oracle.sourceforge.net/ . The dialect has several behaviors
-which are specifically tailored towards compatibility with this module.
-Version 5.0 or greater is **strongly** recommended, as SQLAlchemy makes
-extensive use of the cx_oracle output converters for numeric and
-string conversions.
-
-Connecting
-----------
-
-Connecting with create_engine() uses the standard URL approach of
-``oracle://user:pass@host:port/dbname[?key=value&key=value...]``. If dbname is present, the
-host, port, and dbname tokens are converted to a TNS name using the cx_oracle
-:func:`makedsn()` function. Otherwise, the host token is taken directly as a TNS name.
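-
-For example, a URL such as ``oracle://scott:tiger@hostname:1521/dbname`` produces
-a DSN roughly equivalent to (illustrative)::
-
-    dsn = cx_Oracle.makedsn('hostname', 1521, 'dbname')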
-
-Additional arguments which may be specified either as query string arguments on the
-URL, or as keyword arguments to :func:`~sqlalchemy.create_engine()` are:
-
-* *allow_twophase* - enable two-phase transactions. Defaults to ``True``.
-
-* *arraysize* - set the cx_oracle.arraysize value on cursors; in SQLAlchemy
-  it defaults to 50. See the section on "LOB Objects" below.
-
-* *auto_convert_lobs* - defaults to True, see the section on LOB objects.
-
-* *auto_setinputsizes* - the cx_oracle.setinputsizes() call is issued for all bind parameters.
- This is required for LOB datatypes but can be disabled to reduce overhead. Defaults
- to ``True``.
-
-* *mode* - This is given the string value of SYSDBA or SYSOPER, or alternatively an
- integer value. This value is only available as a URL query string argument.
-
-* *threaded* - enable multithreaded access to cx_oracle connections. Defaults
- to ``True``. Note that this is the opposite default of cx_oracle itself.
-
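-For example, driver flags may be passed on the query string (values shown are
-illustrative only)::
-
-    create_engine("oracle://scott:tiger@tnsname?mode=SYSDBA&threaded=false")
-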
-Unicode
--------
-
-cx_oracle 5 fully supports Python unicode objects. SQLAlchemy will pass
-all unicode strings directly to cx_oracle, and additionally uses an output
-handler so that all string based result values are returned as unicode as well.
-
-Note that this behavior is disabled when Oracle 8 is detected, as it has been
-observed that issues remain when passing Python unicodes to cx_oracle with Oracle 8.
-
-LOB Objects
------------
-
-cx_oracle returns Oracle LOBs using the cx_oracle.LOB object. SQLAlchemy converts
-these to strings so that the interface of the Binary type is consistent with that of
-other backends, and so that the linkage to a live cursor is not needed in scenarios
-like result.fetchmany() and result.fetchall(). This means that by default, LOB
-objects are fully fetched unconditionally by SQLAlchemy, and the linkage to a live
-cursor is broken.
-
-To disable this processing, pass ``auto_convert_lobs=False`` to :func:`create_engine()`.
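-
-For example (the URL is a placeholder)::
-
-    engine = create_engine('oracle://scott:tiger@dsn', auto_convert_lobs=False)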
-
-Two Phase Transaction Support
------------------------------
-
-Two Phase transactions are implemented using XA transactions. Success has been reported
-with this feature but it should be regarded as experimental.
-
-Precision Numerics
-------------------
-
-The SQLAlchemy dialect goes through a number of steps to ensure
-that decimal numbers are sent and received with full accuracy.
-An "outputtypehandler" callable is associated with each
-cx_oracle connection object which detects numeric types and
-receives them as string values, instead of receiving a Python
-``float`` directly; the string is then passed to the Python
-``Decimal`` constructor. The :class:`.Numeric` and
-:class:`.Float` types under the cx_oracle dialect are aware of
-this behavior, and will coerce the ``Decimal`` to ``float`` if
-the ``asdecimal`` flag is ``False`` (default on :class:`.Float`,
-optional on :class:`.Numeric`).
-
-The handler attempts to use the "precision" and "scale"
-attributes of the result set column to best determine if
-subsequent incoming values should be received as ``Decimal`` as
-opposed to int (in which case no processing is added). There are
-several scenarios where OCI_ does not provide unambiguous data
-as to the numeric type, including some situations where
-individual rows may return a combination of floating point and
-integer values. Certain combinations of "precision" and "scale" have
-been observed to indicate this scenario. When it occurs, the
-outputtypehandler receives the value as a string and passes it to a
-processing function which detects, for each returned value, whether a
-decimal point is present, converting to ``Decimal`` if so and
-to int otherwise. The intention is that simple int-based
-statements like "SELECT my_seq.nextval() FROM DUAL" continue to
-return ints and not ``Decimal`` objects, and that any kind of
-floating point value is received as a string so that there is no
-floating point loss of precision.
-
-The "decimal point is present" logic itself is also sensitive to
-locale. Under OCI_, this is controlled by the NLS_LANG
-environment variable. Upon first connection, the dialect runs a
-test to determine the current "decimal" character, which can be
-a comma "," for european locales. From that point forward the
-outputtypehandler uses that character to represent a decimal
-point (this behavior is new in version 0.6.6). Note that
-cx_oracle 5.0.3 or greater is required when dealing with
-numerics with locale settings that don't use a period "." as the
-decimal character.
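-
-For example (illustrative; the column definitions are placeholders)::
-
-    Column('price', Numeric(10, 2))                   # rows yield Decimal values
-    Column('ratio', Numeric(10, 2, asdecimal=False))  # rows yield floats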
-
-.. _OCI: http://www.oracle.com/technetwork/database/features/oci/index.html
-
-"""
-
-from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, \
- RESERVED_WORDS, OracleExecutionContext
-from sqlalchemy.dialects.oracle import base as oracle
-from sqlalchemy.engine import base
-from sqlalchemy import types as sqltypes, util, exc, processors
-from datetime import datetime
-import random
-import collections
-from sqlalchemy.util.compat import decimal
-import re
-
-class _OracleNumeric(sqltypes.Numeric):
- def bind_processor(self, dialect):
- # cx_oracle accepts Decimal objects and floats
- return None
-
- def result_processor(self, dialect, coltype):
- # we apply a cx_oracle type handler to all connections
- # that converts floating point strings to Decimal().
- # However, in some subquery situations, Oracle doesn't
- # give us enough information to determine int or Decimal.
- # It could even be int/Decimal differently on each row,
- # regardless of the scale given for the originating type.
- # So we still need an old school isinstance() handler
- # here for decimals.
-
- if dialect.supports_native_decimal:
- if self.asdecimal:
- if self.scale is None:
- fstring = "%.10f"
- else:
- fstring = "%%.%df" % self.scale
- def to_decimal(value):
- if value is None:
- return None
- elif isinstance(value, decimal.Decimal):
- return value
- else:
- return decimal.Decimal(fstring % value)
- return to_decimal
- else:
- if self.precision is None and self.scale is None:
- return processors.to_float
- elif not getattr(self, '_is_oracle_number', False) \
- and self.scale is not None:
- return processors.to_float
- else:
- return None
- else:
- # cx_oracle 4 behavior, will assume
- # floats
- return super(_OracleNumeric, self).\
- result_processor(dialect, coltype)
-
-class _OracleDate(sqltypes.Date):
- def bind_processor(self, dialect):
- return None
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is not None:
- return value.date()
- else:
- return value
- return process
-
-class _LOBMixin(object):
- def result_processor(self, dialect, coltype):
- if not dialect.auto_convert_lobs:
- # return the cx_oracle.LOB directly.
- return None
-
- def process(value):
- if value is not None:
- return value.read()
- else:
- return value
- return process
-
-class _NativeUnicodeMixin(object):
- # Py3K
- #pass
- # Py2K
- def bind_processor(self, dialect):
- if dialect._cx_oracle_with_unicode:
- def process(value):
- if value is None:
- return value
- else:
- return unicode(value)
- return process
- else:
- return super(_NativeUnicodeMixin, self).bind_processor(dialect)
- # end Py2K
-
- # we apply a connection output handler that returns
- # unicode in all cases, so the "native_unicode" flag
- # will be set for the default String.result_processor.
-
-class _OracleChar(_NativeUnicodeMixin, sqltypes.CHAR):
- def get_dbapi_type(self, dbapi):
- return dbapi.FIXED_CHAR
-
-class _OracleNVarChar(_NativeUnicodeMixin, sqltypes.NVARCHAR):
- def get_dbapi_type(self, dbapi):
- return getattr(dbapi, 'UNICODE', dbapi.STRING)
-
-class _OracleText(_LOBMixin, sqltypes.Text):
- def get_dbapi_type(self, dbapi):
- return dbapi.CLOB
-
-class _OracleString(_NativeUnicodeMixin, sqltypes.String):
- pass
-
-class _OracleUnicodeText(_LOBMixin, _NativeUnicodeMixin, sqltypes.UnicodeText):
- def get_dbapi_type(self, dbapi):
- return dbapi.NCLOB
-
- def result_processor(self, dialect, coltype):
- lob_processor = _LOBMixin.result_processor(self, dialect, coltype)
- if lob_processor is None:
- return None
-
- string_processor = sqltypes.UnicodeText.result_processor(self, dialect, coltype)
-
- if string_processor is None:
- return lob_processor
- else:
- def process(value):
- return string_processor(lob_processor(value))
- return process
-
-class _OracleInteger(sqltypes.Integer):
- def result_processor(self, dialect, coltype):
- def to_int(val):
- if val is not None:
- val = int(val)
- return val
- return to_int
-
-class _OracleBinary(_LOBMixin, sqltypes.LargeBinary):
- def get_dbapi_type(self, dbapi):
- return dbapi.BLOB
-
- def bind_processor(self, dialect):
- return None
-
-class _OracleInterval(oracle.INTERVAL):
- def get_dbapi_type(self, dbapi):
- return dbapi.INTERVAL
-
-class _OracleRaw(oracle.RAW):
- pass
-
-class _OracleRowid(oracle.ROWID):
- def get_dbapi_type(self, dbapi):
- return dbapi.ROWID
-
-class OracleCompiler_cx_oracle(OracleCompiler):
- def bindparam_string(self, name):
- if self.preparer._bindparam_requires_quotes(name):
- quoted_name = '"%s"' % name
- self._quoted_bind_names[name] = quoted_name
- return OracleCompiler.bindparam_string(self, quoted_name)
- else:
- return OracleCompiler.bindparam_string(self, name)
-
-
-class OracleExecutionContext_cx_oracle(OracleExecutionContext):
-
- def pre_exec(self):
- quoted_bind_names = \
- getattr(self.compiled, '_quoted_bind_names', None)
- if quoted_bind_names:
- if not self.dialect.supports_unicode_statements:
- # if DBAPI doesn't accept unicode statements,
- # keys in self.parameters would have been encoded
- # here. so convert names in quoted_bind_names
- # to encoded as well.
- quoted_bind_names = \
- dict(
- (fromname.encode(self.dialect.encoding),
- toname.encode(self.dialect.encoding))
- for fromname, toname in
- quoted_bind_names.items()
- )
- for param in self.parameters:
- for fromname, toname in quoted_bind_names.items():
- param[toname] = param[fromname]
- del param[fromname]
-
- if self.dialect.auto_setinputsizes:
- # cx_oracle really has issues when you setinputsizes
- # on String, including that outparams/RETURNING
- # breaks for varchars
- self.set_input_sizes(quoted_bind_names,
- exclude_types=self.dialect._cx_oracle_string_types
- )
-
- # if a single execute, check for outparams
- if len(self.compiled_parameters) == 1:
- for bindparam in self.compiled.binds.values():
- if bindparam.isoutparam:
- dbtype = bindparam.type.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
- if not hasattr(self, 'out_parameters'):
- self.out_parameters = {}
- if dbtype is None:
- raise exc.InvalidRequestError("Cannot create out parameter for parameter "
- "%r - it's type %r is not supported by"
- " cx_oracle" %
- (name, bindparam.type)
- )
- name = self.compiled.bind_names[bindparam]
- self.out_parameters[name] = self.cursor.var(dbtype)
- self.parameters[0][quoted_bind_names.get(name, name)] = \
- self.out_parameters[name]
-
- def create_cursor(self):
- c = self._dbapi_connection.cursor()
- if self.dialect.arraysize:
- c.arraysize = self.dialect.arraysize
-
- return c
-
- def get_result_proxy(self):
- if hasattr(self, 'out_parameters') and self.compiled.returning:
- returning_params = dict(
- (k, v.getvalue())
- for k, v in self.out_parameters.items()
- )
- return ReturningResultProxy(self, returning_params)
-
- result = None
- if self.cursor.description is not None:
- for column in self.cursor.description:
- type_code = column[1]
- if type_code in self.dialect._cx_oracle_binary_types:
- result = base.BufferedColumnResultProxy(self)
-
- if result is None:
- result = base.ResultProxy(self)
-
- if hasattr(self, 'out_parameters'):
- if self.compiled_parameters is not None and \
- len(self.compiled_parameters) == 1:
- result.out_parameters = out_parameters = {}
-
- for bind, name in self.compiled.bind_names.items():
- if name in self.out_parameters:
- type = bind.type
- impl_type = type.dialect_impl(self.dialect)
- dbapi_type = impl_type.get_dbapi_type(self.dialect.dbapi)
- result_processor = impl_type.\
- result_processor(self.dialect,
- dbapi_type)
- if result_processor is not None:
- out_parameters[name] = \
- result_processor(self.out_parameters[name].getvalue())
- else:
- out_parameters[name] = self.out_parameters[name].getvalue()
- else:
- result.out_parameters = dict(
- (k, v.getvalue())
- for k, v in self.out_parameters.items()
- )
-
- return result
-
-class OracleExecutionContext_cx_oracle_with_unicode(OracleExecutionContext_cx_oracle):
-    """Support WITH_UNICODE in Python 2.x.
-
- WITH_UNICODE allows cx_Oracle's Python 3 unicode handling
- behavior under Python 2.x. This mode in some cases disallows
- and in other cases silently passes corrupted data when
- non-Python-unicode strings (a.k.a. plain old Python strings)
- are passed as arguments to connect(), the statement sent to execute(),
- or any of the bind parameter keys or values sent to execute().
- This optional context therefore ensures that all statements are
- passed as Python unicode objects.
-
- """
- def __init__(self, *arg, **kw):
- OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
- self.statement = unicode(self.statement)
-
-    def _execute_scalar(self, stmt, type_=None):
-        # fire_sequence() passes a type_ along with the statement;
-        # accept and forward it so the override matches the base signature
-        return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
-                _execute_scalar(unicode(stmt), type_)
-
-class ReturningResultProxy(base.FullyBufferedResultProxy):
- """Result proxy which stuffs the _returning clause + outparams into the fetch."""
-
- def __init__(self, context, returning_params):
- self._returning_params = returning_params
- super(ReturningResultProxy, self).__init__(context)
-
- def _cursor_description(self):
- returning = self.context.compiled.returning
-
- ret = []
- for c in returning:
- if hasattr(c, 'name'):
- ret.append((c.name, c.type))
- else:
- ret.append((c.anon_label, c.type))
- return ret
-
- def _buffer_rows(self):
- return collections.deque([tuple(self._returning_params["ret_%d" % i]
- for i, c in enumerate(self._returning_params))])
-
-class OracleDialect_cx_oracle(OracleDialect):
- execution_ctx_cls = OracleExecutionContext_cx_oracle
- statement_compiler = OracleCompiler_cx_oracle
-
- driver = "cx_oracle"
-
-    colspecs = {
- sqltypes.Numeric: _OracleNumeric,
- sqltypes.Date : _OracleDate, # generic type, assume datetime.date is desired
- oracle.DATE: oracle.DATE, # non generic type - passthru
- sqltypes.LargeBinary : _OracleBinary,
- sqltypes.Boolean : oracle._OracleBoolean,
- sqltypes.Interval : _OracleInterval,
- oracle.INTERVAL : _OracleInterval,
- sqltypes.Text : _OracleText,
- sqltypes.String : _OracleString,
- sqltypes.UnicodeText : _OracleUnicodeText,
- sqltypes.CHAR : _OracleChar,
- sqltypes.Integer : _OracleInteger, # this is only needed for OUT parameters.
- # it would be nice if we could not use it otherwise.
- oracle.RAW: _OracleRaw,
- sqltypes.Unicode: _OracleNVarChar,
- sqltypes.NVARCHAR : _OracleNVarChar,
- oracle.ROWID: _OracleRowid,
- }
-
-
- execute_sequence_format = list
-
- def __init__(self,
- auto_setinputsizes=True,
- auto_convert_lobs=True,
- threaded=True,
- allow_twophase=True,
- arraysize=50, **kwargs):
- OracleDialect.__init__(self, **kwargs)
- self.threaded = threaded
- self.arraysize = arraysize
- self.allow_twophase = allow_twophase
-        self.supports_timestamp = self.dbapi is None or hasattr(self.dbapi, 'TIMESTAMP')
- self.auto_setinputsizes = auto_setinputsizes
- self.auto_convert_lobs = auto_convert_lobs
-
- if hasattr(self.dbapi, 'version'):
- self.cx_oracle_ver = tuple([int(x) for x in self.dbapi.version.split('.')])
- else:
- self.cx_oracle_ver = (0, 0, 0)
-
- def types(*names):
- return set([
- getattr(self.dbapi, name, None) for name in names
- ]).difference([None])
-
- self._cx_oracle_string_types = types("STRING", "UNICODE", "NCLOB", "CLOB")
- self._cx_oracle_unicode_types = types("UNICODE", "NCLOB")
- self._cx_oracle_binary_types = types("BFILE", "CLOB", "NCLOB", "BLOB")
- self.supports_unicode_binds = self.cx_oracle_ver >= (5, 0)
- self.supports_native_decimal = self.cx_oracle_ver >= (5, 0)
- self._cx_oracle_native_nvarchar = self.cx_oracle_ver >= (5, 0)
-
- if self.cx_oracle_ver is None:
- # this occurs in tests with mock DBAPIs
- self._cx_oracle_string_types = set()
- self._cx_oracle_with_unicode = False
- elif self.cx_oracle_ver >= (5,) and not hasattr(self.dbapi, 'UNICODE'):
- # cx_Oracle WITH_UNICODE mode. *only* python
- # unicode objects accepted for anything
- self.supports_unicode_statements = True
- self.supports_unicode_binds = True
- self._cx_oracle_with_unicode = True
- # Py2K
- # There's really no reason to run with WITH_UNICODE under Python 2.x.
- # Give the user a hint.
-            util.warn("cx_Oracle is compiled under Python 2.x using the "
-                "WITH_UNICODE flag. Consider recompiling cx_Oracle without "
-                "this flag, which is in no way necessary for full support of Unicode. "
-                "Otherwise, all string-holding bind parameters must "
-                "be explicitly typed using SQLAlchemy's String type or one of its subtypes, "
-                "or otherwise be passed as Python unicode. Plain Python strings "
-                "passed as bind parameters will be silently corrupted by cx_Oracle."
-                )
- self.execution_ctx_cls = OracleExecutionContext_cx_oracle_with_unicode
- # end Py2K
- else:
- self._cx_oracle_with_unicode = False
-
- if self.cx_oracle_ver is None or \
- not self.auto_convert_lobs or \
- not hasattr(self.dbapi, 'CLOB'):
- self.dbapi_type_map = {}
- else:
- # only use this for LOB objects. using it for strings, dates
- # etc. leads to a little too much magic, reflection doesn't know if it should
- # expect encoded strings or unicodes, etc.
- self.dbapi_type_map = {
- self.dbapi.CLOB: oracle.CLOB(),
- self.dbapi.NCLOB:oracle.NCLOB(),
- self.dbapi.BLOB: oracle.BLOB(),
- self.dbapi.BINARY: oracle.RAW(),
- }
-
-    @classmethod
- def dbapi(cls):
- import cx_Oracle
- return cx_Oracle
-
- def initialize(self, connection):
- super(OracleDialect_cx_oracle, self).initialize(connection)
- if self._is_oracle_8:
- self.supports_unicode_binds = False
- self._detect_decimal_char(connection)
-
- def _detect_decimal_char(self, connection):
- """detect if the decimal separator character is not '.', as
-        is the case with European locale settings for NLS_LANG.
-
- cx_oracle itself uses similar logic when it formats Python
- Decimal objects to strings on the bind side (as of 5.0.3),
- as Oracle sends/receives string numerics only in the
- current locale.
-
- """
- if self.cx_oracle_ver < (5,):
- # no output type handlers before version 5
- return
-
- cx_Oracle = self.dbapi
- conn = connection.connection
-
- # override the output_type_handler that's
- # on the cx_oracle connection with a plain
- # one on the cursor
-
- def output_type_handler(cursor, name, defaultType,
- size, precision, scale):
- return cursor.var(
- cx_Oracle.STRING,
- 255, arraysize=cursor.arraysize)
-
- cursor = conn.cursor()
- cursor.outputtypehandler = output_type_handler
- cursor.execute("SELECT 0.1 FROM DUAL")
- val = cursor.fetchone()[0]
- cursor.close()
- char = re.match(r"([\.,])", val).group(1)
- if char != '.':
- _detect_decimal = self._detect_decimal
- self._detect_decimal = \
- lambda value: _detect_decimal(value.replace(char, '.'))
- self._to_decimal = \
- lambda value: decimal.Decimal(value.replace(char, '.'))
-
- def _detect_decimal(self, value):
- if "." in value:
- return decimal.Decimal(value)
- else:
- return int(value)
-
- _to_decimal = decimal.Decimal
-
- def on_connect(self):
- if self.cx_oracle_ver < (5,):
- # no output type handlers before version 5
- return
-
- cx_Oracle = self.dbapi
- def output_type_handler(cursor, name, defaultType,
- size, precision, scale):
- # convert all NUMBER with precision + positive scale to Decimal
- # this almost allows "native decimal" mode.
- if defaultType == cx_Oracle.NUMBER and precision and scale > 0:
- return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._to_decimal,
- arraysize=cursor.arraysize)
- # if NUMBER with zero precision and 0 or neg scale, this appears
- # to indicate "ambiguous". Use a slower converter that will
- # make a decision based on each value received - the type
- # may change from row to row (!). This kills
- # off "native decimal" mode, handlers still needed.
- elif defaultType == cx_Oracle.NUMBER \
- and not precision and scale <= 0:
- return cursor.var(
- cx_Oracle.STRING,
- 255,
- outconverter=self._detect_decimal,
- arraysize=cursor.arraysize)
- # allow all strings to come back natively as Unicode
- elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
- return cursor.var(unicode, size, cursor.arraysize)
-
- def on_connect(conn):
- conn.outputtypehandler = output_type_handler
-
- return on_connect
-
- def create_connect_args(self, url):
- dialect_opts = dict(url.query)
- for opt in ('use_ansi', 'auto_setinputsizes', 'auto_convert_lobs',
- 'threaded', 'allow_twophase'):
- if opt in dialect_opts:
- util.coerce_kw_type(dialect_opts, opt, bool)
- setattr(self, opt, dialect_opts[opt])
-
- if url.database:
- # if we have a database, then we have a remote host
- port = url.port
- if port:
- port = int(port)
- else:
- port = 1521
- dsn = self.dbapi.makedsn(url.host, port, url.database)
- else:
- # we have a local tnsname
- dsn = url.host
-
- opts = dict(
- user=url.username,
- password=url.password,
- dsn=dsn,
- threaded=self.threaded,
- twophase=self.allow_twophase,
- )
-
- # Py2K
- if self._cx_oracle_with_unicode:
- for k, v in opts.items():
- if isinstance(v, str):
- opts[k] = unicode(v)
- else:
- for k, v in opts.items():
- if isinstance(v, unicode):
- opts[k] = str(v)
- # end Py2K
-
- if 'mode' in url.query:
- opts['mode'] = url.query['mode']
- if isinstance(opts['mode'], basestring):
- mode = opts['mode'].upper()
- if mode == 'SYSDBA':
- opts['mode'] = self.dbapi.SYSDBA
- elif mode == 'SYSOPER':
- opts['mode'] = self.dbapi.SYSOPER
- else:
- util.coerce_kw_type(opts, 'mode', int)
- return ([], opts)
-
- def _get_server_version_info(self, connection):
- return tuple(
- int(x)
- for x in connection.connection.version.split('.')
- )
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.InterfaceError):
- return "not connected" in str(e)
- else:
- return "ORA-03114" in str(e) or "ORA-03113" in str(e)
-
- def create_xid(self):
-        """Create a two-phase transaction ID.
-
-        This ID will be passed to do_begin_twophase(),
-        do_rollback_twophase(), do_commit_twophase().  Its format is
-        unspecified."""
-
- id = random.randint(0, 2 ** 128)
- return (0x1234, "%032x" % id, "%032x" % 9)
-
- def do_begin_twophase(self, connection, xid):
- connection.connection.begin(*xid)
-
- def do_prepare_twophase(self, connection, xid):
- connection.connection.prepare()
-
- def do_rollback_twophase(self, connection, xid, is_prepared=True, recover=False):
- self.do_rollback(connection.connection)
-
- def do_commit_twophase(self, connection, xid, is_prepared=True, recover=False):
- self.do_commit(connection.connection)
-
- def do_recover_twophase(self, connection):
- pass
-
-dialect = OracleDialect_cx_oracle
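-
-# A standalone sketch (not part of the dialect) of the separator
-# normalization performed in _detect_decimal_char() above: when the
-# session locale uses ',' as its decimal character, numeric strings
-# are rewritten to use '.' before conversion.
-import decimal as _decimal
-
-def _example_to_decimal(value, char=','):
-    return _decimal.Decimal(value.replace(char, '.'))
-
-assert _example_to_decimal('10,25') == _decimal.Decimal('10.25')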
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/zxjdbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/zxjdbc.py
deleted file mode 100755
index 6ec33972..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/oracle/zxjdbc.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# oracle/zxjdbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the Oracle database via the zxjdbc JDBC connector.
-
-JDBC Driver
------------
-
-The official Oracle JDBC driver is at
-http://www.oracle.com/technology/software/tech/java/sqlj_jdbc/index.html.
-
-"""
-import decimal
-import re
-
-from sqlalchemy import sql, types as sqltypes, util
-from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from sqlalchemy.dialects.oracle.base import OracleCompiler, OracleDialect, OracleExecutionContext
-from sqlalchemy.engine import base, default
-from sqlalchemy.sql import expression
-
-SQLException = zxJDBC = None
-
-class _ZxJDBCDate(sqltypes.Date):
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is None:
- return None
- else:
- return value.date()
- return process
-
-
-class _ZxJDBCNumeric(sqltypes.Numeric):
-
- def result_processor(self, dialect, coltype):
-        # XXX: does the dialect return Decimal or not?  If it does (in
-        # all cases), we could use a None processor as well as the
-        # to_float generic processor.
- if self.asdecimal:
- def process(value):
- if isinstance(value, decimal.Decimal):
- return value
- else:
- return decimal.Decimal(str(value))
- else:
- def process(value):
- if isinstance(value, decimal.Decimal):
- return float(value)
- else:
- return value
- return process
-
-
-class OracleCompiler_zxjdbc(OracleCompiler):
-
- def returning_clause(self, stmt, returning_cols):
- self.returning_cols = list(expression._select_iterables(returning_cols))
-
- # within_columns_clause=False so that labels (foo AS bar) don't render
- columns = [self.process(c, within_columns_clause=False, result_map=self.result_map)
- for c in self.returning_cols]
-
- if not hasattr(self, 'returning_parameters'):
- self.returning_parameters = []
-
- binds = []
- for i, col in enumerate(self.returning_cols):
- dbtype = col.type.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
- self.returning_parameters.append((i + 1, dbtype))
-
- bindparam = sql.bindparam("ret_%d" % i, value=ReturningParam(dbtype))
- self.binds[bindparam.key] = bindparam
- binds.append(self.bindparam_string(self._truncate_bindparam(bindparam)))
-
- return 'RETURNING ' + ', '.join(columns) + " INTO " + ", ".join(binds)
-
-
-class OracleExecutionContext_zxjdbc(OracleExecutionContext):
-
- def pre_exec(self):
- if hasattr(self.compiled, 'returning_parameters'):
- # prepare a zxJDBC statement so we can grab its underlying
- # OraclePreparedStatement's getReturnResultSet later
- self.statement = self.cursor.prepare(self.statement)
-
- def get_result_proxy(self):
- if hasattr(self.compiled, 'returning_parameters'):
- rrs = None
- try:
- try:
- rrs = self.statement.__statement__.getReturnResultSet()
- rrs.next()
- except SQLException, sqle:
- msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
- if sqle.getSQLState() is not None:
- msg += ' [SQLState: %s]' % sqle.getSQLState()
- raise zxJDBC.Error(msg)
- else:
- row = tuple(self.cursor.datahandler.getPyObject(rrs, index, dbtype)
- for index, dbtype in self.compiled.returning_parameters)
- return ReturningResultProxy(self, row)
- finally:
- if rrs is not None:
- try:
- rrs.close()
- except SQLException:
- pass
- self.statement.close()
-
- return base.ResultProxy(self)
-
- def create_cursor(self):
- cursor = self._dbapi_connection.cursor()
- cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
- return cursor
-
-
-class ReturningResultProxy(base.FullyBufferedResultProxy):
-
- """ResultProxy backed by the RETURNING ResultSet results."""
-
- def __init__(self, context, returning_row):
- self._returning_row = returning_row
- super(ReturningResultProxy, self).__init__(context)
-
- def _cursor_description(self):
- ret = []
- for c in self.context.compiled.returning_cols:
- if hasattr(c, 'name'):
- ret.append((c.name, c.type))
- else:
- ret.append((c.anon_label, c.type))
- return ret
-
- def _buffer_rows(self):
- return [self._returning_row]
-
-
-class ReturningParam(object):
-
- """A bindparam value representing a RETURNING parameter.
-
- Specially handled by OracleReturningDataHandler.
- """
-
- def __init__(self, type):
- self.type = type
-
- def __eq__(self, other):
- if isinstance(other, ReturningParam):
- return self.type == other.type
- return NotImplemented
-
- def __ne__(self, other):
- if isinstance(other, ReturningParam):
- return self.type != other.type
- return NotImplemented
-
- def __repr__(self):
- kls = self.__class__
- return '<%s.%s object at 0x%x type=%s>' % (kls.__module__, kls.__name__, id(self),
- self.type)
-
-
-class OracleDialect_zxjdbc(ZxJDBCConnector, OracleDialect):
- jdbc_db_name = 'oracle'
- jdbc_driver_name = 'oracle.jdbc.OracleDriver'
-
- statement_compiler = OracleCompiler_zxjdbc
- execution_ctx_cls = OracleExecutionContext_zxjdbc
-
- colspecs = util.update_copy(
- OracleDialect.colspecs,
- {
- sqltypes.Date : _ZxJDBCDate,
- sqltypes.Numeric: _ZxJDBCNumeric
- }
- )
-
- def __init__(self, *args, **kwargs):
- super(OracleDialect_zxjdbc, self).__init__(*args, **kwargs)
- global SQLException, zxJDBC
- from java.sql import SQLException
- from com.ziclix.python.sql import zxJDBC
- from com.ziclix.python.sql.handler import OracleDataHandler
- class OracleReturningDataHandler(OracleDataHandler):
-
- """zxJDBC DataHandler that specially handles ReturningParam."""
-
- def setJDBCObject(self, statement, index, object, dbtype=None):
- if type(object) is ReturningParam:
- statement.registerReturnParameter(index, object.type)
- elif dbtype is None:
- OracleDataHandler.setJDBCObject(self, statement, index, object)
- else:
- OracleDataHandler.setJDBCObject(self, statement, index, object, dbtype)
- self.DataHandler = OracleReturningDataHandler
-
- def initialize(self, connection):
- super(OracleDialect_zxjdbc, self).initialize(connection)
- self.implicit_returning = connection.connection.driverversion >= '10.2'
-
- def _create_jdbc_url(self, url):
- return 'jdbc:oracle:thin:@%s:%s:%s' % (url.host, url.port or 1521, url.database)
-
- def _get_server_version_info(self, connection):
- version = re.search(r'Release ([\d\.]+)', connection.connection.dbversion).group(1)
- return tuple(int(x) for x in version.split('.'))
-
-dialect = OracleDialect_zxjdbc
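-
-# A standalone sketch of the string shape produced by
-# returning_clause() above; the column and bind names are illustrative,
-# and real bind names come from the compiler's truncation rules.
-_cols = ['id', 'name']
-_binds = [':ret_0', ':ret_1']
-_clause = 'RETURNING ' + ', '.join(_cols) + ' INTO ' + ', '.join(_binds)
-assert _clause == 'RETURNING id, name INTO :ret_0, :ret_1'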
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgres.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgres.py
deleted file mode 100755
index 48d1a8c3..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgres.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# dialects/postgres.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-# backwards compat with the old name
-from sqlalchemy.util import warn_deprecated
-
-warn_deprecated(
- "The SQLAlchemy PostgreSQL dialect has been renamed from 'postgres' to 'postgresql'. "
- "The new URL format is postgresql[+driver]://<user>:<pass>@<host>/<dbname>"
- )
-
-from sqlalchemy.dialects.postgresql import *
-from sqlalchemy.dialects.postgresql import base
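-
-# A usage sketch of the compatibility shim: engine URLs written against
-# the old 'postgres' name continue to resolve, emitting the warning
-# above.  The URL is illustrative and assumes psycopg2 is installed.
-if __name__ == '__main__':
-    from sqlalchemy import create_engine
-    engine = create_engine('postgres://scott:tiger@localhost/test')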
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/__init__.py
deleted file mode 100755
index 481a7fb7..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# postgresql/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.postgresql import base, psycopg2, pg8000, pypostgresql, zxjdbc
-
-base.dialect = psycopg2.dialect
-
-from sqlalchemy.dialects.postgresql.base import \
- INTEGER, BIGINT, SMALLINT, VARCHAR, CHAR, TEXT, NUMERIC, FLOAT, REAL, INET, \
- CIDR, UUID, BIT, MACADDR, DOUBLE_PRECISION, TIMESTAMP, TIME,\
- DATE, BYTEA, BOOLEAN, INTERVAL, ARRAY, ENUM, dialect
-
-__all__ = (
-'INTEGER', 'BIGINT', 'SMALLINT', 'VARCHAR', 'CHAR', 'TEXT', 'NUMERIC', 'FLOAT', 'REAL', 'INET',
-'CIDR', 'UUID', 'BIT', 'MACADDR', 'DOUBLE_PRECISION', 'TIMESTAMP', 'TIME',
-'DATE', 'BYTEA', 'BOOLEAN', 'INTERVAL', 'ARRAY', 'ENUM', 'dialect'
-)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/base.py
deleted file mode 100755
index 3193cde9..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/base.py
+++ /dev/null
@@ -1,1449 +0,0 @@
-# postgresql/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the PostgreSQL database.
-
-For information on connecting using specific drivers, see the documentation
-section regarding that driver.
-
-Sequences/SERIAL
-----------------
-
-PostgreSQL supports sequences, and SQLAlchemy uses these as the default means
-of creating new primary key values for integer-based primary key columns. When
-creating tables, SQLAlchemy will issue the ``SERIAL`` datatype for
-integer-based primary key columns, which generates a sequence and
-server-side default corresponding to the column.
-
-To specify a specific named sequence to be used for primary key generation,
-use the :func:`~sqlalchemy.schema.Sequence` construct::
-
- Table('sometable', metadata,
- Column('id', Integer, Sequence('some_id_seq'), primary_key=True)
- )
-
-When SQLAlchemy issues a single INSERT statement, to fulfill the contract of
-having the "last insert identifier" available, a RETURNING clause is added to
-the INSERT statement which specifies that the primary key columns should be
-returned after the statement completes. The RETURNING functionality only
-takes place if PostgreSQL 8.2 or later is in use. As a fallback approach, the
-sequence, whether specified explicitly or implicitly via ``SERIAL``, is
-executed independently beforehand, and the returned value is used in the
-subsequent INSERT. Note that when an
-:func:`~sqlalchemy.sql.expression.insert()` construct is executed using
-"executemany" semantics, the "last inserted identifier" functionality does
-not apply; no RETURNING clause is emitted nor is the sequence pre-executed
-in this case.
-
-To disable the use of RETURNING by default, specify the flag
-``implicit_returning=False`` to :func:`.create_engine`.
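-
-For example, a minimal sketch (the connection URL is illustrative)::
-
-    engine = create_engine(
-                'postgresql://scott:tiger@localhost/test',
-                implicit_returning=False)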
-
-Transaction Isolation Level
----------------------------
-
-:func:`.create_engine` accepts an ``isolation_level`` parameter which results
-in the command ``SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL
-<level>`` being invoked for every new connection. Valid values for this
-parameter are ``READ_COMMITTED``, ``READ_UNCOMMITTED``, ``REPEATABLE_READ``,
-and ``SERIALIZABLE``. Note that the psycopg2 dialect does *not* use this
-technique and uses psycopg2-specific APIs (see that dialect for details).
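-
-For example, a minimal sketch (the connection URL is illustrative; the
-pg8000 driver is shown since psycopg2 handles isolation separately)::
-
-    engine = create_engine(
-                'postgresql+pg8000://scott:tiger@localhost/test',
-                isolation_level='READ_COMMITTED')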
-
-INSERT/UPDATE...RETURNING
--------------------------
-
-The dialect supports PG 8.2's ``INSERT..RETURNING``, ``UPDATE..RETURNING`` and
-``DELETE..RETURNING`` syntaxes. ``INSERT..RETURNING`` is used by default
-for single-row INSERT statements in order to fetch newly generated
-primary key identifiers. To specify an explicit ``RETURNING`` clause,
-use the :meth:`._UpdateBase.returning` method on a per-statement basis::
-
- # INSERT..RETURNING
- result = table.insert().returning(table.c.col1, table.c.col2).\\
- values(name='foo')
- print result.fetchall()
-
- # UPDATE..RETURNING
- result = table.update().returning(table.c.col1, table.c.col2).\\
- where(table.c.name=='foo').values(name='bar')
- print result.fetchall()
-
- # DELETE..RETURNING
- result = table.delete().returning(table.c.col1, table.c.col2).\\
- where(table.c.name=='foo')
- print result.fetchall()
-
-Indexes
--------
-
-PostgreSQL supports partial indexes. To create them, pass a
-``postgresql_where`` option to the :class:`.Index` constructor::
-
-  Index('my_index', my_table.c.id, postgresql_where=my_table.c.value > 10)
-
-"""
-
-import re
-
-from sqlalchemy import sql, schema, exc, util
-from sqlalchemy.engine import default, reflection
-from sqlalchemy.sql import compiler, expression, util as sql_util
-from sqlalchemy import types as sqltypes
-
-try:
- from uuid import UUID as _python_UUID
-except ImportError:
- _python_UUID = None
-
-from sqlalchemy.types import INTEGER, BIGINT, SMALLINT, VARCHAR, \
- CHAR, TEXT, FLOAT, NUMERIC, \
- DATE, BOOLEAN, REAL
-
-RESERVED_WORDS = set(
- ["all", "analyse", "analyze", "and", "any", "array", "as", "asc",
- "asymmetric", "both", "case", "cast", "check", "collate", "column",
- "constraint", "create", "current_catalog", "current_date",
- "current_role", "current_time", "current_timestamp", "current_user",
- "default", "deferrable", "desc", "distinct", "do", "else", "end",
- "except", "false", "fetch", "for", "foreign", "from", "grant", "group",
- "having", "in", "initially", "intersect", "into", "leading", "limit",
- "localtime", "localtimestamp", "new", "not", "null", "off", "offset",
- "old", "on", "only", "or", "order", "placing", "primary", "references",
- "returning", "select", "session_user", "some", "symmetric", "table",
- "then", "to", "trailing", "true", "union", "unique", "user", "using",
- "variadic", "when", "where", "window", "with", "authorization",
- "between", "binary", "cross", "current_schema", "freeze", "full",
- "ilike", "inner", "is", "isnull", "join", "left", "like", "natural",
- "notnull", "outer", "over", "overlaps", "right", "similar", "verbose"
- ])
-
-_DECIMAL_TYPES = (1231, 1700)
-_FLOAT_TYPES = (700, 701, 1021, 1022)
-_INT_TYPES = (20, 21, 23, 26, 1005, 1007, 1016)
-
-class BYTEA(sqltypes.LargeBinary):
- __visit_name__ = 'BYTEA'
-
-class DOUBLE_PRECISION(sqltypes.Float):
- __visit_name__ = 'DOUBLE_PRECISION'
-
-class INET(sqltypes.TypeEngine):
- __visit_name__ = "INET"
-PGInet = INET
-
-class CIDR(sqltypes.TypeEngine):
- __visit_name__ = "CIDR"
-PGCidr = CIDR
-
-class MACADDR(sqltypes.TypeEngine):
- __visit_name__ = "MACADDR"
-PGMacAddr = MACADDR
-
-class TIMESTAMP(sqltypes.TIMESTAMP):
- def __init__(self, timezone=False, precision=None):
- super(TIMESTAMP, self).__init__(timezone=timezone)
- self.precision = precision
-
-
-class TIME(sqltypes.TIME):
- def __init__(self, timezone=False, precision=None):
- super(TIME, self).__init__(timezone=timezone)
- self.precision = precision
-
-class INTERVAL(sqltypes.TypeEngine):
- """Postgresql INTERVAL type.
-
- The INTERVAL type may not be supported on all DBAPIs.
- It is known to work on psycopg2 and not pg8000 or zxjdbc.
-
- """
- __visit_name__ = 'INTERVAL'
- def __init__(self, precision=None):
- self.precision = precision
-
- @classmethod
- def _adapt_from_generic_interval(cls, interval):
- return INTERVAL(precision=interval.second_precision)
-
- @property
- def _type_affinity(self):
- return sqltypes.Interval
-
-PGInterval = INTERVAL
-
-class BIT(sqltypes.TypeEngine):
- __visit_name__ = 'BIT'
- def __init__(self, length=None, varying=False):
- if not varying:
- # BIT without VARYING defaults to length 1
- self.length = length or 1
- else:
- # but BIT VARYING can be unlimited-length, so no default
- self.length = length
- self.varying = varying
-
-PGBit = BIT
-
-class UUID(sqltypes.TypeEngine):
- """Postgresql UUID type.
-
- Represents the UUID column type, interpreting
- data either as natively returned by the DBAPI
- or as Python uuid objects.
-
- The UUID type may not be supported on all DBAPIs.
- It is known to work on psycopg2 and not pg8000.
-
- """
- __visit_name__ = 'UUID'
-
- def __init__(self, as_uuid=False):
- """Construct a UUID type.
-
- :param as_uuid=False: if True, values will be interpreted
- as Python uuid objects, converting to/from string via the
- DBAPI.
-
- """
- if as_uuid and _python_UUID is None:
- raise NotImplementedError(
- "This version of Python does not support the native UUID type."
- )
- self.as_uuid = as_uuid
-
- def bind_processor(self, dialect):
- if self.as_uuid:
- def process(value):
- if value is not None:
- value = str(value)
- return value
- return process
- else:
- return None
-
- def result_processor(self, dialect, coltype):
- if self.as_uuid:
- def process(value):
- if value is not None:
- value = _python_UUID(value)
- return value
- return process
- else:
- return None
-
-PGUuid = UUID
-
-class ARRAY(sqltypes.MutableType, sqltypes.Concatenable, sqltypes.TypeEngine):
- """Postgresql ARRAY type.
-
- Represents values as Python lists.
-
- The ARRAY type may not be supported on all DBAPIs.
- It is known to work on psycopg2 and not pg8000.
-
-
- """
- __visit_name__ = 'ARRAY'
-
- def __init__(self, item_type, mutable=False, as_tuple=False):
- """Construct an ARRAY.
-
- E.g.::
-
- Column('myarray', ARRAY(Integer))
-
- Arguments are:
-
-        :param item_type: The data type of items of this array. Note that
-          dimensionality is irrelevant here, so multi-dimensional arrays like
-          ``INTEGER[][]`` are constructed as ``ARRAY(Integer)``, not as
-          ``ARRAY(ARRAY(Integer))`` or such. The type mapping figures out
-          the dimensionality on the fly.
-
- :param mutable=False: Specify whether lists passed to this
- class should be considered mutable - this enables
- "mutable types" mode in the ORM. Be sure to read the
- notes for :class:`.MutableType` regarding ORM
- performance implications (default changed from ``True`` in
- 0.7.0).
-
- .. note:: This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- :param as_tuple=False: Specify whether return results
- should be converted to tuples from lists. DBAPIs such
- as psycopg2 return lists by default. When tuples are
- returned, the results are hashable. This flag can only
- be set to ``True`` when ``mutable`` is set to
- ``False``. (new in 0.6.5)
-
- """
- if isinstance(item_type, ARRAY):
- raise ValueError("Do not nest ARRAY types; ARRAY(basetype) "
- "handles multi-dimensional arrays of basetype")
- if isinstance(item_type, type):
- item_type = item_type()
- self.item_type = item_type
- self.mutable = mutable
- if mutable and as_tuple:
- raise exc.ArgumentError(
- "mutable must be set to False if as_tuple is True."
- )
- self.as_tuple = as_tuple
-
- def copy_value(self, value):
- if value is None:
- return None
- elif self.mutable:
- return list(value)
- else:
- return value
-
- def compare_values(self, x, y):
- return x == y
-
- def is_mutable(self):
- return self.mutable
-
- def bind_processor(self, dialect):
- item_proc = self.item_type.dialect_impl(dialect).bind_processor(dialect)
- if item_proc:
- def convert_item(item):
- if isinstance(item, (list, tuple)):
- return [convert_item(child) for child in item]
- else:
- return item_proc(item)
- else:
- def convert_item(item):
- if isinstance(item, (list, tuple)):
- return [convert_item(child) for child in item]
- else:
- return item
- def process(value):
- if value is None:
- return value
- return [convert_item(item) for item in value]
- return process
-
- def result_processor(self, dialect, coltype):
- item_proc = self.item_type.dialect_impl(dialect).result_processor(dialect, coltype)
- if item_proc:
- def convert_item(item):
- if isinstance(item, list):
- r = [convert_item(child) for child in item]
- if self.as_tuple:
- r = tuple(r)
- return r
- else:
- return item_proc(item)
- else:
- def convert_item(item):
- if isinstance(item, list):
- r = [convert_item(child) for child in item]
- if self.as_tuple:
- r = tuple(r)
- return r
- else:
- return item
- def process(value):
- if value is None:
- return value
- r = [convert_item(item) for item in value]
- if self.as_tuple:
- r = tuple(r)
- return r
- return process
-PGArray = ARRAY
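-
-# A standalone usage sketch of the ARRAY type above; the table and
-# column names are illustrative, and as_tuple=True makes returned rows
-# hashable.
-from sqlalchemy import Table, MetaData, Column, Integer
-_example_table = Table('scores', MetaData(),
-                       Column('values', ARRAY(Integer, as_tuple=True)))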
-
-class ENUM(sqltypes.Enum):
-
- def create(self, bind=None, checkfirst=True):
- if not bind.dialect.supports_native_enum:
- return
-
- if not checkfirst or \
- not bind.dialect.has_type(bind, self.name, schema=self.schema):
- bind.execute(CreateEnumType(self))
-
- def drop(self, bind=None, checkfirst=True):
- if not bind.dialect.supports_native_enum:
- return
-
- if not checkfirst or \
- bind.dialect.has_type(bind, self.name, schema=self.schema):
- bind.execute(DropEnumType(self))
-
- def _on_table_create(self, event, target, bind, **kw):
- self.create(bind=bind, checkfirst=True)
-
- def _on_metadata_create(self, event, target, bind, **kw):
- if self.metadata is not None:
- self.create(bind=bind, checkfirst=True)
-
- def _on_metadata_drop(self, event, target, bind, **kw):
- self.drop(bind=bind, checkfirst=True)
-
-colspecs = {
- sqltypes.Interval:INTERVAL,
- sqltypes.Enum:ENUM,
-}
-
-ischema_names = {
- 'integer' : INTEGER,
- 'bigint' : BIGINT,
- 'smallint' : SMALLINT,
- 'character varying' : VARCHAR,
- 'character' : CHAR,
- '"char"' : sqltypes.String,
- 'name' : sqltypes.String,
- 'text' : TEXT,
- 'numeric' : NUMERIC,
- 'float' : FLOAT,
- 'real' : REAL,
- 'inet': INET,
- 'cidr': CIDR,
- 'uuid': UUID,
- 'bit': BIT,
- 'bit varying': BIT,
- 'macaddr': MACADDR,
- 'double precision' : DOUBLE_PRECISION,
- 'timestamp' : TIMESTAMP,
- 'timestamp with time zone' : TIMESTAMP,
- 'timestamp without time zone' : TIMESTAMP,
- 'time with time zone' : TIME,
- 'time without time zone' : TIME,
- 'date' : DATE,
- 'time': TIME,
- 'bytea' : BYTEA,
- 'boolean' : BOOLEAN,
- 'interval':INTERVAL,
- 'interval year to month':INTERVAL,
- 'interval day to second':INTERVAL,
-}
-
-
-
-class PGCompiler(compiler.SQLCompiler):
-
- def visit_match_op(self, binary, **kw):
- return "%s @@ to_tsquery(%s)" % (
- self.process(binary.left),
- self.process(binary.right))
-
- def visit_ilike_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return '%s ILIKE %s' % \
- (self.process(binary.left), self.process(binary.right)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def visit_notilike_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return '%s NOT ILIKE %s' % \
- (self.process(binary.left), self.process(binary.right)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def render_literal_value(self, value, type_):
- value = super(PGCompiler, self).render_literal_value(value, type_)
- # TODO: need to inspect "standard_conforming_strings"
- if self.dialect._backslash_escapes:
- value = value.replace('\\', '\\\\')
- return value
-
- def visit_sequence(self, seq):
- return "nextval('%s')" % self.preparer.format_sequence(seq)
-
- def limit_clause(self, select):
- text = ""
- if select._limit is not None:
- text += " \n LIMIT " + self.process(sql.literal(select._limit))
- if select._offset is not None:
- if select._limit is None:
- text += " \n LIMIT ALL"
- text += " OFFSET " + self.process(sql.literal(select._offset))
- return text
-
- def get_select_precolumns(self, select):
- if select._distinct is not False:
- if select._distinct is True:
- return "DISTINCT "
- elif isinstance(select._distinct, (list, tuple)):
- return "DISTINCT ON (" + ', '.join(
- [self.process(col) for col in select._distinct]
- )+ ") "
- else:
- return "DISTINCT ON (" + self.process(select._distinct) + ") "
- else:
- return ""
-
- def for_update_clause(self, select):
- if select.for_update == 'nowait':
- return " FOR UPDATE NOWAIT"
- else:
- return super(PGCompiler, self).for_update_clause(select)
-
- def returning_clause(self, stmt, returning_cols):
-
- columns = [
- self.process(
- self.label_select_column(None, c, asfrom=False),
- within_columns_clause=True,
- result_map=self.result_map)
- for c in expression._select_iterables(returning_cols)
- ]
-
- return 'RETURNING ' + ', '.join(columns)
-
- def visit_extract(self, extract, **kwargs):
- field = self.extract_map.get(extract.field, extract.field)
- if extract.expr.type:
- affinity = extract.expr.type._type_affinity
- else:
- affinity = None
-
-        casts = {
-            sqltypes.Date: 'date',
-            sqltypes.DateTime: 'timestamp',
-            sqltypes.Interval: 'interval',
-            sqltypes.Time: 'time',
-        }
- cast = casts.get(affinity, None)
- if isinstance(extract.expr, sql.ColumnElement) and cast is not None:
- expr = extract.expr.op('::')(sql.literal_column(cast))
- else:
- expr = extract.expr
- return "EXTRACT(%s FROM %s)" % (
- field, self.process(expr))
-
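-# A standalone sketch of the cast applied by visit_extract() above; the
-# table and column names are illustrative.  On this dialect the
-# expression renders approximately as
-# "EXTRACT(year FROM events.ts :: timestamp)".
-from sqlalchemy import Table, MetaData, Column, DateTime, extract
-_events = Table('events', MetaData(), Column('ts', DateTime))
-_year_expr = extract('year', _events.c.ts)
-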
-class PGDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column)
- impl_type = column.type.dialect_impl(self.dialect)
- if column.primary_key and \
- column is column.table._autoincrement_column and \
- not isinstance(impl_type, sqltypes.SmallInteger) and \
- (
- column.default is None or
- (
- isinstance(column.default, schema.Sequence) and
- column.default.optional
- )
- ):
- if isinstance(impl_type, sqltypes.BigInteger):
- colspec += " BIGSERIAL"
- else:
- colspec += " SERIAL"
- else:
- colspec += " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
- return colspec
-
- def visit_create_enum_type(self, create):
- type_ = create.element
-
- return "CREATE TYPE %s AS ENUM (%s)" % (
- self.preparer.format_type(type_),
- ",".join("'%s'" % e for e in type_.enums)
- )
-
- def visit_drop_enum_type(self, drop):
- type_ = drop.element
-
- return "DROP TYPE %s" % (
- self.preparer.format_type(type_)
- )
-
- def visit_create_index(self, create):
- preparer = self.preparer
- index = create.element
- text = "CREATE "
- if index.unique:
- text += "UNIQUE "
- text += "INDEX %s ON %s (%s)" \
- % (preparer.quote(
- self._index_identifier(index.name), index.quote),
- preparer.format_table(index.table),
- ', '.join([preparer.format_column(c)
- for c in index.columns]))
-
- if "postgres_where" in index.kwargs:
- whereclause = index.kwargs['postgres_where']
- util.warn_deprecated(
- "The 'postgres_where' argument has been renamed "
- "to 'postgresql_where'.")
- elif 'postgresql_where' in index.kwargs:
- whereclause = index.kwargs['postgresql_where']
- else:
- whereclause = None
-
- if whereclause is not None:
- whereclause = sql_util.expression_as_ddl(whereclause)
- where_compiled = self.sql_compiler.process(whereclause)
- text += " WHERE " + where_compiled
- return text
-
-
-class PGTypeCompiler(compiler.GenericTypeCompiler):
- def visit_INET(self, type_):
- return "INET"
-
- def visit_CIDR(self, type_):
- return "CIDR"
-
- def visit_MACADDR(self, type_):
- return "MACADDR"
-
- def visit_FLOAT(self, type_):
- if not type_.precision:
- return "FLOAT"
- else:
- return "FLOAT(%(precision)s)" % {'precision': type_.precision}
-
- def visit_DOUBLE_PRECISION(self, type_):
- return "DOUBLE PRECISION"
-
- def visit_BIGINT(self, type_):
- return "BIGINT"
-
- def visit_datetime(self, type_):
- return self.visit_TIMESTAMP(type_)
-
- def visit_enum(self, type_):
- if not type_.native_enum or not self.dialect.supports_native_enum:
- return super(PGTypeCompiler, self).visit_enum(type_)
- else:
- return self.visit_ENUM(type_)
-
- def visit_ENUM(self, type_):
- return self.dialect.identifier_preparer.format_type(type_)
-
- def visit_TIMESTAMP(self, type_):
- return "TIMESTAMP%s %s" % (
- getattr(type_, 'precision', None) and "(%d)" %
- type_.precision or "",
- (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
- )
-
- def visit_TIME(self, type_):
- return "TIME%s %s" % (
- getattr(type_, 'precision', None) and "(%d)" %
- type_.precision or "",
- (type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE"
- )
-
- def visit_INTERVAL(self, type_):
- if type_.precision is not None:
- return "INTERVAL(%d)" % type_.precision
- else:
- return "INTERVAL"
-
- def visit_BIT(self, type_):
- if type_.varying:
- compiled = "BIT VARYING"
- if type_.length is not None:
- compiled += "(%d)" % type_.length
- else:
- compiled = "BIT(%d)" % type_.length
- return compiled
-
- def visit_UUID(self, type_):
- return "UUID"
-
- def visit_large_binary(self, type_):
- return self.visit_BYTEA(type_)
-
- def visit_BYTEA(self, type_):
- return "BYTEA"
-
- def visit_ARRAY(self, type_):
- return self.process(type_.item_type) + '[]'
-
-
-class PGIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = RESERVED_WORDS
-
- def _unquote_identifier(self, value):
- if value[0] == self.initial_quote:
- value = value[1:-1].\
- replace(self.escape_to_quote, self.escape_quote)
- return value
-
- def format_type(self, type_, use_schema=True):
- if not type_.name:
- raise exc.ArgumentError("Postgresql ENUM type requires a name.")
-
- name = self.quote(type_.name, type_.quote)
- if not self.omit_schema and use_schema and type_.schema is not None:
- name = self.quote_schema(type_.schema, type_.quote) + "." + name
- return name
-
-class PGInspector(reflection.Inspector):
-
- def __init__(self, conn):
- reflection.Inspector.__init__(self, conn)
-
- def get_table_oid(self, table_name, schema=None):
-        """Return the OID for `table_name`, given an optional `schema`."""
-
- return self.dialect.get_table_oid(self.bind, table_name, schema,
- info_cache=self.info_cache)
-
-class CreateEnumType(schema._CreateDropBase):
- __visit_name__ = "create_enum_type"
-
-class DropEnumType(schema._CreateDropBase):
- __visit_name__ = "drop_enum_type"
-
-class PGExecutionContext(default.DefaultExecutionContext):
- def fire_sequence(self, seq, type_):
- return self._execute_scalar(("select nextval('%s')" % \
- self.dialect.identifier_preparer.format_sequence(seq)), type_)
-
- def get_insert_default(self, column):
- if column.primary_key and column is column.table._autoincrement_column:
- if column.server_default and column.server_default.has_argument:
-
- # pre-execute passive defaults on primary key columns
- return self._execute_scalar("select %s" %
- column.server_default.arg, column.type)
-
- elif (column.default is None or
- (column.default.is_sequence and
- column.default.optional)):
-
-                # execute the sequence associated with a SERIAL primary
-                # key column. For a non-primary-key SERIAL column, the
-                # value is simply generated server-side.
-
- try:
- seq_name = column._postgresql_seq_name
- except AttributeError:
- tab = column.table.name
- col = column.name
- tab = tab[0:29 + max(0, (29 - len(col)))]
- col = col[0:29 + max(0, (29 - len(tab)))]
- column._postgresql_seq_name = seq_name = "%s_%s_seq" % (tab, col)
-
- sch = column.table.schema
- if sch is not None:
- exc = "select nextval('\"%s\".\"%s\"')" % \
- (sch, seq_name)
- else:
- exc = "select nextval('\"%s\"')" % \
- (seq_name, )
-
- return self._execute_scalar(exc, column.type)
-
- return super(PGExecutionContext, self).get_insert_default(column)
-
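-# A standalone sketch of the implicit sequence-name derivation used in
-# get_insert_default() above: PostgreSQL truncates identifiers to 63
-# characters, so the table and column parts are clipped so that
-# "<table>_<column>_seq" stays within that limit.
-def _example_seq_name(tab, col):
-    tab = tab[0:29 + max(0, (29 - len(col)))]
-    col = col[0:29 + max(0, (29 - len(tab)))]
-    return "%s_%s_seq" % (tab, col)
-
-assert _example_seq_name('mytable', 'id') == 'mytable_id_seq'
-assert len(_example_seq_name('a' * 40, 'b' * 40)) <= 63
-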
-class PGDialect(default.DefaultDialect):
- name = 'postgresql'
- supports_alter = True
- max_identifier_length = 63
- supports_sane_rowcount = True
-
- supports_native_enum = True
- supports_native_boolean = True
-
- supports_sequences = True
- sequences_optional = True
- preexecute_autoincrement_sequences = True
- postfetch_lastrowid = False
-
- supports_default_values = True
- supports_empty_insert = False
- default_paramstyle = 'pyformat'
- ischema_names = ischema_names
- colspecs = colspecs
-
- statement_compiler = PGCompiler
- ddl_compiler = PGDDLCompiler
- type_compiler = PGTypeCompiler
- preparer = PGIdentifierPreparer
- execution_ctx_cls = PGExecutionContext
- inspector = PGInspector
- isolation_level = None
-
- # TODO: need to inspect "standard_conforming_strings"
- _backslash_escapes = True
-
- def __init__(self, isolation_level=None, **kwargs):
- default.DefaultDialect.__init__(self, **kwargs)
- self.isolation_level = isolation_level
-
- def initialize(self, connection):
- super(PGDialect, self).initialize(connection)
- self.implicit_returning = self.server_version_info > (8, 2) and \
- self.__dict__.get('implicit_returning', True)
- self.supports_native_enum = self.server_version_info >= (8, 3)
- if not self.supports_native_enum:
- self.colspecs = self.colspecs.copy()
- # pop base Enum type
- self.colspecs.pop(sqltypes.Enum, None)
- # psycopg2, others may have placed ENUM here as well
- self.colspecs.pop(ENUM, None)
-
- def on_connect(self):
- if self.isolation_level is not None:
- def connect(conn):
- self.set_isolation_level(conn, self.isolation_level)
- return connect
- else:
- return None
-
- _isolation_lookup = set(['SERIALIZABLE',
- 'READ UNCOMMITTED', 'READ COMMITTED', 'REPEATABLE READ'])
-
- def set_isolation_level(self, connection, level):
- level = level.replace('_', ' ')
- if level not in self._isolation_lookup:
- raise exc.ArgumentError(
- "Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
- (level, self.name, ", ".join(self._isolation_lookup))
- )
- cursor = connection.cursor()
- cursor.execute(
- "SET SESSION CHARACTERISTICS AS TRANSACTION "
- "ISOLATION LEVEL %s" % level)
- cursor.execute("COMMIT")
- cursor.close()
-
- def get_isolation_level(self, connection):
- cursor = connection.cursor()
- cursor.execute('show transaction isolation level')
- val = cursor.fetchone()[0]
- cursor.close()
- return val.upper()
-
- def do_begin_twophase(self, connection, xid):
- self.do_begin(connection.connection)
-
- def do_prepare_twophase(self, connection, xid):
- connection.execute("PREPARE TRANSACTION '%s'" % xid)
-
- def do_rollback_twophase(self, connection, xid,
- is_prepared=True, recover=False):
- if is_prepared:
- if recover:
-                # FIXME: ugly hack to get out of transaction context
-                # when committing recoverable transactions.  Must find
-                # a way to keep the dbapi from opening a transaction.
- connection.execute("ROLLBACK")
- connection.execute("ROLLBACK PREPARED '%s'" % xid)
- connection.execute("BEGIN")
- self.do_rollback(connection.connection)
- else:
- self.do_rollback(connection.connection)
-
- def do_commit_twophase(self, connection, xid,
- is_prepared=True, recover=False):
- if is_prepared:
- if recover:
- connection.execute("ROLLBACK")
- connection.execute("COMMIT PREPARED '%s'" % xid)
- connection.execute("BEGIN")
- self.do_rollback(connection.connection)
- else:
- self.do_commit(connection.connection)
-
- def do_recover_twophase(self, connection):
- resultset = connection.execute(
- sql.text("SELECT gid FROM pg_prepared_xacts"))
- return [row[0] for row in resultset]
-
- def _get_default_schema_name(self, connection):
- return connection.scalar("select current_schema()")
-
- def has_table(self, connection, table_name, schema=None):
- # seems like case gets folded in pg_class...
- if schema is None:
- cursor = connection.execute(
- sql.text(
- "select relname from pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where n.nspname=current_schema() and "
- "lower(relname)=:name",
- bindparams=[
- sql.bindparam('name', unicode(table_name.lower()),
- type_=sqltypes.Unicode)]
- )
- )
- else:
- cursor = connection.execute(
- sql.text(
- "select relname from pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where n.nspname=:schema and "
- "lower(relname)=:name",
- bindparams=[
- sql.bindparam('name',
- unicode(table_name.lower()), type_=sqltypes.Unicode),
- sql.bindparam('schema',
- unicode(schema), type_=sqltypes.Unicode)]
- )
- )
- return bool(cursor.first())
-
- def has_sequence(self, connection, sequence_name, schema=None):
- if schema is None:
- cursor = connection.execute(
- sql.text(
- "SELECT relname FROM pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where relkind='S' and "
- "n.nspname=current_schema() "
- "and lower(relname)=:name",
- bindparams=[
- sql.bindparam('name', unicode(sequence_name.lower()),
- type_=sqltypes.Unicode)
- ]
- )
- )
- else:
- cursor = connection.execute(
- sql.text(
- "SELECT relname FROM pg_class c join pg_namespace n on "
- "n.oid=c.relnamespace where relkind='S' and "
- "n.nspname=:schema and lower(relname)=:name",
- bindparams=[
- sql.bindparam('name', unicode(sequence_name.lower()),
- type_=sqltypes.Unicode),
- sql.bindparam('schema',
- unicode(schema), type_=sqltypes.Unicode)
- ]
- )
- )
-
- return bool(cursor.first())
-
- def has_type(self, connection, type_name, schema=None):
- bindparams = [
- sql.bindparam('typname',
- unicode(type_name), type_=sqltypes.Unicode),
- sql.bindparam('nspname',
- unicode(schema), type_=sqltypes.Unicode),
- ]
- if schema is not None:
- query = """
- SELECT EXISTS (
- SELECT * FROM pg_catalog.pg_type t, pg_catalog.pg_namespace n
- WHERE t.typnamespace = n.oid
- AND t.typname = :typname
- AND n.nspname = :nspname
- )
- """
- else:
- query = """
- SELECT EXISTS (
- SELECT * FROM pg_catalog.pg_type t
- WHERE t.typname = :typname
- AND pg_type_is_visible(t.oid)
- )
- """
- cursor = connection.execute(sql.text(query, bindparams=bindparams))
- return bool(cursor.scalar())
-
- def _get_server_version_info(self, connection):
- v = connection.execute("select version()").scalar()
- m = re.match('PostgreSQL (\d+)\.(\d+)(?:\.(\d+))?(?:devel)?', v)
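-        # e.g. "PostgreSQL 9.0.4 on x86_64-..." yields (9, 0, 4);
-        # "PostgreSQL 9.1devel" yields (9, 1).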
- if not m:
- raise AssertionError(
- "Could not determine version from string '%s'" % v)
- return tuple([int(x) for x in m.group(1, 2, 3) if x is not None])
-
- @reflection.cache
- def get_table_oid(self, connection, table_name, schema=None, **kw):
- """Fetch the oid for schema.table_name.
-
-        Several reflection methods require the table OID.  This method
-        lets it be fetched once and cached for subsequent calls.
-
- """
- table_oid = None
- if schema is not None:
- schema_where_clause = "n.nspname = :schema"
- else:
- schema_where_clause = "pg_catalog.pg_table_is_visible(c.oid)"
- query = """
- SELECT c.oid
- FROM pg_catalog.pg_class c
- LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
- WHERE (%s)
- AND c.relname = :table_name AND c.relkind in ('r','v')
- """ % schema_where_clause
-        # Since we're binding to unicode, table_name and schema must be
-        # unicode.
- table_name = unicode(table_name)
- if schema is not None:
- schema = unicode(schema)
- s = sql.text(query, bindparams=[
- sql.bindparam('table_name', type_=sqltypes.Unicode),
- sql.bindparam('schema', type_=sqltypes.Unicode)
- ],
- typemap={'oid':sqltypes.Integer}
- )
- c = connection.execute(s, table_name=table_name, schema=schema)
- table_oid = c.scalar()
- if table_oid is None:
- raise exc.NoSuchTableError(table_name)
- return table_oid
-
- @reflection.cache
- def get_schema_names(self, connection, **kw):
- s = """
- SELECT nspname
- FROM pg_namespace
- ORDER BY nspname
- """
- rp = connection.execute(s)
- # what about system tables?
- # Py3K
- #schema_names = [row[0] for row in rp \
- # if not row[0].startswith('pg_')]
- # Py2K
- schema_names = [row[0].decode(self.encoding) for row in rp \
- if not row[0].startswith('pg_')]
- # end Py2K
- return schema_names
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- if schema is not None:
- current_schema = schema
- else:
- current_schema = self.default_schema_name
-
- result = connection.execute(
- sql.text(u"SELECT relname FROM pg_class c "
- "WHERE relkind = 'r' "
- "AND '%s' = (select nspname from pg_namespace n "
- "where n.oid = c.relnamespace) " %
- current_schema,
- typemap = {'relname':sqltypes.Unicode}
- )
- )
- return [row[0] for row in result]
-
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- if schema is not None:
- current_schema = schema
- else:
- current_schema = self.default_schema_name
- s = """
- SELECT relname
- FROM pg_class c
- WHERE relkind = 'v'
- AND '%(schema)s' = (select nspname from pg_namespace n
- where n.oid = c.relnamespace)
- """ % dict(schema=current_schema)
- # Py3K
- #view_names = [row[0] for row in connection.execute(s)]
- # Py2K
- view_names = [row[0].decode(self.encoding)
- for row in connection.execute(s)]
- # end Py2K
- return view_names
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- if schema is not None:
- current_schema = schema
- else:
- current_schema = self.default_schema_name
- s = """
- SELECT definition FROM pg_views
- WHERE schemaname = :schema
- AND viewname = :view_name
- """
- rp = connection.execute(sql.text(s),
- view_name=view_name, schema=current_schema)
- if rp:
- # Py3K
- #view_def = rp.scalar()
- # Py2K
- view_def = rp.scalar().decode(self.encoding)
- # end Py2K
- return view_def
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
-
- table_oid = self.get_table_oid(connection, table_name, schema,
- info_cache=kw.get('info_cache'))
- SQL_COLS = """
- SELECT a.attname,
- pg_catalog.format_type(a.atttypid, a.atttypmod),
- (SELECT substring(pg_catalog.pg_get_expr(d.adbin, d.adrelid)
- for 128)
- FROM pg_catalog.pg_attrdef d
- WHERE d.adrelid = a.attrelid AND d.adnum = a.attnum
- AND a.atthasdef)
- AS DEFAULT,
- a.attnotnull, a.attnum, a.attrelid as table_oid
- FROM pg_catalog.pg_attribute a
- WHERE a.attrelid = :table_oid
- AND a.attnum > 0 AND NOT a.attisdropped
- ORDER BY a.attnum
- """
- s = sql.text(SQL_COLS,
- bindparams=[sql.bindparam('table_oid', type_=sqltypes.Integer)],
- typemap={'attname':sqltypes.Unicode, 'default':sqltypes.Unicode}
- )
- c = connection.execute(s, table_oid=table_oid)
- rows = c.fetchall()
- domains = self._load_domains(connection)
- enums = self._load_enums(connection)
-
- # format columns
- columns = []
- for name, format_type, default, notnull, attnum, table_oid in rows:
-            # strip (5) from character varying(5),
-            # timestamp(5) with time zone, etc.
- attype = re.sub(r'\([\d,]+\)', '', format_type)
-
- # strip '[]' from integer[], etc.
- attype = re.sub(r'\[\]', '', attype)
-
- nullable = not notnull
- is_array = format_type.endswith('[]')
- charlen = re.search('\(([\d,]+)\)', format_type)
- if charlen:
- charlen = charlen.group(1)
- kwargs = {}
- args = None
-
- if attype == 'numeric':
- if charlen:
- prec, scale = charlen.split(',')
- args = (int(prec), int(scale))
- else:
- args = ()
- elif attype == 'double precision':
- args = (53, )
- elif attype == 'integer':
- args = ()
- elif attype in ('timestamp with time zone',
- 'time with time zone'):
- kwargs['timezone'] = True
- if charlen:
- kwargs['precision'] = int(charlen)
- args = ()
- elif attype in ('timestamp without time zone',
- 'time without time zone', 'time'):
- kwargs['timezone'] = False
- if charlen:
- kwargs['precision'] = int(charlen)
- args = ()
- elif attype == 'bit varying':
- kwargs['varying'] = True
- if charlen:
- args = (int(charlen),)
- else:
- args = ()
- elif attype in ('interval','interval year to month',
- 'interval day to second'):
- if charlen:
- kwargs['precision'] = int(charlen)
- args = ()
- elif charlen:
- args = (int(charlen),)
- else:
- args = ()
-
- while True:
- if attype in self.ischema_names:
- coltype = self.ischema_names[attype]
- break
- elif attype in enums:
- enum = enums[attype]
- coltype = ENUM
- if "." in attype:
- kwargs['schema'], kwargs['name'] = attype.split('.')
- else:
- kwargs['name'] = attype
- args = tuple(enum['labels'])
- break
- elif attype in domains:
- domain = domains[attype]
- attype = domain['attype']
- # A table can't override whether the domain is nullable.
- nullable = domain['nullable']
- if domain['default'] and not default:
- # It can, however, override the default
- # value, but can't set it to null.
- default = domain['default']
- continue
- else:
- coltype = None
- break
-
- if coltype:
- coltype = coltype(*args, **kwargs)
- if is_array:
- coltype = ARRAY(coltype)
- else:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (attype, name))
- coltype = sqltypes.NULLTYPE
- # adjust the default value
- autoincrement = False
- if default is not None:
- match = re.search(r"""(nextval\(')([^']+)('.*$)""", default)
- if match is not None:
- autoincrement = True
- # the default is related to a Sequence
- sch = schema
- if '.' not in match.group(2) and sch is not None:
- # unconditionally quote the schema name. this could
- # later be enhanced to obey quoting rules /
- # "quote schema"
- default = match.group(1) + \
- ('"%s"' % sch) + '.' + \
- match.group(2) + match.group(3)
-
- column_info = dict(name=name, type=coltype, nullable=nullable,
- default=default, autoincrement=autoincrement)
- columns.append(column_info)
- return columns
-
- @reflection.cache
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- table_oid = self.get_table_oid(connection, table_name, schema,
- info_cache=kw.get('info_cache'))
- PK_SQL = """
- SELECT attname FROM pg_attribute
- WHERE attrelid = (
- SELECT indexrelid FROM pg_index i
- WHERE i.indrelid = :table_oid
- AND i.indisprimary = 't')
- ORDER BY attnum
- """
- t = sql.text(PK_SQL, typemap={'attname':sqltypes.Unicode})
- c = connection.execute(t, table_oid=table_oid)
- primary_keys = [r[0] for r in c.fetchall()]
- return primary_keys
-
- @reflection.cache
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
- cols = self.get_primary_keys(connection, table_name,
- schema=schema, **kw)
-
- table_oid = self.get_table_oid(connection, table_name, schema,
- info_cache=kw.get('info_cache'))
-
- PK_CONS_SQL = """
- SELECT conname
- FROM pg_catalog.pg_constraint r
- WHERE r.conrelid = :table_oid AND r.contype = 'p'
- ORDER BY 1
- """
- t = sql.text(PK_CONS_SQL, typemap={'conname':sqltypes.Unicode})
- c = connection.execute(t, table_oid=table_oid)
- name = c.scalar()
- return {
- 'constrained_columns':cols,
- 'name':name
- }
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- preparer = self.identifier_preparer
- table_oid = self.get_table_oid(connection, table_name, schema,
- info_cache=kw.get('info_cache'))
- FK_SQL = """
- SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef
- FROM pg_catalog.pg_constraint r
- WHERE r.conrelid = :table AND r.contype = 'f'
- ORDER BY 1
- """
-
- t = sql.text(FK_SQL, typemap={
- 'conname':sqltypes.Unicode,
- 'condef':sqltypes.Unicode})
- c = connection.execute(t, table=table_oid)
- fkeys = []
- for conname, condef in c.fetchall():
- m = re.search('FOREIGN KEY \((.*?)\) REFERENCES '
- '(?:(.*?)\.)?(.*?)\((.*?)\)', condef).groups()
- constrained_columns, referred_schema, \
- referred_table, referred_columns = m
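-            # e.g. a condef of
-            #   "FOREIGN KEY (user_id) REFERENCES myschema.users(id)"
-            # unpacks to ('user_id', 'myschema', 'users', 'id');
-            # the schema group is None when no qualifier is present.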
- constrained_columns = [preparer._unquote_identifier(x)
- for x in re.split(r'\s*,\s*', constrained_columns)]
- if referred_schema:
- referred_schema =\
- preparer._unquote_identifier(referred_schema)
- elif schema is not None and schema == self.default_schema_name:
-                # the constraint has no schema qualifier (i.e. it's the
-                # default schema), and the table we're reflecting was
-                # given the default schema explicitly - use that, i.e.
-                # try to follow the user's conventions.
- referred_schema = schema
- referred_table = preparer._unquote_identifier(referred_table)
-            referred_columns = [preparer._unquote_identifier(x)
-                                for x in re.split(r'\s*,\s*', referred_columns)]
- fkey_d = {
- 'name' : conname,
- 'constrained_columns' : constrained_columns,
- 'referred_schema' : referred_schema,
- 'referred_table' : referred_table,
- 'referred_columns' : referred_columns
- }
- fkeys.append(fkey_d)
- return fkeys
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema, **kw):
- table_oid = self.get_table_oid(connection, table_name, schema,
- info_cache=kw.get('info_cache'))
-
- IDX_SQL = """
- SELECT
- i.relname as relname,
- ix.indisunique, ix.indexprs, ix.indpred,
- a.attname
- FROM
- pg_class t
- join pg_index ix on t.oid = ix.indrelid
- join pg_class i on i.oid=ix.indexrelid
- left outer join
- pg_attribute a
- on t.oid=a.attrelid and a.attnum=ANY(ix.indkey)
- WHERE
- t.relkind = 'r'
- and t.oid = :table_oid
- and ix.indisprimary = 'f'
- ORDER BY
- t.relname,
- i.relname
- """
-
- t = sql.text(IDX_SQL, typemap={'attname':sqltypes.Unicode})
- c = connection.execute(t, table_oid=table_oid)
-
- index_names = {}
- indexes = []
- sv_idx_name = None
- for row in c.fetchall():
- idx_name, unique, expr, prd, col = row
- if expr:
- if idx_name != sv_idx_name:
- util.warn(
- "Skipped unsupported reflection of "
- "expression-based index %s"
- % idx_name)
- sv_idx_name = idx_name
- continue
-            if prd and idx_name != sv_idx_name:
- util.warn(
- "Predicate of partial index %s ignored during reflection"
- % idx_name)
- sv_idx_name = idx_name
- if idx_name in index_names:
- index_d = index_names[idx_name]
- else:
- index_d = {'column_names':[]}
- indexes.append(index_d)
- index_names[idx_name] = index_d
- index_d['name'] = idx_name
- if col is not None:
- index_d['column_names'].append(col)
- index_d['unique'] = unique
- return indexes
-
- def _load_enums(self, connection):
- if not self.supports_native_enum:
- return {}
-
- ## Load data types for enums:
- SQL_ENUMS = """
- SELECT t.typname as "name",
- -- no enum defaults in 8.4 at least
- -- t.typdefault as "default",
- pg_catalog.pg_type_is_visible(t.oid) as "visible",
- n.nspname as "schema",
- e.enumlabel as "label"
- FROM pg_catalog.pg_type t
- LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
- LEFT JOIN pg_catalog.pg_constraint r ON t.oid = r.contypid
- LEFT JOIN pg_catalog.pg_enum e ON t.oid = e.enumtypid
- WHERE t.typtype = 'e'
- ORDER BY "name", e.oid -- e.oid gives us label order
- """
-
- s = sql.text(SQL_ENUMS, typemap={
- 'attname':sqltypes.Unicode,
- 'label':sqltypes.Unicode})
- c = connection.execute(s)
-
- enums = {}
- for enum in c.fetchall():
- if enum['visible']:
-                # 'visible' just means whether or not the enum is in a
-                # schema that's on the search path -- or not overridden by
-                # a schema with higher precedence. If it's not visible,
-                # it will be prefixed with the schema name when it's used.
- name = enum['name']
- else:
- name = "%s.%s" % (enum['schema'], enum['name'])
-
- if name in enums:
- enums[name]['labels'].append(enum['label'])
- else:
- enums[name] = {
- 'labels': [enum['label']],
- }
-
- return enums
-
- def _load_domains(self, connection):
- ## Load data types for domains:
- SQL_DOMAINS = """
- SELECT t.typname as "name",
- pg_catalog.format_type(t.typbasetype, t.typtypmod) as "attype",
- not t.typnotnull as "nullable",
- t.typdefault as "default",
- pg_catalog.pg_type_is_visible(t.oid) as "visible",
- n.nspname as "schema"
- FROM pg_catalog.pg_type t
- LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
- LEFT JOIN pg_catalog.pg_constraint r ON t.oid = r.contypid
- WHERE t.typtype = 'd'
- """
-
- s = sql.text(SQL_DOMAINS, typemap={'attname':sqltypes.Unicode})
- c = connection.execute(s)
-
- domains = {}
- for domain in c.fetchall():
- ## strip (30) from character varying(30)
- attype = re.search('([^\(]+)', domain['attype']).group(1)
- if domain['visible']:
-                # 'visible' just means whether or not the domain is in a
-                # schema that's on the search path -- or not overridden by
-                # a schema with higher precedence. If it's not visible,
-                # it will be prefixed with the schema name when it's used.
- name = domain['name']
- else:
- name = "%s.%s" % (domain['schema'], domain['name'])
-
- domains[name] = {
- 'attype':attype,
- 'nullable': domain['nullable'],
- 'default': domain['default']
- }
-
- return domains
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pg8000.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pg8000.py
deleted file mode 100755
index ac927edb..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pg8000.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# postgresql/pg8000.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the PostgreSQL database via the pg8000 driver.
-
-Connecting
-----------
-
-URLs are of the form
-``postgresql+pg8000://user:password@host:port/dbname[?key=value&key=value...]``.
-
-Unicode
--------
-
-pg8000 requires that the PostgreSQL client encoding be
-configured in the postgresql.conf file in order to use encodings
-other than ASCII. Set this value to the same value as the
-"encoding" parameter on create_engine(), usually "utf-8".
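-
-For example, a sketch matching the engine-side encoding to the
-server-side client encoding (the connection URL is illustrative)::
-
-    engine = create_engine(
-        'postgresql+pg8000://scott:tiger@localhost/test',
-        encoding='utf-8')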
-
-Interval
---------
-
-Passing data to and from the Interval type is not yet supported.
-
-"""
-from sqlalchemy import util, exc
-from sqlalchemy.util.compat import decimal
-from sqlalchemy import processors
-from sqlalchemy import types as sqltypes
-from sqlalchemy.dialects.postgresql.base import PGDialect, \
- PGCompiler, PGIdentifierPreparer, PGExecutionContext,\
- _DECIMAL_TYPES, _FLOAT_TYPES, _INT_TYPES
-
-class _PGNumeric(sqltypes.Numeric):
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
- # pg8000 returns Decimal natively for 1700
- return None
- else:
- raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
- else:
- if coltype in _FLOAT_TYPES:
- # pg8000 returns float natively for 701
- return None
- elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
- return processors.to_float
- else:
- raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
-
-
-class _PGNumericNoBind(_PGNumeric):
- def bind_processor(self, dialect):
- return None
-
-class PGExecutionContext_pg8000(PGExecutionContext):
- pass
-
-
-class PGCompiler_pg8000(PGCompiler):
- def visit_mod(self, binary, **kw):
- return self.process(binary.left) + " %% " + self.process(binary.right)
-
- def post_process_text(self, text):
- if '%%' in text:
- util.warn("The SQLAlchemy postgresql dialect now automatically escapes '%' in text() "
- "expressions to '%%'.")
- return text.replace('%', '%%')
-
-
-class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
- def _escape_identifier(self, value):
- value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace('%', '%%')
-
-
-class PGDialect_pg8000(PGDialect):
- driver = 'pg8000'
-
- supports_unicode_statements = True
-
- supports_unicode_binds = True
-
- default_paramstyle = 'format'
- supports_sane_multi_rowcount = False
- execution_ctx_cls = PGExecutionContext_pg8000
- statement_compiler = PGCompiler_pg8000
- preparer = PGIdentifierPreparer_pg8000
- description_encoding = 'use_encoding'
-
- colspecs = util.update_copy(
- PGDialect.colspecs,
- {
- sqltypes.Numeric : _PGNumericNoBind,
- sqltypes.Float : _PGNumeric
- }
- )
-
- @classmethod
- def dbapi(cls):
- return __import__('pg8000').dbapi
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- if 'port' in opts:
- opts['port'] = int(opts['port'])
- opts.update(url.query)
- return ([], opts)
-
- def is_disconnect(self, e, connection, cursor):
- return "connection is closed" in str(e)
-
-dialect = PGDialect_pg8000
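As a minimal sketch, assuming placeholder credentials and a client encoding already configured as described above, an engine against this dialect could be created as::

    from sqlalchemy import create_engine

    # URL values below are placeholders, not part of the original module
    engine = create_engine(
        "postgresql+pg8000://scott:tiger@localhost:5432/test",
        encoding="utf-8",  # should match the client encoding in postgresql.conf
    )
    print engine.execute("select 1").scalar()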
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/psycopg2.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/psycopg2.py
deleted file mode 100755
index 2a3b4297..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/psycopg2.py
+++ /dev/null
@@ -1,334 +0,0 @@
-# postgresql/psycopg2.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the PostgreSQL database via the psycopg2 driver.
-
-Driver
-------
-
-The psycopg2 driver is available at http://pypi.python.org/pypi/psycopg2/ .
-The dialect has several behaviors which are specifically tailored towards compatibility
-with this module.
-
-Note that psycopg1 is **not** supported.
-
-Connecting
-----------
-
-URLs are of the form
-``postgresql+psycopg2://user:password@host:port/dbname[?key=value&key=value...]``.
-
-psycopg2-specific keyword arguments which are accepted by
-:func:`.create_engine()` are:
-
-* *server_side_cursors* - Enable the usage of "server side cursors" for SQL
- statements which support this feature. What this essentially means from a
- psycopg2 point of view is that the cursor is created using a name, e.g.
- ``connection.cursor('some name')``, which has the effect that result rows are
- not immediately pre-fetched and buffered after statement execution, but are
- instead left on the server and only retrieved as needed. SQLAlchemy's
- :class:`~sqlalchemy.engine.base.ResultProxy` uses special row-buffering
- behavior when this feature is enabled, such that groups of 100 rows at a
- time are fetched over the wire to reduce conversational overhead.
- Note that the ``stream_results=True`` execution option is a more targeted
- way of enabling this mode on a per-execution basis.
-* *use_native_unicode* - Enable the usage of Psycopg2 "native unicode" mode
- per connection. True by default.
-
-Per-Statement/Connection Execution Options
--------------------------------------------
-
-The following DBAPI-specific options are respected when used with
-:meth:`.Connection.execution_options`, :meth:`.Executable.execution_options`,
-:meth:`.Query.execution_options`, in addition to those not specific to DBAPIs:
-
-* isolation_level - Set the transaction isolation level for the lifespan of a
- :class:`.Connection` (can only be set on a connection, not a statement or query).
- This includes the options ``SERIALIZABLE``, ``READ COMMITTED``,
- ``READ UNCOMMITTED`` and ``REPEATABLE READ``.
-* stream_results - Enable or disable usage of server side cursors.
- If ``None`` or not set, the ``server_side_cursors`` option of the :class:`.Engine` is used.
-
-Unicode
--------
-
-By default, the psycopg2 driver uses the ``psycopg2.extensions.UNICODE``
-extension, such that the DBAPI receives and returns all strings as Python
-Unicode objects directly - SQLAlchemy passes these values through without
-change. Note that this setting requires that the PG client encoding be set to
-one which can accommodate the kind of character data being passed - typically
-``utf-8``. If the Postgresql database is configured for ``SQL_ASCII``
-encoding, which is often the default for PG installations, it may be necessary
-for non-ascii strings to be encoded into a specific encoding before being
-passed to the DBAPI. If changing the database's client encoding setting is not
-an option, specify ``use_native_unicode=False`` as a keyword argument to
-``create_engine()``, and take note of the ``encoding`` setting as well, which
-also defaults to ``utf-8``. Note that disabling "native unicode" mode has a
-slight performance penalty, as SQLAlchemy now must translate unicode strings
-to/from an encoding such as utf-8, a task that is handled more efficiently
-within the Psycopg2 driver natively.
-
-Transactions
-------------
-
-The psycopg2 dialect fully supports SAVEPOINT and two-phase commit operations.
-
-Transaction Isolation Level
----------------------------
-
-The ``isolation_level`` parameter of :func:`.create_engine` here makes use of
-psycopg2's ``set_isolation_level()`` connection method, rather than
-issuing a ``SET SESSION CHARACTERISTICS`` command. This is because psycopg2
-resets the isolation level on each new transaction, and needs to know
-at the API level what level should be used.
-
-NOTICE logging
----------------
-
-The psycopg2 dialect will log Postgresql NOTICE messages via the
-``sqlalchemy.dialects.postgresql`` logger::
-
- import logging
- logging.getLogger('sqlalchemy.dialects.postgresql').setLevel(logging.INFO)
-
-
-"""
-
-import random
-import re
-import logging
-
-from sqlalchemy import util, exc
-from sqlalchemy.util.compat import decimal
-from sqlalchemy import processors
-from sqlalchemy.engine import base
-from sqlalchemy.sql import expression
-from sqlalchemy import types as sqltypes
-from sqlalchemy.dialects.postgresql.base import PGDialect, PGCompiler, \
- PGIdentifierPreparer, PGExecutionContext, \
- ENUM, ARRAY, _DECIMAL_TYPES, _FLOAT_TYPES,\
- _INT_TYPES
-
-
-logger = logging.getLogger('sqlalchemy.dialects.postgresql')
-
-
-class _PGNumeric(sqltypes.Numeric):
- def bind_processor(self, dialect):
- return None
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- if coltype in _FLOAT_TYPES:
- return processors.to_decimal_processor_factory(decimal.Decimal)
- elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
-                # psycopg2 returns Decimal natively for 1700
- return None
- else:
- raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
- else:
- if coltype in _FLOAT_TYPES:
-                # psycopg2 returns float natively for 701
- return None
- elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
- return processors.to_float
- else:
- raise exc.InvalidRequestError(
- "Unknown PG numeric type: %d" % coltype)
-
-class _PGEnum(ENUM):
- def __init__(self, *arg, **kw):
- super(_PGEnum, self).__init__(*arg, **kw)
- # Py2K
- if self.convert_unicode:
- self.convert_unicode = "force"
- # end Py2K
-
-class _PGArray(ARRAY):
- def __init__(self, *arg, **kw):
- super(_PGArray, self).__init__(*arg, **kw)
- # Py2K
- # FIXME: this check won't work for setups that
- # have convert_unicode only on their create_engine().
- if isinstance(self.item_type, sqltypes.String) and \
- self.item_type.convert_unicode:
- self.item_type.convert_unicode = "force"
- # end Py2K
-
-# When we're handed literal SQL, ensure it's a SELECT-query. Since
-# 8.3, combining cursors and "FOR UPDATE" has been fine.
-SERVER_SIDE_CURSOR_RE = re.compile(
- r'\s*SELECT',
- re.I | re.UNICODE)
-
-class PGExecutionContext_psycopg2(PGExecutionContext):
- def create_cursor(self):
- # TODO: coverage for server side cursors + select.for_update()
-
- if self.dialect.server_side_cursors:
- is_server_side = \
- self.execution_options.get('stream_results', True) and (
- (self.compiled and isinstance(self.compiled.statement, expression.Selectable) \
- or \
- (
- (not self.compiled or
- isinstance(self.compiled.statement, expression._TextClause))
- and self.statement and SERVER_SIDE_CURSOR_RE.match(self.statement))
- )
- )
- else:
- is_server_side = self.execution_options.get('stream_results', False)
-
- self.__is_server_side = is_server_side
- if is_server_side:
- # use server-side cursors:
- # http://lists.initd.org/pipermail/psycopg/2007-January/005251.html
- ident = "c_%s_%s" % (hex(id(self))[2:], hex(random.randint(0, 65535))[2:])
- return self._dbapi_connection.cursor(ident)
- else:
- return self._dbapi_connection.cursor()
-
- def get_result_proxy(self):
- # TODO: ouch
- if logger.isEnabledFor(logging.INFO):
- self._log_notices(self.cursor)
-
- if self.__is_server_side:
- return base.BufferedRowResultProxy(self)
- else:
- return base.ResultProxy(self)
-
- def _log_notices(self, cursor):
- for notice in cursor.connection.notices:
- # NOTICE messages have a
- # newline character at the end
- logger.info(notice.rstrip())
-
- cursor.connection.notices[:] = []
-
-
-class PGCompiler_psycopg2(PGCompiler):
- def visit_mod(self, binary, **kw):
- return self.process(binary.left) + " %% " + self.process(binary.right)
-
- def post_process_text(self, text):
- return text.replace('%', '%%')
-
-
-class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer):
- def _escape_identifier(self, value):
- value = value.replace(self.escape_quote, self.escape_to_quote)
- return value.replace('%', '%%')
-
-class PGDialect_psycopg2(PGDialect):
- driver = 'psycopg2'
- # Py2K
- supports_unicode_statements = False
- # end Py2K
- default_paramstyle = 'pyformat'
- supports_sane_multi_rowcount = False
- execution_ctx_cls = PGExecutionContext_psycopg2
- statement_compiler = PGCompiler_psycopg2
- preparer = PGIdentifierPreparer_psycopg2
- psycopg2_version = (0, 0)
-
- colspecs = util.update_copy(
- PGDialect.colspecs,
- {
- sqltypes.Numeric : _PGNumeric,
- ENUM : _PGEnum, # needs force_unicode
- sqltypes.Enum : _PGEnum, # needs force_unicode
- ARRAY : _PGArray, # needs force_unicode
- }
- )
-
- def __init__(self, server_side_cursors=False, use_native_unicode=True, **kwargs):
- PGDialect.__init__(self, **kwargs)
- self.server_side_cursors = server_side_cursors
- self.use_native_unicode = use_native_unicode
- self.supports_unicode_binds = use_native_unicode
- if self.dbapi and hasattr(self.dbapi, '__version__'):
- m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
- self.dbapi.__version__)
- if m:
- self.psycopg2_version = tuple(
- int(x)
- for x in m.group(1, 2, 3)
- if x is not None)
-
- @classmethod
- def dbapi(cls):
- psycopg = __import__('psycopg2')
- return psycopg
-
- @util.memoized_property
- def _isolation_lookup(self):
- extensions = __import__('psycopg2.extensions').extensions
- return {
- 'READ COMMITTED':extensions.ISOLATION_LEVEL_READ_COMMITTED,
- 'READ UNCOMMITTED':extensions.ISOLATION_LEVEL_READ_UNCOMMITTED,
- 'REPEATABLE READ':extensions.ISOLATION_LEVEL_REPEATABLE_READ,
- 'SERIALIZABLE':extensions.ISOLATION_LEVEL_SERIALIZABLE
- }
-
- def set_isolation_level(self, connection, level):
- try:
- level = self._isolation_lookup[level.replace('_', ' ')]
- except KeyError:
- raise exc.ArgumentError(
- "Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
- (level, self.name, ", ".join(self._isolation_lookup))
- )
-
- connection.set_isolation_level(level)
-
- def on_connect(self):
- if self.isolation_level is not None:
- def base_on_connect(conn):
- self.set_isolation_level(conn, self.isolation_level)
- else:
- base_on_connect = None
-
- if self.dbapi and self.use_native_unicode:
- extensions = __import__('psycopg2.extensions').extensions
- def connect(conn):
- extensions.register_type(extensions.UNICODE, conn)
- if base_on_connect:
- base_on_connect(conn)
- return connect
- else:
- return base_on_connect
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- if 'port' in opts:
- opts['port'] = int(opts['port'])
- opts.update(url.query)
- return ([], opts)
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, self.dbapi.OperationalError):
- # these error messages from libpq: interfaces/libpq/fe-misc.c.
- # TODO: these are sent through gettext in libpq and we can't
- # check within other locales - consider using connection.closed
- return 'closed the connection' in str(e) or \
- 'connection not open' in str(e) or \
- 'could not receive data from server' in str(e)
- elif isinstance(e, self.dbapi.InterfaceError):
-            # psycopg2 client errors, psycopg2/connection.h, psycopg2/cursor.h
- return 'connection already closed' in str(e) or \
- 'cursor already closed' in str(e)
- elif isinstance(e, self.dbapi.ProgrammingError):
- # not sure where this path is originally from, it may
- # be obsolete. It really says "losed", not "closed".
- return "losed the connection unexpectedly" in str(e)
- else:
- return False
-
-dialect = PGDialect_psycopg2
-
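A sketch of the psycopg2-specific options documented above, using placeholder connection details: server-side cursors enabled engine-wide, an isolation level fixed at engine creation, and the per-execution ``stream_results`` override::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",  # placeholder URL
        server_side_cursors=True,
        isolation_level="READ COMMITTED",
    )

    # disable the named (server-side) cursor for one particular connection
    conn = engine.connect().execution_options(stream_results=False)
    rows = conn.execute("select * from some_table").fetchall()  # hypothetical table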
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pypostgresql.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pypostgresql.py
deleted file mode 100755
index a137a624..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/pypostgresql.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# postgresql/pypostgresql.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the PostgreSQL database via py-postgresql.
-
-Connecting
-----------
-
-URLs are of the form ``postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]``.
-
-
-"""
-from sqlalchemy import util
-from sqlalchemy import types as sqltypes
-from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext
-from sqlalchemy import processors
-
-class PGNumeric(sqltypes.Numeric):
- def bind_processor(self, dialect):
- return processors.to_str
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- return None
- else:
- return processors.to_float
-
-class PGExecutionContext_pypostgresql(PGExecutionContext):
- pass
-
-class PGDialect_pypostgresql(PGDialect):
- driver = 'pypostgresql'
-
- supports_unicode_statements = True
- supports_unicode_binds = True
- description_encoding = None
- default_paramstyle = 'pyformat'
-
- # requires trunk version to support sane rowcounts
-    # TODO: use dbapi version information to set this flag appropriately
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = False
-
- execution_ctx_cls = PGExecutionContext_pypostgresql
- colspecs = util.update_copy(
- PGDialect.colspecs,
- {
- sqltypes.Numeric : PGNumeric,
- sqltypes.Float: sqltypes.Float, # prevents PGNumeric from being used
- }
- )
-
- @classmethod
- def dbapi(cls):
- from postgresql.driver import dbapi20
- return dbapi20
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user')
- if 'port' in opts:
- opts['port'] = int(opts['port'])
- else:
- opts['port'] = 5432
- opts.update(url.query)
- return ([], opts)
-
- def is_disconnect(self, e, connection, cursor):
- return "connection is closed" in str(e)
-
-dialect = PGDialect_pypostgresql
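The connection recipe mirrors the other PostgreSQL drivers; a hypothetical example, in which all URL components are placeholders::

    from sqlalchemy import create_engine

    # py-postgresql targets Python 3; the port defaults to 5432 if omitted
    engine = create_engine("postgresql+pypostgresql://scott:tiger@localhost/test")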
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/zxjdbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/zxjdbc.py
deleted file mode 100755
index f64b42ac..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/postgresql/zxjdbc.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# postgresql/zxjdbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the PostgreSQL database via the zxjdbc JDBC connector.
-
-JDBC Driver
------------
-
-The official Postgresql JDBC driver is at http://jdbc.postgresql.org/.
-
-"""
-from sqlalchemy.connectors.zxJDBC import ZxJDBCConnector
-from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext
-
-class PGExecutionContext_zxjdbc(PGExecutionContext):
-
- def create_cursor(self):
- cursor = self._dbapi_connection.cursor()
- cursor.datahandler = self.dialect.DataHandler(cursor.datahandler)
- return cursor
-
-
-class PGDialect_zxjdbc(ZxJDBCConnector, PGDialect):
- jdbc_db_name = 'postgresql'
- jdbc_driver_name = 'org.postgresql.Driver'
-
- execution_ctx_cls = PGExecutionContext_zxjdbc
-
- supports_native_decimal = True
-
- def __init__(self, *args, **kwargs):
- super(PGDialect_zxjdbc, self).__init__(*args, **kwargs)
- from com.ziclix.python.sql.handler import PostgresqlDataHandler
- self.DataHandler = PostgresqlDataHandler
-
- def _get_server_version_info(self, connection):
- return tuple(int(x) for x in connection.connection.dbversion.split('.'))
-
-dialect = PGDialect_zxjdbc
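Under Jython, and assuming the Postgresql JDBC driver is available on the CLASSPATH, a hypothetical connection sketch would be::

    from sqlalchemy import create_engine

    # Jython only; credentials and database name are placeholders
    engine = create_engine("postgresql+zxjdbc://scott:tiger@localhost/test")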
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/__init__.py
deleted file mode 100755
index d939d51c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/__init__.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# sqlite/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.sqlite import base, pysqlite
-
-# default dialect
-base.dialect = pysqlite.dialect
-
-
-from sqlalchemy.dialects.sqlite.base import \
- BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL, FLOAT, INTEGER, REAL,\
- NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR, dialect
-
-__all__ = (
- 'BLOB', 'BOOLEAN', 'CHAR', 'DATE', 'DATETIME', 'DECIMAL', 'FLOAT', 'INTEGER',
- 'NUMERIC', 'SMALLINT', 'TEXT', 'TIME', 'TIMESTAMP', 'VARCHAR', 'dialect', 'REAL'
-) \ No newline at end of file
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/base.py
deleted file mode 100755
index 4ab50931..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/base.py
+++ /dev/null
@@ -1,753 +0,0 @@
-# sqlite/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the SQLite database.
-
-For information on connecting using a specific driver, see the documentation
-section regarding that driver.
-
-Date and Time Types
--------------------
-
-SQLite does not have built-in DATE, TIME, or DATETIME types, and pysqlite does not provide
-out of the box functionality for translating values between Python `datetime` objects
-and a SQLite-supported format. SQLAlchemy's own :class:`~sqlalchemy.types.DateTime`
-and related types provide date formatting and parsing functionality when SQLite is used.
-The implementation classes are :class:`.DATETIME`, :class:`.DATE` and :class:`.TIME`.
-These types represent dates and times as ISO formatted strings, which also nicely
-support ordering. There's no reliance on typical "libc" internals for these functions
-so historical dates are fully supported.
-
-Auto Incrementing Behavior
---------------------------
-
-Background on SQLite's autoincrement is at: http://sqlite.org/autoinc.html
-
-Two things to note:
-
-* The AUTOINCREMENT keyword is **not** required for SQLite tables to
- generate primary key values automatically. AUTOINCREMENT only means that
- the algorithm used to generate ROWID values should be slightly different.
-* SQLite does **not** generate primary key (i.e. ROWID) values, even for
- one column, if the table has a composite (i.e. multi-column) primary key.
- This is regardless of the AUTOINCREMENT keyword being present or not.
-
-To specifically render the AUTOINCREMENT keyword on the primary key
-column when rendering DDL, add the flag ``sqlite_autoincrement=True``
-to the Table construct::
-
- Table('sometable', metadata,
- Column('id', Integer, primary_key=True),
- sqlite_autoincrement=True)
-
-Transaction Isolation Level
----------------------------
-
-:func:`create_engine` accepts an ``isolation_level`` parameter which results in
-the command ``PRAGMA read_uncommitted <level>`` being invoked for every new
-connection. Valid values for this parameter are ``SERIALIZABLE`` and
-``READ UNCOMMITTED`` corresponding to a value of 0 and 1, respectively.
-
-"""
-
-import datetime, re
-
-from sqlalchemy import sql, exc
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy import util
-from sqlalchemy.sql import compiler
-from sqlalchemy import processors
-
-from sqlalchemy.types import BLOB, BOOLEAN, CHAR, DATE, DATETIME, DECIMAL,\
- FLOAT, REAL, INTEGER, NUMERIC, SMALLINT, TEXT, TIME, TIMESTAMP, VARCHAR
-
-class _DateTimeMixin(object):
- _reg = None
- _storage_format = None
-
- def __init__(self, storage_format=None, regexp=None, **kw):
- super(_DateTimeMixin, self).__init__(**kw)
- if regexp is not None:
- self._reg = re.compile(regexp)
- if storage_format is not None:
- self._storage_format = storage_format
-
-class DATETIME(_DateTimeMixin, sqltypes.DateTime):
- """Represent a Python datetime object in SQLite using a string.
-
- The default string storage format is::
-
- "%04d-%02d-%02d %02d:%02d:%02d.%06d" % (value.year,
- value.month, value.day,
- value.hour, value.minute,
- value.second, value.microsecond)
-
- e.g.::
-
- 2011-03-15 12:05:57.10558
-
- The storage format can be customized to some degree using the
- ``storage_format`` and ``regexp`` parameters, such as::
-
- import re
- from sqlalchemy.dialects.sqlite import DATETIME
-
- dt = DATETIME(
- storage_format="%04d/%02d/%02d %02d-%02d-%02d-%06d",
- regexp=re.compile("(\d+)/(\d+)/(\d+) (\d+)-(\d+)-(\d+)(?:-(\d+))?")
- )
-
-    :param storage_format: format string which will be applied to the
- tuple ``(value.year, value.month, value.day, value.hour,
- value.minute, value.second, value.microsecond)``, given a
- Python datetime.datetime() object.
-
- :param regexp: regular expression which will be applied to
-    incoming result rows. The resulting match object is applied to
- the Python datetime() constructor via ``*map(int,
- match_obj.groups(0))``.
- """
-
- _storage_format = "%04d-%02d-%02d %02d:%02d:%02d.%06d"
-
- def bind_processor(self, dialect):
- datetime_datetime = datetime.datetime
- datetime_date = datetime.date
- format = self._storage_format
- def process(value):
- if value is None:
- return None
- elif isinstance(value, datetime_datetime):
- return format % (value.year, value.month, value.day,
- value.hour, value.minute, value.second,
- value.microsecond)
- elif isinstance(value, datetime_date):
- return format % (value.year, value.month, value.day,
- 0, 0, 0, 0)
- else:
- raise TypeError("SQLite DateTime type only accepts Python "
- "datetime and date objects as input.")
- return process
-
- def result_processor(self, dialect, coltype):
- if self._reg:
- return processors.str_to_datetime_processor_factory(
- self._reg, datetime.datetime)
- else:
- return processors.str_to_datetime
-
-class DATE(_DateTimeMixin, sqltypes.Date):
- """Represent a Python date object in SQLite using a string.
-
- The default string storage format is::
-
- "%04d-%02d-%02d" % (value.year, value.month, value.day)
-
- e.g.::
-
- 2011-03-15
-
- The storage format can be customized to some degree using the
- ``storage_format`` and ``regexp`` parameters, such as::
-
- import re
- from sqlalchemy.dialects.sqlite import DATE
-
- d = DATE(
- storage_format="%02d/%02d/%02d",
- regexp=re.compile("(\d+)/(\d+)/(\d+)")
- )
-
-    :param storage_format: format string which will be applied to the
- tuple ``(value.year, value.month, value.day)``,
- given a Python datetime.date() object.
-
- :param regexp: regular expression which will be applied to
-    incoming result rows. The resulting match object is applied to
- the Python date() constructor via ``*map(int,
- match_obj.groups(0))``.
-
- """
-
- _storage_format = "%04d-%02d-%02d"
-
- def bind_processor(self, dialect):
- datetime_date = datetime.date
- format = self._storage_format
- def process(value):
- if value is None:
- return None
- elif isinstance(value, datetime_date):
- return format % (value.year, value.month, value.day)
- else:
- raise TypeError("SQLite Date type only accepts Python "
- "date objects as input.")
- return process
-
- def result_processor(self, dialect, coltype):
- if self._reg:
- return processors.str_to_datetime_processor_factory(
- self._reg, datetime.date)
- else:
- return processors.str_to_date
-
-class TIME(_DateTimeMixin, sqltypes.Time):
- """Represent a Python time object in SQLite using a string.
-
- The default string storage format is::
-
- "%02d:%02d:%02d.%06d" % (value.hour, value.minute,
- value.second,
- value.microsecond)
-
- e.g.::
-
- 12:05:57.10558
-
- The storage format can be customized to some degree using the
- ``storage_format`` and ``regexp`` parameters, such as::
-
- import re
- from sqlalchemy.dialects.sqlite import TIME
-
- t = TIME(
- storage_format="%02d-%02d-%02d-%06d",
-        regexp=re.compile("(\d+)-(\d+)-(\d+)(?:-(\d+))?")
- )
-
-    :param storage_format: format string which will be applied
- to the tuple ``(value.hour, value.minute, value.second,
- value.microsecond)``, given a Python datetime.time() object.
-
- :param regexp: regular expression which will be applied to
-    incoming result rows. The resulting match object is applied to
- the Python time() constructor via ``*map(int,
- match_obj.groups(0))``.
-
- """
-
- _storage_format = "%02d:%02d:%02d.%06d"
-
- def bind_processor(self, dialect):
- datetime_time = datetime.time
- format = self._storage_format
- def process(value):
- if value is None:
- return None
- elif isinstance(value, datetime_time):
- return format % (value.hour, value.minute, value.second,
- value.microsecond)
- else:
- raise TypeError("SQLite Time type only accepts Python "
- "time objects as input.")
- return process
-
- def result_processor(self, dialect, coltype):
- if self._reg:
- return processors.str_to_datetime_processor_factory(
- self._reg, datetime.time)
- else:
- return processors.str_to_time
-
-colspecs = {
- sqltypes.Date: DATE,
- sqltypes.DateTime: DATETIME,
- sqltypes.Time: TIME,
-}
-
-ischema_names = {
- 'BLOB': sqltypes.BLOB,
- 'BOOL': sqltypes.BOOLEAN,
- 'BOOLEAN': sqltypes.BOOLEAN,
- 'CHAR': sqltypes.CHAR,
- 'DATE': sqltypes.DATE,
- 'DATETIME': sqltypes.DATETIME,
- 'DECIMAL': sqltypes.DECIMAL,
- 'FLOAT': sqltypes.FLOAT,
- 'INT': sqltypes.INTEGER,
- 'INTEGER': sqltypes.INTEGER,
- 'NUMERIC': sqltypes.NUMERIC,
- 'REAL': sqltypes.REAL,
- 'SMALLINT': sqltypes.SMALLINT,
- 'TEXT': sqltypes.TEXT,
- 'TIME': sqltypes.TIME,
- 'TIMESTAMP': sqltypes.TIMESTAMP,
- 'VARCHAR': sqltypes.VARCHAR,
-}
-
-
-
-class SQLiteCompiler(compiler.SQLCompiler):
- extract_map = util.update_copy(
- compiler.SQLCompiler.extract_map,
- {
- 'month': '%m',
- 'day': '%d',
- 'year': '%Y',
- 'second': '%S',
- 'hour': '%H',
- 'doy': '%j',
- 'minute': '%M',
- 'epoch': '%s',
- 'dow': '%w',
- 'week': '%W'
- })
-
- def visit_now_func(self, fn, **kw):
- return "CURRENT_TIMESTAMP"
-
- def visit_char_length_func(self, fn, **kw):
- return "length%s" % self.function_argspec(fn)
-
- def visit_cast(self, cast, **kwargs):
- if self.dialect.supports_cast:
- return super(SQLiteCompiler, self).visit_cast(cast)
- else:
- return self.process(cast.clause)
-
- def visit_extract(self, extract, **kw):
- try:
- return "CAST(STRFTIME('%s', %s) AS INTEGER)" % (
- self.extract_map[extract.field], self.process(extract.expr, **kw))
- except KeyError:
- raise exc.ArgumentError(
- "%s is not a valid extract argument." % extract.field)
-
- def limit_clause(self, select):
- text = ""
- if select._limit is not None:
- text += "\n LIMIT " + self.process(sql.literal(select._limit))
- if select._offset is not None:
- if select._limit is None:
- text += "\n LIMIT " + self.process(sql.literal(-1))
- text += " OFFSET " + self.process(sql.literal(select._offset))
- else:
- text += " OFFSET " + self.process(sql.literal(0))
- return text
-
- def for_update_clause(self, select):
- # sqlite has no "FOR UPDATE" AFAICT
- return ''
-
-
-class SQLiteDDLCompiler(compiler.DDLCompiler):
-
- def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column) + " " + self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
-
- if column.primary_key and \
- column.table.kwargs.get('sqlite_autoincrement', False) and \
- len(column.table.primary_key.columns) == 1 and \
- issubclass(column.type._type_affinity, sqltypes.Integer) and \
- not column.foreign_keys:
- colspec += " PRIMARY KEY AUTOINCREMENT"
-
- return colspec
-
- def visit_primary_key_constraint(self, constraint):
- # for columns with sqlite_autoincrement=True,
- # the PRIMARY KEY constraint can only be inline
- # with the column itself.
- if len(constraint.columns) == 1:
- c = list(constraint)[0]
- if c.primary_key and \
- c.table.kwargs.get('sqlite_autoincrement', False) and \
- issubclass(c.type._type_affinity, sqltypes.Integer) and \
- not c.foreign_keys:
- return None
-
- return super(SQLiteDDLCompiler, self).\
- visit_primary_key_constraint(constraint)
-
- def visit_foreign_key_constraint(self, constraint):
-
- local_table = constraint._elements.values()[0].parent.table
- remote_table = list(constraint._elements.values())[0].column.table
-
- if local_table.schema != remote_table.schema:
- return None
- else:
- return super(SQLiteDDLCompiler, self).visit_foreign_key_constraint(constraint)
-
- def define_constraint_remote_table(self, constraint, table, preparer):
- """Format the remote table clause of a CREATE CONSTRAINT clause."""
-
- return preparer.format_table(table, use_schema=False)
-
- def visit_create_index(self, create):
- index = create.element
- preparer = self.preparer
- text = "CREATE "
- if index.unique:
- text += "UNIQUE "
- text += "INDEX %s ON %s (%s)" \
- % (preparer.format_index(index,
- name=self._index_identifier(index.name)),
- preparer.format_table(index.table, use_schema=False),
- ', '.join(preparer.quote(c.name, c.quote)
- for c in index.columns))
- return text
-
-class SQLiteTypeCompiler(compiler.GenericTypeCompiler):
- def visit_large_binary(self, type_):
- return self.visit_BLOB(type_)
-
-class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
- reserved_words = set([
- 'add', 'after', 'all', 'alter', 'analyze', 'and', 'as', 'asc',
- 'attach', 'autoincrement', 'before', 'begin', 'between', 'by',
- 'cascade', 'case', 'cast', 'check', 'collate', 'column', 'commit',
- 'conflict', 'constraint', 'create', 'cross', 'current_date',
- 'current_time', 'current_timestamp', 'database', 'default',
- 'deferrable', 'deferred', 'delete', 'desc', 'detach', 'distinct',
- 'drop', 'each', 'else', 'end', 'escape', 'except', 'exclusive',
- 'explain', 'false', 'fail', 'for', 'foreign', 'from', 'full', 'glob',
- 'group', 'having', 'if', 'ignore', 'immediate', 'in', 'index',
- 'indexed', 'initially', 'inner', 'insert', 'instead', 'intersect', 'into', 'is',
- 'isnull', 'join', 'key', 'left', 'like', 'limit', 'match', 'natural',
- 'not', 'notnull', 'null', 'of', 'offset', 'on', 'or', 'order', 'outer',
- 'plan', 'pragma', 'primary', 'query', 'raise', 'references',
- 'reindex', 'rename', 'replace', 'restrict', 'right', 'rollback',
- 'row', 'select', 'set', 'table', 'temp', 'temporary', 'then', 'to',
- 'transaction', 'trigger', 'true', 'union', 'unique', 'update', 'using',
- 'vacuum', 'values', 'view', 'virtual', 'when', 'where',
- ])
-
- def format_index(self, index, use_schema=True, name=None):
- """Prepare a quoted index and schema name."""
-
- if name is None:
- name = index.name
- result = self.quote(name, index.quote)
- if not self.omit_schema and use_schema and getattr(index.table, "schema", None):
- result = self.quote_schema(index.table.schema, index.table.quote_schema) + "." + result
- return result
-
-class SQLiteExecutionContext(default.DefaultExecutionContext):
- def get_result_proxy(self):
- rp = base.ResultProxy(self)
- if rp._metadata:
- # adjust for dotted column names. SQLite
- # in the case of UNION may store col names as
- # "tablename.colname"
- # in cursor.description
- for colname in rp._metadata.keys:
- if "." in colname:
- trunc_col = colname.split(".")[1]
- rp._metadata._set_keymap_synonym(trunc_col, colname)
- return rp
-
-class SQLiteDialect(default.DefaultDialect):
- name = 'sqlite'
- supports_alter = False
- supports_unicode_statements = True
- supports_unicode_binds = True
- supports_default_values = True
- supports_empty_insert = False
- supports_cast = True
-
- default_paramstyle = 'qmark'
- statement_compiler = SQLiteCompiler
- ddl_compiler = SQLiteDDLCompiler
- type_compiler = SQLiteTypeCompiler
- preparer = SQLiteIdentifierPreparer
- ischema_names = ischema_names
- colspecs = colspecs
- isolation_level = None
- execution_ctx_cls = SQLiteExecutionContext
-
- supports_cast = True
- supports_default_values = True
-
- def __init__(self, isolation_level=None, native_datetime=False, **kwargs):
- default.DefaultDialect.__init__(self, **kwargs)
- self.isolation_level = isolation_level
-
- # this flag used by pysqlite dialect, and perhaps others in the
- # future, to indicate the driver is handling date/timestamp
- # conversions (and perhaps datetime/time as well on some
- # hypothetical driver ?)
- self.native_datetime = native_datetime
-
- if self.dbapi is not None:
- self.supports_default_values = \
- self.dbapi.sqlite_version_info >= (3, 3, 8)
- self.supports_cast = \
- self.dbapi.sqlite_version_info >= (3, 2, 3)
-
- _isolation_lookup = {
- 'READ UNCOMMITTED':1,
- 'SERIALIZABLE':0
- }
- def set_isolation_level(self, connection, level):
- try:
- isolation_level = self._isolation_lookup[level.replace('_', ' ')]
- except KeyError:
- raise exc.ArgumentError(
- "Invalid value '%s' for isolation_level. "
- "Valid isolation levels for %s are %s" %
- (level, self.name, ", ".join(self._isolation_lookup))
- )
- cursor = connection.cursor()
- cursor.execute("PRAGMA read_uncommitted = %d" % isolation_level)
- cursor.close()
-
- def get_isolation_level(self, connection):
- cursor = connection.cursor()
- cursor.execute('PRAGMA read_uncommitted')
- value = cursor.fetchone()[0]
- cursor.close()
- if value == 0:
- return "SERIALIZABLE"
- elif value == 1:
- return "READ UNCOMMITTED"
- else:
- assert False, "Unknown isolation level %s" % value
-
- def on_connect(self):
- if self.isolation_level is not None:
- def connect(conn):
- self.set_isolation_level(conn, self.isolation_level)
- return connect
- else:
- return None
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- if schema is not None:
- qschema = self.identifier_preparer.quote_identifier(schema)
- master = '%s.sqlite_master' % qschema
- s = ("SELECT name FROM %s "
- "WHERE type='table' ORDER BY name") % (master,)
- rs = connection.execute(s)
- else:
- try:
- s = ("SELECT name FROM "
- " (SELECT * FROM sqlite_master UNION ALL "
- " SELECT * FROM sqlite_temp_master) "
- "WHERE type='table' ORDER BY name")
- rs = connection.execute(s)
- except exc.DBAPIError:
- raise
- s = ("SELECT name FROM sqlite_master "
- "WHERE type='table' ORDER BY name")
- rs = connection.execute(s)
-
- return [row[0] for row in rs]
-
- def has_table(self, connection, table_name, schema=None):
- quote = self.identifier_preparer.quote_identifier
- if schema is not None:
- pragma = "PRAGMA %s." % quote(schema)
- else:
- pragma = "PRAGMA "
- qtable = quote(table_name)
- cursor = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))
- row = cursor.fetchone()
-
- # consume remaining rows, to work around
- # http://www.sqlite.org/cvstrac/tktview?tn=1884
- while not cursor.closed and cursor.fetchone() is not None:
- pass
-
- return (row is not None)
-
- @reflection.cache
- def get_view_names(self, connection, schema=None, **kw):
- if schema is not None:
- qschema = self.identifier_preparer.quote_identifier(schema)
- master = '%s.sqlite_master' % qschema
- s = ("SELECT name FROM %s "
- "WHERE type='view' ORDER BY name") % (master,)
- rs = connection.execute(s)
- else:
- try:
- s = ("SELECT name FROM "
- " (SELECT * FROM sqlite_master UNION ALL "
- " SELECT * FROM sqlite_temp_master) "
- "WHERE type='view' ORDER BY name")
- rs = connection.execute(s)
- except exc.DBAPIError:
- raise
- s = ("SELECT name FROM sqlite_master "
- "WHERE type='view' ORDER BY name")
- rs = connection.execute(s)
-
- return [row[0] for row in rs]
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- quote = self.identifier_preparer.quote_identifier
- if schema is not None:
- qschema = self.identifier_preparer.quote_identifier(schema)
- master = '%s.sqlite_master' % qschema
- s = ("SELECT sql FROM %s WHERE name = '%s'"
- "AND type='view'") % (master, view_name)
- rs = connection.execute(s)
- else:
- try:
- s = ("SELECT sql FROM "
- " (SELECT * FROM sqlite_master UNION ALL "
- " SELECT * FROM sqlite_temp_master) "
- "WHERE name = '%s' "
- "AND type='view'") % view_name
- rs = connection.execute(s)
- except exc.DBAPIError:
- raise
- s = ("SELECT sql FROM sqlite_master WHERE name = '%s' "
- "AND type='view'") % view_name
- rs = connection.execute(s)
-
- result = rs.fetchall()
- if result:
- return result[0].sql
-
- @reflection.cache
- def get_columns(self, connection, table_name, schema=None, **kw):
- quote = self.identifier_preparer.quote_identifier
- if schema is not None:
- pragma = "PRAGMA %s." % quote(schema)
- else:
- pragma = "PRAGMA "
- qtable = quote(table_name)
- c = _pragma_cursor(connection.execute("%stable_info(%s)" % (pragma, qtable)))
- found_table = False
- columns = []
- while True:
- row = c.fetchone()
- if row is None:
- break
-            (name, type_, nullable, default, has_default, primary_key) = (
-                row[1], row[2].upper(), not row[3], row[4],
-                row[4] is not None, row[5])
- name = re.sub(r'^\"|\"$', '', name)
- if default:
- default = re.sub(r"^\'|\'$", '', default)
- match = re.match(r'(\w+)(\(.*?\))?', type_)
- if match:
- coltype = match.group(1)
- args = match.group(2)
- else:
- coltype = "VARCHAR"
- args = ''
- try:
- coltype = self.ischema_names[coltype]
- if args is not None:
- args = re.findall(r'(\d+)', args)
- coltype = coltype(*[int(a) for a in args])
- except KeyError:
- util.warn("Did not recognize type '%s' of column '%s'" %
- (coltype, name))
- coltype = sqltypes.NullType()
-
- columns.append({
- 'name' : name,
- 'type' : coltype,
- 'nullable' : nullable,
- 'default' : default,
- 'autoincrement':default is None,
- 'primary_key': primary_key
- })
- return columns
-
- @reflection.cache
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- cols = self.get_columns(connection, table_name, schema, **kw)
- pkeys = []
- for col in cols:
- if col['primary_key']:
- pkeys.append(col['name'])
- return pkeys
-
- @reflection.cache
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- quote = self.identifier_preparer.quote_identifier
- if schema is not None:
- pragma = "PRAGMA %s." % quote(schema)
- else:
- pragma = "PRAGMA "
- qtable = quote(table_name)
- c = _pragma_cursor(connection.execute("%sforeign_key_list(%s)" % (pragma, qtable)))
- fkeys = []
- fks = {}
- while True:
- row = c.fetchone()
- if row is None:
- break
- (constraint_name, rtbl, lcol, rcol) = (row[0], row[2], row[3], row[4])
- # sqlite won't return rcol if the table
- # was created with REFERENCES <tablename>, no col
- if rcol is None:
- rcol = lcol
- rtbl = re.sub(r'^\"|\"$', '', rtbl)
- lcol = re.sub(r'^\"|\"$', '', lcol)
- rcol = re.sub(r'^\"|\"$', '', rcol)
- try:
- fk = fks[constraint_name]
- except KeyError:
- fk = {
- 'name' : constraint_name,
- 'constrained_columns' : [],
- 'referred_schema' : None,
- 'referred_table' : rtbl,
- 'referred_columns' : []
- }
- fkeys.append(fk)
- fks[constraint_name] = fk
-
- # look up the table based on the given table's engine, not 'self',
- # since it could be a ProxyEngine
- if lcol not in fk['constrained_columns']:
- fk['constrained_columns'].append(lcol)
- if rcol not in fk['referred_columns']:
- fk['referred_columns'].append(rcol)
- return fkeys
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema=None, **kw):
- quote = self.identifier_preparer.quote_identifier
- if schema is not None:
- pragma = "PRAGMA %s." % quote(schema)
- else:
- pragma = "PRAGMA "
- include_auto_indexes = kw.pop('include_auto_indexes', False)
- qtable = quote(table_name)
- c = _pragma_cursor(connection.execute("%sindex_list(%s)" % (pragma, qtable)))
- indexes = []
- while True:
- row = c.fetchone()
- if row is None:
- break
- # ignore implicit primary key index.
- # http://www.mail-archive.com/sqlite-users@sqlite.org/msg30517.html
- elif not include_auto_indexes and row[1].startswith('sqlite_autoindex'):
- continue
-
- indexes.append(dict(name=row[1], column_names=[], unique=row[2]))
- # loop thru unique indexes to get the column names.
- for idx in indexes:
- c = connection.execute("%sindex_info(%s)" % (pragma, quote(idx['name'])))
- cols = idx['column_names']
- while True:
- row = c.fetchone()
- if row is None:
- break
- cols.append(row[2])
- return indexes
-
-
-def _pragma_cursor(cursor):
- """work around SQLite issue whereby cursor.description
- is blank when PRAGMA returns no rows."""
-
- if cursor.closed:
- cursor.fetchone = lambda: None
- return cursor
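A sketch combining two behaviors documented in this module, using a placeholder database path: the ``isolation_level`` parameter (emitted as ``PRAGMA read_uncommitted``) and reflection through the PRAGMA-based inspection methods::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine("sqlite:///some.db",  # placeholder path
                           isolation_level="READ UNCOMMITTED")

    # Inspector dispatches to get_table_names() / get_columns() above
    insp = reflection.Inspector.from_engine(engine)
    for name in insp.get_table_names():
        print name, [col['name'] for col in insp.get_columns(name)]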
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/pysqlite.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/pysqlite.py
deleted file mode 100755
index d426b1bb..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sqlite/pysqlite.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# sqlite/pysqlite.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for the SQLite database via pysqlite.
-
-Note that pysqlite is the same driver as the ``sqlite3``
-module included with the Python distribution.
-
-Driver
-------
-
-When using Python 2.5 and above, the built-in ``sqlite3`` driver is
-already installed and no additional installation is needed. Otherwise,
-the ``pysqlite2`` driver needs to be present. This is the same driver as
-``sqlite3``, just with a different name.
-
-The ``pysqlite2`` driver will be loaded first, and if not found, ``sqlite3``
-is loaded. This allows an explicitly installed pysqlite driver to take
-precedence over the built-in one. As with all dialects, a specific
-DBAPI module may be provided to :func:`~sqlalchemy.create_engine()` to control
-this explicitly::
-
- from sqlite3 import dbapi2 as sqlite
- e = create_engine('sqlite+pysqlite:///file.db', module=sqlite)
-
-Full documentation on pysqlite is available at:
-`<http://www.initd.org/pub/software/pysqlite/doc/usage-guide.html>`_
-
-Connect Strings
----------------
-
-The file specification for the SQLite database is taken as the "database" portion of
-the URL. Note that the format of a url is::
-
- driver://user:pass@host/database
-
-This means that the actual filename to be used starts with the characters to the
-**right** of the third slash. So connecting to a relative filepath looks like::
-
- # relative path
- e = create_engine('sqlite:///path/to/database.db')
-
-An absolute path, which is denoted by starting with a slash, means you need **four**
-slashes::
-
- # absolute path
- e = create_engine('sqlite:////path/to/database.db')
-
-To use a Windows path, regular drive specifications and backslashes can be used.
-Double backslashes are probably needed::
-
- # absolute path on Windows
- e = create_engine('sqlite:///C:\\\\path\\\\to\\\\database.db')
-
-The sqlite ``:memory:`` identifier is the default if no filepath is present. Specify
-``sqlite://`` and nothing else::
-
- # in-memory database
- e = create_engine('sqlite://')
-
-Compatibility with sqlite3 "native" date and datetime types
------------------------------------------------------------
-
-The pysqlite driver includes the sqlite3.PARSE_DECLTYPES and
-sqlite3.PARSE_COLNAMES options, which have the effect that any column
-or expression explicitly cast as "date" or "timestamp" will be converted
-to a Python date or datetime object. The date and datetime types provided
-with the pysqlite dialect are not currently compatible with these options,
-since they render the ISO date/datetime including microseconds, which
-pysqlite's driver does not. Additionally, SQLAlchemy does not at
-this time automatically render the "cast" syntax required for the
-freestanding functions "current_timestamp" and "current_date" to return
-datetime/date types natively. Unfortunately, pysqlite
-does not provide the standard DBAPI types in ``cursor.description``,
-leaving SQLAlchemy with no way to detect these types on the fly
-without expensive per-row type checks.
-
-Keeping in mind that pysqlite's parsing option is not recommended,
-nor should it be necessary, for use with SQLAlchemy, usage of PARSE_DECLTYPES
-can be forced if one configures "native_datetime=True" on create_engine()::
-
- engine = create_engine('sqlite://',
- connect_args={'detect_types': sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES},
- native_datetime=True
- )
-
-With this flag enabled, the DATE and TIMESTAMP types (but note - not the DATETIME
-or TIME types...confused yet ?) will not perform any bind parameter or result
-processing. Execution of "func.current_date()" will return a string.
-"func.current_timestamp()" is registered as returning a DATETIME type in
-SQLAlchemy, so this function still receives SQLAlchemy-level result processing.
-
-Pooling Behavior
-------------------
-
-Pysqlite connections do not support being moved between threads, unless
-the ``check_same_thread`` Pysqlite flag is set to ``False``. In addition,
-when using an in-memory SQLite database, the full database exists only within
-the scope of a single connection. It is reported that an in-memory
-database does not support being shared between threads regardless of the
-``check_same_thread`` flag - which means that a multithreaded
-application **cannot** share data from a ``:memory:`` database across threads
-unless access to the connection is limited to a single worker thread which communicates
-through a queueing mechanism to concurrent threads.
-
-To provide for these two behaviors, the pysqlite dialect will select a suitable
-:class:`.Pool` implementation:
-
-* When a ``:memory:`` SQLite database is specified, the dialect will use :class:`.SingletonThreadPool`.
- This pool maintains a single connection per thread, so that all access to the engine within
- the current thread use the same ``:memory:`` database.
-* When a file-based database is specified, the dialect will use :class:`.NullPool` as the source
- of connections. This pool closes and discards connections which are returned to the pool immediately.
-  SQLite file-based connections have extremely low overhead, so pooling is not necessary.
- The scheme also prevents a connection from being used again in a different thread
- and works best with SQLite's coarse-grained file locking.
-
- .. note:: The default selection of :class:`.NullPool` for SQLite file-based databases
- is new in SQLAlchemy 0.7. Previous versions
-    selected :class:`.SingletonThreadPool` by
- default for all SQLite databases.
-
-Unicode
--------
-
-In contrast to SQLAlchemy's active handling of date and time types for pysqlite, pysqlite's
-default behavior regarding Unicode is that all strings are returned as Python unicode objects
-in all cases. So even if the :class:`~sqlalchemy.types.Unicode` type is
-*not* used, you will still always receive unicode data back from a result set. It is
-**strongly** recommended that you do use the :class:`~sqlalchemy.types.Unicode` type
-to represent strings, since it will raise a warning if a non-unicode Python string is
-passed from the user application. Mixing the usage of non-unicode objects with returned unicode objects can
-quickly create confusion, particularly when using the ORM, as internal data is not
-always represented by an actual database result string.
-
-"""
-
-from sqlalchemy.dialects.sqlite.base import SQLiteDialect, DATETIME, DATE
-from sqlalchemy import exc, pool
-from sqlalchemy import types as sqltypes
-from sqlalchemy import util
-
-import os
-
-class _SQLite_pysqliteTimeStamp(DATETIME):
- def bind_processor(self, dialect):
- if dialect.native_datetime:
- return None
- else:
- return DATETIME.bind_processor(self, dialect)
-
- def result_processor(self, dialect, coltype):
- if dialect.native_datetime:
- return None
- else:
- return DATETIME.result_processor(self, dialect, coltype)
-
-class _SQLite_pysqliteDate(DATE):
- def bind_processor(self, dialect):
- if dialect.native_datetime:
- return None
- else:
- return DATE.bind_processor(self, dialect)
-
- def result_processor(self, dialect, coltype):
- if dialect.native_datetime:
- return None
- else:
- return DATE.result_processor(self, dialect, coltype)
-
-class SQLiteDialect_pysqlite(SQLiteDialect):
- default_paramstyle = 'qmark'
-
- colspecs = util.update_copy(
- SQLiteDialect.colspecs,
- {
- sqltypes.Date:_SQLite_pysqliteDate,
- sqltypes.TIMESTAMP:_SQLite_pysqliteTimeStamp,
- }
- )
-
- # Py3K
- #description_encoding = None
-
- driver = 'pysqlite'
-
- def __init__(self, **kwargs):
- SQLiteDialect.__init__(self, **kwargs)
-
- if self.dbapi is not None:
- sqlite_ver = self.dbapi.version_info
- if sqlite_ver < (2, 1, 3):
- util.warn(
- ("The installed version of pysqlite2 (%s) is out-dated "
- "and will cause errors in some cases. Version 2.1.3 "
- "or greater is recommended.") %
- '.'.join([str(subver) for subver in sqlite_ver]))
-
- @classmethod
- def dbapi(cls):
- try:
- from pysqlite2 import dbapi2 as sqlite
- except ImportError, e:
- try:
- from sqlite3 import dbapi2 as sqlite #try the 2.5+ stdlib name.
- except ImportError:
- raise e
- return sqlite
-
- @classmethod
- def get_pool_class(cls, url):
- if url.database and url.database != ':memory:':
- return pool.NullPool
- else:
- return pool.SingletonThreadPool
-
- def _get_server_version_info(self, connection):
- return self.dbapi.sqlite_version_info
-
- def create_connect_args(self, url):
- if url.username or url.password or url.host or url.port:
- raise exc.ArgumentError(
- "Invalid SQLite URL: %s\n"
- "Valid SQLite URL forms are:\n"
- " sqlite:///:memory: (or, sqlite://)\n"
- " sqlite:///relative/path/to/file.db\n"
- " sqlite:////absolute/path/to/file.db" % (url,))
- filename = url.database or ':memory:'
- if filename != ':memory:':
- filename = os.path.abspath(filename)
-
- opts = url.query.copy()
- util.coerce_kw_type(opts, 'timeout', float)
- util.coerce_kw_type(opts, 'isolation_level', str)
- util.coerce_kw_type(opts, 'detect_types', int)
- util.coerce_kw_type(opts, 'check_same_thread', bool)
- util.coerce_kw_type(opts, 'cached_statements', int)
-
- return ([filename], opts)
-
- def is_disconnect(self, e, connection, cursor):
- return isinstance(e, self.dbapi.ProgrammingError) and "Cannot operate on a closed database." in str(e)
-
-dialect = SQLiteDialect_pysqlite
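The pool selection described above can be overridden through the standard ``poolclass`` argument of :func:`create_engine`; a sketch, with a placeholder file path::

    from sqlalchemy import create_engine
    from sqlalchemy.pool import SingletonThreadPool

    # opt back in to the pre-0.7 default pool for a file-based database
    engine = create_engine("sqlite:///file.db",
                           poolclass=SingletonThreadPool)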
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/__init__.py
deleted file mode 100755
index 1481c6d9..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/__init__.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# sybase/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.dialects.sybase import base, pysybase, pyodbc
-
-
-from base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
- TEXT,DATE,DATETIME, FLOAT, NUMERIC,\
- BIGINT,INT, INTEGER, SMALLINT, BINARY,\
- VARBINARY,UNITEXT,UNICHAR,UNIVARCHAR,\
- IMAGE,BIT,MONEY,SMALLMONEY,TINYINT
-
-# default dialect
-base.dialect = pyodbc.dialect
-
-__all__ = (
- 'CHAR', 'VARCHAR', 'TIME', 'NCHAR', 'NVARCHAR',
- 'TEXT','DATE','DATETIME', 'FLOAT', 'NUMERIC',
- 'BIGINT','INT', 'INTEGER', 'SMALLINT', 'BINARY',
- 'VARBINARY','UNITEXT','UNICHAR','UNIVARCHAR',
- 'IMAGE','BIT','MONEY','SMALLMONEY','TINYINT',
- 'dialect'
-)
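Since pyodbc is registered as the default DBAPI here, a hypothetical connection sketch (DSN and credentials are placeholders) would be::

    from sqlalchemy import create_engine

    # assumes an ODBC DSN named "mydsn" pointing at an ASE server
    engine = create_engine("sybase+pyodbc://scott:tiger@mydsn")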
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/base.py
deleted file mode 100755
index 3c470604..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/base.py
+++ /dev/null
@@ -1,434 +0,0 @@
-# sybase/base.py
-# Copyright (C) 2010-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# get_select_precolumns(), limit_clause() implementation
-# copyright (C) 2007 Fisch Asset Management
-# AG http://www.fam.ch, with coding by Alexander Houben
-# alexander.houben@thor-solutions.ch
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for Sybase Adaptive Server Enterprise (ASE).
-
-Note that this dialect is no longer specific to Sybase iAnywhere.
-ASE is the primary support platform.
-
-"""
-
-import operator
-from sqlalchemy.sql import compiler, expression, text, bindparam
-from sqlalchemy.engine import default, base, reflection
-from sqlalchemy import types as sqltypes
-from sqlalchemy.sql import operators as sql_operators
-from sqlalchemy import schema as sa_schema
-from sqlalchemy import util, sql, exc
-
-from sqlalchemy.types import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
- TEXT,DATE,DATETIME, FLOAT, NUMERIC,\
- BIGINT,INT, INTEGER, SMALLINT, BINARY,\
- VARBINARY, DECIMAL, TIMESTAMP, Unicode,\
- UnicodeText
-
-RESERVED_WORDS = set([
- "add", "all", "alter", "and",
- "any", "as", "asc", "backup",
- "begin", "between", "bigint", "binary",
- "bit", "bottom", "break", "by",
- "call", "capability", "cascade", "case",
- "cast", "char", "char_convert", "character",
- "check", "checkpoint", "close", "comment",
- "commit", "connect", "constraint", "contains",
- "continue", "convert", "create", "cross",
- "cube", "current", "current_timestamp", "current_user",
- "cursor", "date", "dbspace", "deallocate",
- "dec", "decimal", "declare", "default",
- "delete", "deleting", "desc", "distinct",
- "do", "double", "drop", "dynamic",
- "else", "elseif", "encrypted", "end",
- "endif", "escape", "except", "exception",
- "exec", "execute", "existing", "exists",
- "externlogin", "fetch", "first", "float",
- "for", "force", "foreign", "forward",
- "from", "full", "goto", "grant",
- "group", "having", "holdlock", "identified",
- "if", "in", "index", "index_lparen",
- "inner", "inout", "insensitive", "insert",
- "inserting", "install", "instead", "int",
- "integer", "integrated", "intersect", "into",
- "iq", "is", "isolation", "join",
- "key", "lateral", "left", "like",
- "lock", "login", "long", "match",
- "membership", "message", "mode", "modify",
- "natural", "new", "no", "noholdlock",
- "not", "notify", "null", "numeric",
- "of", "off", "on", "open",
- "option", "options", "or", "order",
- "others", "out", "outer", "over",
- "passthrough", "precision", "prepare", "primary",
- "print", "privileges", "proc", "procedure",
- "publication", "raiserror", "readtext", "real",
- "reference", "references", "release", "remote",
- "remove", "rename", "reorganize", "resource",
- "restore", "restrict", "return", "revoke",
- "right", "rollback", "rollup", "save",
- "savepoint", "scroll", "select", "sensitive",
- "session", "set", "setuser", "share",
- "smallint", "some", "sqlcode", "sqlstate",
- "start", "stop", "subtrans", "subtransaction",
- "synchronize", "syntax_error", "table", "temporary",
- "then", "time", "timestamp", "tinyint",
- "to", "top", "tran", "trigger",
- "truncate", "tsequal", "unbounded", "union",
- "unique", "unknown", "unsigned", "update",
- "updating", "user", "using", "validate",
- "values", "varbinary", "varchar", "variable",
- "varying", "view", "wait", "waitfor",
- "when", "where", "while", "window",
- "with", "with_cube", "with_lparen", "with_rollup",
- "within", "work", "writetext",
- ])
-
-
-class _SybaseUnitypeMixin(object):
- """these types appear to return a buffer object."""
-
- def result_processor(self, dialect, coltype):
- def process(value):
- if value is not None:
- return str(value) #.decode("ucs-2")
- else:
- return None
- return process
-
-class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
- __visit_name__ = 'UNICHAR'
-
-class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
- __visit_name__ = 'UNIVARCHAR'
-
-class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
- __visit_name__ = 'UNITEXT'
-
-class TINYINT(sqltypes.Integer):
- __visit_name__ = 'TINYINT'
-
-class BIT(sqltypes.TypeEngine):
- __visit_name__ = 'BIT'
-
-class MONEY(sqltypes.TypeEngine):
- __visit_name__ = "MONEY"
-
-class SMALLMONEY(sqltypes.TypeEngine):
- __visit_name__ = "SMALLMONEY"
-
-class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
- __visit_name__ = "UNIQUEIDENTIFIER"
-
-class IMAGE(sqltypes.LargeBinary):
- __visit_name__ = 'IMAGE'
-
-
-class SybaseTypeCompiler(compiler.GenericTypeCompiler):
- def visit_large_binary(self, type_):
- return self.visit_IMAGE(type_)
-
- def visit_boolean(self, type_):
- return self.visit_BIT(type_)
-
- def visit_unicode(self, type_):
- return self.visit_NVARCHAR(type_)
-
- def visit_UNICHAR(self, type_):
- return "UNICHAR(%d)" % type_.length
-
- def visit_UNIVARCHAR(self, type_):
- return "UNIVARCHAR(%d)" % type_.length
-
- def visit_UNITEXT(self, type_):
- return "UNITEXT"
-
- def visit_TINYINT(self, type_):
- return "TINYINT"
-
- def visit_IMAGE(self, type_):
- return "IMAGE"
-
- def visit_BIT(self, type_):
- return "BIT"
-
- def visit_MONEY(self, type_):
- return "MONEY"
-
- def visit_SMALLMONEY(self, type_):
- return "SMALLMONEY"
-
- def visit_UNIQUEIDENTIFIER(self, type_):
- return "UNIQUEIDENTIFIER"
-
-ischema_names = {
- 'integer' : INTEGER,
- 'unsigned int' : INTEGER, # TODO: unsigned flags
- 'unsigned smallint' : SMALLINT, # TODO: unsigned flags
- 'unsigned bigint' : BIGINT, # TODO: unsigned flags
- 'bigint': BIGINT,
- 'smallint' : SMALLINT,
- 'tinyint' : TINYINT,
- 'varchar' : VARCHAR,
- 'long varchar' : TEXT, # TODO
- 'char' : CHAR,
- 'decimal' : DECIMAL,
- 'numeric' : NUMERIC,
- 'float' : FLOAT,
- 'double' : NUMERIC, # TODO
- 'binary' : BINARY,
- 'varbinary' : VARBINARY,
- 'bit': BIT,
- 'image' : IMAGE,
- 'timestamp': TIMESTAMP,
- 'money': MONEY,
- 'smallmoney': MONEY,
- 'uniqueidentifier': UNIQUEIDENTIFIER,
-
-}
-
-
-class SybaseExecutionContext(default.DefaultExecutionContext):
- _enable_identity_insert = False
-
- def set_ddl_autocommit(self, connection, value):
- """Must be implemented by subclasses to accommodate DDL executions.
-
- "connection" is the raw unwrapped DBAPI connection. "value"
- is True or False. When True, the connection should be configured
- such that a DDL can take place subsequently. When False,
- a DDL has taken place and the connection should be resumed
- into non-autocommit mode.
-
- """
- raise NotImplementedError()
-
- def pre_exec(self):
- if self.isinsert:
- tbl = self.compiled.statement.table
- seq_column = tbl._autoincrement_column
- insert_has_sequence = seq_column is not None
-
- if insert_has_sequence:
- self._enable_identity_insert = \
- seq_column.key in self.compiled_parameters[0]
- else:
- self._enable_identity_insert = False
-
- if self._enable_identity_insert:
- self.cursor.execute("SET IDENTITY_INSERT %s ON" %
- self.dialect.identifier_preparer.format_table(tbl))
-
- if self.isddl:
- # TODO: to enhance this, we can detect "ddl in tran" on the
- # database settings. this error message should be improved to
- # include a note about that.
- if not self.should_autocommit:
- raise exc.InvalidRequestError(
- "The Sybase dialect only supports "
- "DDL in 'autocommit' mode at this time.")
-
- self.root_connection.engine.logger.info(
- "AUTOCOMMIT (Assuming no Sybase 'ddl in tran')")
-
- self.set_ddl_autocommit(
- self.root_connection.connection.connection,
- True)
-
-
- def post_exec(self):
- if self.isddl:
- self.set_ddl_autocommit(self.root_connection, False)
-
- if self._enable_identity_insert:
- self.cursor.execute(
- "SET IDENTITY_INSERT %s OFF" %
- self.dialect.identifier_preparer.
- format_table(self.compiled.statement.table)
- )
-
- def get_lastrowid(self):
- cursor = self.create_cursor()
- cursor.execute("SELECT @@identity AS lastrowid")
- lastrowid = cursor.fetchone()[0]
- cursor.close()
- return lastrowid
-
-class SybaseSQLCompiler(compiler.SQLCompiler):
- ansi_bind_rules = True
-
- extract_map = util.update_copy(
- compiler.SQLCompiler.extract_map,
- {
- 'doy': 'dayofyear',
- 'dow': 'weekday',
- 'milliseconds': 'millisecond'
- })
-
- def get_select_precolumns(self, select):
- s = select._distinct and "DISTINCT " or ""
- # TODO: don't think Sybase supports
- # bind params for FIRST / TOP
- if select._limit:
- #if select._limit == 1:
- #s += "FIRST "
- #else:
- #s += "TOP %s " % (select._limit,)
- s += "TOP %s " % (select._limit,)
- if select._offset:
- if not select._limit:
- # FIXME: sybase doesn't allow an offset without a limit
- # so use a huge value for TOP here
- s += "TOP 1000000 "
- s += "START AT %s " % (select._offset+1,)
- return s
-
- def get_from_hint_text(self, table, text):
- return text
-
- def limit_clause(self, select):
- # Limit in sybase is after the select keyword
- return ""
-
- def visit_extract(self, extract, **kw):
- field = self.extract_map.get(extract.field, extract.field)
- return 'DATEPART("%s", %s)' % (
- field, self.process(extract.expr, **kw))
-
- def for_update_clause(self, select):
- # "FOR UPDATE" is only allowed on "DECLARE CURSOR"
- # which SQLAlchemy doesn't use
- return ''
-
- def order_by_clause(self, select, **kw):
- kw['literal_binds'] = True
- order_by = self.process(select._order_by_clause, **kw)
-
- # SybaseSQL only allows ORDER BY in subqueries if there is a LIMIT
- if order_by and (not self.is_subquery() or select._limit):
- return " ORDER BY " + order_by
- else:
- return ""
-
-
-class SybaseDDLCompiler(compiler.DDLCompiler):
- def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column) + " " + \
- self.dialect.type_compiler.process(column.type)
-
- if column.table is None:
- raise exc.InvalidRequestError(
- "The Sybase dialect requires Table-bound "
- "columns in order to generate DDL")
- seq_col = column.table._autoincrement_column
-
- # install a IDENTITY Sequence if we have an implicit IDENTITY column
- if seq_col is column:
- sequence = isinstance(column.default, sa_schema.Sequence) \
- and column.default
- if sequence:
- start, increment = sequence.start or 1, \
- sequence.increment or 1
- else:
- start, increment = 1, 1
- if (start, increment) == (1, 1):
- colspec += " IDENTITY"
- else:
- # TODO: need correct syntax for this
- colspec += " IDENTITY(%s,%s)" % (start, increment)
- else:
- if column.nullable is not None:
- if not column.nullable or column.primary_key:
- colspec += " NOT NULL"
- else:
- colspec += " NULL"
-
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- return colspec
-
- def visit_drop_index(self, drop):
- index = drop.element
- return "\nDROP INDEX %s.%s" % (
- self.preparer.quote_identifier(index.table.name),
- self.preparer.quote(
- self._index_identifier(index.name), index.quote)
- )
-
-class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
- reserved_words = RESERVED_WORDS
-
-class SybaseDialect(default.DefaultDialect):
- name = 'sybase'
- supports_unicode_statements = False
- supports_sane_rowcount = False
- supports_sane_multi_rowcount = False
-
- supports_native_boolean = False
- supports_unicode_binds = False
- postfetch_lastrowid = True
-
- colspecs = {}
- ischema_names = ischema_names
-
- type_compiler = SybaseTypeCompiler
- statement_compiler = SybaseSQLCompiler
- ddl_compiler = SybaseDDLCompiler
- preparer = SybaseIdentifierPreparer
-
- def _get_default_schema_name(self, connection):
- return connection.scalar(
- text("SELECT user_name() as user_name",
- typemap={'user_name':Unicode})
- )
-
- def initialize(self, connection):
- super(SybaseDialect, self).initialize(connection)
- if self.server_version_info is not None and\
- self.server_version_info < (15, ):
- self.max_identifier_length = 30
- else:
- self.max_identifier_length = 255
-
- @reflection.cache
- def get_table_names(self, connection, schema=None, **kw):
- if schema is None:
- schema = self.default_schema_name
-
- result = connection.execute(
- text("select sysobjects.name from sysobjects, sysusers "
- "where sysobjects.uid=sysusers.uid and "
- "sysusers.name=:schemaname and "
- "sysobjects.type='U'",
- bindparams=[
- bindparam('schemaname', schema)
- ])
- )
- return [r[0] for r in result]
-
- def has_table(self, connection, tablename, schema=None):
- if schema is None:
- schema = self.default_schema_name
-
- result = connection.execute(
- text("select sysobjects.name from sysobjects, sysusers "
- "where sysobjects.uid=sysusers.uid and "
- "sysobjects.name=:tablename and "
- "sysusers.name=:schemaname and "
- "sysobjects.type='U'",
- bindparams=[
- bindparam('tablename', tablename),
- bindparam('schemaname', schema)
- ])
- )
- return result.scalar() is not None
-
- def reflecttable(self, connection, table, include_columns):
- raise NotImplementedError()
-
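The compiler above replaces LIMIT/OFFSET with Sybase's TOP / START AT (note the +1 adjustment in get_select_precolumns()). A sketch of the rendered SQL, assuming the 0.7-era select() API::

    from sqlalchemy import select, table, column
    from sqlalchemy.dialects.sybase.base import SybaseDialect

    t = table("t", column("x"))
    stmt = select([t.c.x]).limit(5).offset(10)
    # OFFSET 10 becomes START AT 11 (the start position is 1-based):
    # SELECT TOP 5 START AT 11 t.x FROM t
    print(stmt.compile(dialect=SybaseDialect()))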
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/mxodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/mxodbc.py
deleted file mode 100755
index 756c0b28..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/mxodbc.py
+++ /dev/null
@@ -1,23 +0,0 @@
-# sybase/mxodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for Sybase via mxodbc.
-
-This dialect is a stub only and is likely non-functional at this time.
-
-
-"""
-from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext
-from sqlalchemy.connectors.mxodbc import MxODBCConnector
-
-class SybaseExecutionContext_mxodbc(SybaseExecutionContext):
- pass
-
-class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
- execution_ctx_cls = SybaseExecutionContext_mxodbc
-
-dialect = SybaseDialect_mxodbc
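
The two-line dialect above is the connector-mixin pattern used throughout sqlalchemy.connectors: the connector contributes the DBAPI plumbing, the dialect the SQL behavior, and the MRO lets the connector's methods win. An illustrative stand-in (the classes below are hypothetical, using sqlite3 as a placeholder DBAPI)::

    class ConnectorBase(object):
        @classmethod
        def dbapi(cls):
            raise NotImplementedError()

    class MyConnector(ConnectorBase):
        @classmethod
        def dbapi(cls):
            import sqlite3  # stand-in for a real DBAPI module
            return sqlite3

    class MyDialectBase(object):
        @classmethod
        def dbapi(cls):
            raise NotImplementedError()

    class MyDialect(MyConnector, MyDialectBase):
        pass

    # resolves to MyConnector.dbapi() via the MRO
    print(MyDialect.dbapi())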
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pyodbc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pyodbc.py
deleted file mode 100755
index c8480cb4..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pyodbc.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# sybase/pyodbc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for Sybase via pyodbc.
-
-http://pypi.python.org/pypi/pyodbc/
-
-Connect strings are of the form::
-
- sybase+pyodbc://<username>:<password>@<dsn>/
- sybase+pyodbc://<username>:<password>@<host>/<database>
-
-Unicode Support
----------------
-
-The pyodbc driver currently supports usage of these Sybase types with
-Unicode or multibyte strings::
-
- CHAR
- NCHAR
- NVARCHAR
- TEXT
- VARCHAR
-
-Currently *not* supported are::
-
- UNICHAR
- UNITEXT
- UNIVARCHAR
-
-"""
-
-from sqlalchemy.dialects.sybase.base import SybaseDialect,\
- SybaseExecutionContext
-from sqlalchemy.connectors.pyodbc import PyODBCConnector
-from sqlalchemy import types as sqltypes, util, processors
-from sqlalchemy.util.compat import decimal
-
-class _SybNumeric_pyodbc(sqltypes.Numeric):
- """Turns Decimals with adjusted() < -6 into floats.
-
- It's not yet known how to get decimals with many
- significant digits or very large adjusted() into Sybase
- via pyodbc.
-
- """
-
- def bind_processor(self, dialect):
- super_process = super(_SybNumeric_pyodbc, self).\
- bind_processor(dialect)
-
- def process(value):
- if self.asdecimal and \
- isinstance(value, decimal.Decimal):
-
- if value.adjusted() < -6:
- return processors.to_float(value)
-
- if super_process:
- return super_process(value)
- else:
- return value
- return process
-
-class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
- def set_ddl_autocommit(self, connection, value):
- if value:
- connection.autocommit = True
- else:
- connection.autocommit = False
-
-class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
- execution_ctx_cls = SybaseExecutionContext_pyodbc
-
- colspecs = {
- sqltypes.Numeric: _SybNumeric_pyodbc,
- }
-
-dialect = SybaseDialect_pyodbc
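
_SybNumeric_pyodbc keys off decimal.Decimal.adjusted(), the exponent of the most significant digit; values with adjusted() < -6 are handed to pyodbc as floats. The cutoff in isolation::

    import decimal

    print(decimal.Decimal("0.001").adjusted())      # -3: kept as Decimal
    print(decimal.Decimal("0.0000001").adjusted())  # -7: converted to float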
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pysybase.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pysybase.py
deleted file mode 100755
index e12cf07d..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/dialects/sybase/pysybase.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# sybase/pysybase.py
-# Copyright (C) 2010-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Support for Sybase via the python-sybase driver.
-
-http://python-sybase.sourceforge.net/
-
-Connect strings are of the form::
-
- sybase+pysybase://<username>:<password>@<dsn>/[database name]
-
-Unicode Support
----------------
-
-The python-sybase driver does not appear to support non-ASCII strings of any
-kind at this time.
-
-"""
-
-from sqlalchemy import types as sqltypes, processors
-from sqlalchemy.dialects.sybase.base import SybaseDialect, \
- SybaseExecutionContext, SybaseSQLCompiler
-
-
-class _SybNumeric(sqltypes.Numeric):
- def result_processor(self, dialect, type_):
- if not self.asdecimal:
- return processors.to_float
- else:
- return sqltypes.Numeric.result_processor(self, dialect, type_)
-
-class SybaseExecutionContext_pysybase(SybaseExecutionContext):
-
- def set_ddl_autocommit(self, dbapi_connection, value):
- if value:
- # call commit() on the Sybase connection directly,
- # to avoid any side effects of calling a Connection
- # transactional method inside of pre_exec()
- dbapi_connection.commit()
-
- def pre_exec(self):
- SybaseExecutionContext.pre_exec(self)
-
- for param in self.parameters:
- for key in list(param):
- param["@" + key] = param[key]
- del param[key]
-
-
-class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
- def bindparam_string(self, name):
- return "@" + name
-
-class SybaseDialect_pysybase(SybaseDialect):
- driver = 'pysybase'
- execution_ctx_cls = SybaseExecutionContext_pysybase
- statement_compiler = SybaseSQLCompiler_pysybase
-
- colspecs = {
- sqltypes.Numeric: _SybNumeric,
- sqltypes.Float: sqltypes.Float
- }
-
- @classmethod
- def dbapi(cls):
- import Sybase
- return Sybase
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args(username='user', password='passwd')
-
- return ([opts.pop('host')], opts)
-
- def do_executemany(self, cursor, statement, parameters, context=None):
- # calling python-sybase executemany yields:
- # TypeError: string too long for buffer
- for param in parameters:
- cursor.execute(statement, param)
-
- def _get_server_version_info(self, connection):
- vers = connection.scalar("select @@version_number")
- # i.e. 15500, 15000, 12500 == (15, 5, 0, 0), (15, 0, 0, 0),
- # (12, 5, 0, 0)
- return (vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10)
-
- def is_disconnect(self, e, connection, cursor):
- if isinstance(e, (self.dbapi.OperationalError,
- self.dbapi.ProgrammingError)):
- msg = str(e)
- return ('Unable to complete network request to host' in msg or
- 'Invalid connection state' in msg or
- 'Invalid cursor state' in msg)
- else:
- return False
-
-dialect = SybaseDialect_pysybase
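
_get_server_version_info() above unpacks @@version_number digit by digit; under Python 2 integer division (matching this py2.7 egg) the arithmetic decodes as follows::

    vers = 15500
    print((vers / 1000, vers % 1000 / 100, vers % 100 / 10, vers % 10))
    # (15, 5, 0, 0)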
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/__init__.py
deleted file mode 100755
index 010cc22d..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/__init__.py
+++ /dev/null
@@ -1,301 +0,0 @@
-# engine/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""SQL connections, SQL execution and high-level DB-API interface.
-
-The engine package defines the basic components used to interface
-DB-API modules with higher-level statement construction,
-connection-management, execution and result contexts. The primary
-"entry point" class into this package is the Engine and it's public
-constructor ``create_engine()``.
-
-This package includes:
-
-base.py
- Defines interface classes and some implementation classes which
- comprise the basic components used to interface between a DB-API,
- constructed and plain-text statements, connections, transactions,
- and results.
-
-default.py
- Contains default implementations of some of the components defined
- in base.py. All current database dialects use the classes in
- default.py as base classes for their own database-specific
- implementations.
-
-strategies.py
- The mechanics of constructing ``Engine`` objects are represented
- here. Defines the ``EngineStrategy`` class which represents how
- to go from arguments specified to the ``create_engine()``
- function, to a fully constructed ``Engine``, including
- initialization of connection pooling, dialects, and specific
- subclasses of ``Engine``.
-
-threadlocal.py
- The ``TLEngine`` class is defined here, which is a subclass of
- the generic ``Engine`` and tracks ``Connection`` and
- ``Transaction`` objects against the identity of the current
- thread. This allows certain programming patterns based around
- the concept of a "thread-local connection" to be possible.
- The ``TLEngine`` is created by using the "threadlocal" engine
- strategy in conjunction with the ``create_engine()`` function.
-
-url.py
- Defines the ``URL`` class which represents the individual
- components of a string URL passed to ``create_engine()``. Also
- defines a basic module-loading strategy for the dialect specifier
- within a URL.
-"""
-
-# not sure what this was used for
-#import sqlalchemy.databases
-
-from sqlalchemy.engine.base import (
- BufferedColumnResultProxy,
- BufferedColumnRow,
- BufferedRowResultProxy,
- Compiled,
- Connectable,
- Connection,
- Dialect,
- Engine,
- ExecutionContext,
- NestedTransaction,
- ResultProxy,
- RootTransaction,
- RowProxy,
- Transaction,
- TwoPhaseTransaction,
- TypeCompiler
- )
-from sqlalchemy.engine import strategies
-from sqlalchemy import util
-
-
-__all__ = (
- 'BufferedColumnResultProxy',
- 'BufferedColumnRow',
- 'BufferedRowResultProxy',
- 'Compiled',
- 'Connectable',
- 'Connection',
- 'Dialect',
- 'Engine',
- 'ExecutionContext',
- 'NestedTransaction',
- 'ResultProxy',
- 'RootTransaction',
- 'RowProxy',
- 'Transaction',
- 'TwoPhaseTransaction',
- 'TypeCompiler',
- 'create_engine',
- 'engine_from_config',
- )
-
-
-default_strategy = 'plain'
-def create_engine(*args, **kwargs):
- """Create a new Engine instance.
-
- The standard method of specifying the engine is via URL as the
- first positional argument, to indicate the appropriate database
- dialect and connection arguments, with additional keyword
- arguments sent as options to the dialect and resulting Engine.
-
- The URL is a string in the form
- ``dialect+driver://user:password@host/dbname[?key=value..]``, where
- ``dialect`` is a database name such as ``mysql``, ``oracle``,
- ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
- ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
- the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
-
- `**kwargs` takes a wide variety of options which are routed
- towards their appropriate components. Arguments may be
- specific to the Engine, the underlying Dialect, as well as the
- Pool. Specific dialects also accept keyword arguments that
- are unique to that dialect. Here, we describe the parameters
- that are common to most ``create_engine()`` usage.
-
- :param assert_unicode: Deprecated. A warning is raised in all cases when a non-Unicode
- object is passed when SQLAlchemy would coerce into an encoding
- (note: but **not** when the DBAPI handles unicode objects natively).
- To suppress or raise this warning to an
- error, use the Python warnings filter documented at:
- http://docs.python.org/library/warnings.html
-
- :param connect_args: a dictionary of options which will be
- passed directly to the DBAPI's ``connect()`` method as
- additional keyword arguments.
-
- :param convert_unicode=False: if set to True, all
- String/character based types will convert Python Unicode values to raw
- byte values sent to the DBAPI as bind parameters, and all raw byte values to
- Python Unicode coming out in result sets. This is an
- engine-wide method to provide Unicode conversion across the
- board for those DBAPIs that do not accept Python Unicode objects
- as input. For Unicode conversion on a column-by-column level, use
- the ``Unicode`` column type instead, described in :ref:`types_toplevel`. Note that
- many DBAPIs have the ability to return Python Unicode objects in
- result sets directly - SQLAlchemy will use these modes of operation
- if possible and will also attempt to detect "Unicode returns"
- behavior by the DBAPI upon first connect by the
- :class:`.Engine`. When this is detected, string values in
- result sets are passed through without further processing.
-
- :param creator: a callable which returns a DBAPI connection.
- This creation function will be passed to the underlying
- connection pool and will be used to create all new database
- connections. Usage of this function causes connection
- parameters specified in the URL argument to be bypassed.
-
- :param echo=False: if True, the Engine will log all statements
- as well as a repr() of their parameter lists to the engines
- logger, which defaults to sys.stdout. The ``echo`` attribute of
- ``Engine`` can be modified at any time to turn logging on and
- off. If set to the string ``"debug"``, result rows will be
- printed to the standard output as well. This flag ultimately
- controls a Python logger; see :ref:`dbengine_logging` for
- information on how to configure logging directly.
-
- :param echo_pool=False: if True, the connection pool will log
- all checkouts/checkins to the logging stream, which defaults to
- sys.stdout. This flag ultimately controls a Python logger; see
- :ref:`dbengine_logging` for information on how to configure logging
- directly.
-
- :param encoding='utf-8': the encoding to use for all Unicode
- translations, both by engine-wide unicode conversion as well as
- the ``Unicode`` type object.
-
- :param execution_options: Dictionary execution options which will
- be applied to all connections. See
- :meth:`~sqlalchemy.engine.base.Connection.execution_options`
-
- :param implicit_returning=True: When ``True``, a RETURNING-
- compatible construct, if available, will be used to
- fetch newly generated primary key values when a single row
- INSERT statement is emitted with no existing returning()
- clause. This applies to those backends which support RETURNING
- or a compatible construct, including Postgresql, Firebird, Oracle,
- Microsoft SQL Server. Set this to ``False`` to disable
- the automatic usage of RETURNING.
-
- :param label_length=None: optional integer value which limits
- the size of dynamically generated column labels to that many
- characters. If less than 6, labels are generated as
- "_(counter)". If ``None``, the value of
- ``dialect.max_identifier_length`` is used instead.
-
- :param listeners: A list of one or more
- :class:`~sqlalchemy.interfaces.PoolListener` objects which will
- receive connection pool events.
-
- :param logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.engine" logger. Defaults to a hexstring of the
- object's id.
-
- :param max_overflow=10: the number of connections to allow in
- connection pool "overflow", that is connections that can be
- opened above and beyond the pool_size setting, which defaults
- to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.
-
- :param module=None: reference to a Python module object (the module itself, not
- its string name). Specifies an alternate DBAPI module to be used
- by the engine's dialect. Each sub-dialect references a specific DBAPI which
- will be imported before first connect. This parameter causes the
- import to be bypassed, and the given module to be used instead.
- Can be used for testing of DBAPIs as well as to inject "mock"
- DBAPI implementations into the :class:`.Engine`.
-
- :param pool=None: an already-constructed instance of
- :class:`~sqlalchemy.pool.Pool`, such as a
- :class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
- pool will be used directly as the underlying connection pool
- for the engine, bypassing whatever connection parameters are
- present in the URL argument. For information on constructing
- connection pools manually, see :ref:`pooling_toplevel`.
-
- :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
- subclass, which will be used to create a connection pool
- instance using the connection parameters given in the URL. Note
- this differs from ``pool`` in that you don't actually
- instantiate the pool in this case, you just indicate what type
- of pool to be used.
-
- :param pool_logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
- id.
-
- :param pool_size=5: the number of connections to keep open
- inside the connection pool. This is used with :class:`~sqlalchemy.pool.QueuePool` as
- well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
- :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
- of 0 indicates no limit; to disable pooling, set ``poolclass`` to
- :class:`~sqlalchemy.pool.NullPool` instead.
-
- :param pool_recycle=-1: this setting causes the pool to recycle
- connections after the given number of seconds has passed. It
- defaults to -1, or no timeout. For example, setting to 3600
- means connections will be recycled after one hour. Note that
- MySQL in particular will disconnect automatically if no
- activity is detected on a connection for eight hours (although
- this is configurable with the MySQLDB connection itself and the
- server configuration as well).
-
- :param pool_timeout=30: number of seconds to wait before giving
- up on getting a connection from the pool. This is only used
- with :class:`~sqlalchemy.pool.QueuePool`.
-
- :param strategy='plain': selects alternate engine implementations.
- Currently available is the ``threadlocal``
- strategy, which is described in :ref:`threadlocal_strategy`.
-
- """
-
- strategy = kwargs.pop('strategy', default_strategy)
- strategy = strategies.strategies[strategy]
- return strategy.create(*args, **kwargs)
-
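Tying the docstring together, a representative call that combines a URL with several of the keyword options above (the URL is a placeholder; psycopg2 must be importable for the dialect to load)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "postgresql+psycopg2://scott:tiger@localhost/test",
        echo=True,            # log statements and their parameters
        pool_size=10,         # QueuePool size
        max_overflow=20,      # connections allowed beyond pool_size
        pool_recycle=3600)    # recycle connections hourly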
-def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
- """Create a new Engine instance using a configuration dictionary.
-
- The dictionary is typically produced from a config file where keys
- are prefixed, such as sqlalchemy.url, sqlalchemy.echo, etc. The
- 'prefix' argument indicates the prefix to be searched for.
-
- A select set of keyword arguments will be "coerced" to their
- expected type based on string values. In a future release, this
- functionality will be expanded and include dialect-specific
- arguments.
- """
-
- opts = _coerce_config(configuration, prefix)
- opts.update(kwargs)
- url = opts.pop('url')
- return create_engine(url, **opts)
-
-def _coerce_config(configuration, prefix):
- """Convert configuration values to expected types."""
-
- options = dict((key[len(prefix):], configuration[key])
- for key in configuration
- if key.startswith(prefix))
- for option, type_ in (
- ('convert_unicode', util.bool_or_str('force')),
- ('pool_timeout', int),
- ('echo', util.bool_or_str('debug')),
- ('echo_pool', util.bool_or_str('debug')),
- ('pool_recycle', int),
- ('pool_size', int),
- ('max_overflow', int),
- ('pool_threadlocal', bool),
- ('use_native_unicode', bool),
- ):
- util.coerce_kw_type(options, option, type_)
- return options
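
A sketch of the coercion path: string values, as read from an ini-style file, are converted per the table in _coerce_config() before reaching create_engine()::

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite://",        # in-memory database
        "sqlalchemy.echo": "debug",           # bool_or_str('debug') keeps it
        "sqlalchemy.pool_recycle": "3600",    # coerced to int
    }
    engine = engine_from_config(config, prefix="sqlalchemy.")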
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/base.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/base.py
deleted file mode 100755
index 31fdd7fb..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/base.py
+++ /dev/null
@@ -1,2995 +0,0 @@
-# engine/base.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-"""Basic components for SQL execution and interfacing with DB-API.
-
-Defines the basic components used to interface DB-API modules with
-higher-level statement-construction, connection-management, execution
-and result contexts.
-"""
-
-__all__ = [
- 'BufferedColumnResultProxy', 'BufferedColumnRow',
- 'BufferedRowResultProxy', 'Compiled', 'Connectable', 'Connection',
- 'Dialect', 'Engine', 'ExecutionContext', 'NestedTransaction',
- 'ResultProxy', 'RootTransaction', 'RowProxy', 'SchemaIterator',
- 'StringIO', 'Transaction', 'TwoPhaseTransaction',
- 'connection_memoize']
-
-import inspect, StringIO, sys, operator
-from itertools import izip
-from sqlalchemy import exc, schema, util, types, log, interfaces, \
- event, events
-from sqlalchemy.sql import expression
-from sqlalchemy import processors
-import collections
-
-class Dialect(object):
- """Define the behavior of a specific database and DB-API combination.
-
- Any aspect of metadata definition, SQL query generation,
- execution, result-set handling, or anything else which varies
- between databases is defined under the general category of the
- Dialect. The Dialect acts as a factory for other
- database-specific object implementations including
- ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
-
- All Dialects implement the following attributes:
-
- name
- identifying name for the dialect from a DBAPI-neutral point of view
- (i.e. 'sqlite')
-
- driver
- identifying name for the dialect's DBAPI
-
- positional
- True if the paramstyle for this Dialect is positional.
-
- paramstyle
- the paramstyle to be used (some DB-APIs support multiple
- paramstyles).
-
- convert_unicode
- True if Unicode conversion should be applied to all ``str``
- types.
-
- encoding
- type of encoding to use for unicode, usually defaults to
- 'utf-8'.
-
- statement_compiler
- a :class:`~Compiled` class used to compile SQL statements
-
- ddl_compiler
- a :class:`~Compiled` class used to compile DDL statements
-
- server_version_info
- a tuple containing a version number for the DB backend in use.
- This value is only available for supporting dialects, and is
- typically populated during the initial connection to the database.
-
- default_schema_name
- the name of the default schema. This value is only available for
- supporting dialects, and is typically populated during the
- initial connection to the database.
-
- execution_ctx_cls
- a :class:`.ExecutionContext` class used to handle statement execution
-
- execute_sequence_format
- either the 'tuple' or 'list' type, depending on what cursor.execute()
- accepts for the second argument (they vary).
-
- preparer
- a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
- quote identifiers.
-
- supports_alter
- ``True`` if the database supports ``ALTER TABLE``.
-
- max_identifier_length
- The maximum length of identifier names.
-
- supports_unicode_statements
- Indicate whether the DB-API can receive SQL statements as Python
- unicode strings
-
- supports_unicode_binds
- Indicate whether the DB-API can receive string bind parameters
- as Python unicode strings
-
- supports_sane_rowcount
- Indicate whether the dialect properly implements rowcount for
- ``UPDATE`` and ``DELETE`` statements.
-
- supports_sane_multi_rowcount
- Indicate whether the dialect properly implements rowcount for
- ``UPDATE`` and ``DELETE`` statements when executed via
- executemany.
-
- preexecute_autoincrement_sequences
- True if 'implicit' primary key functions must be executed separately
- in order to get their value. This is currently oriented towards
- Postgresql.
-
- implicit_returning
- use RETURNING or equivalent during INSERT execution in order to load
- newly generated primary keys and other column defaults in one execution,
- which are then available via inserted_primary_key.
- If an insert statement has returning() specified explicitly,
- the "implicit" functionality is not used and inserted_primary_key
- will not be available.
-
- dbapi_type_map
- A mapping of DB-API type objects present in this Dialect's
- DB-API implementation mapped to TypeEngine implementations used
- by the dialect.
-
- This is used to apply types to result sets based on the DB-API
- types present in cursor.description; it only takes effect for
- result sets against textual statements where no explicit
- typemap was present.
-
- colspecs
- A dictionary of TypeEngine classes from sqlalchemy.types mapped
- to subclasses that are specific to the dialect class. This
- dictionary is class-level only and is not accessed from the
- dialect instance itself.
-
- supports_default_values
- Indicates if the construct ``INSERT INTO tablename DEFAULT
- VALUES`` is supported
-
- supports_sequences
- Indicates if the dialect supports CREATE SEQUENCE or similar.
-
- sequences_optional
- If True, indicates if the "optional" flag on the Sequence() construct
- should signal to not generate a CREATE SEQUENCE. Applies only to
- dialects that support sequences. Currently used only to allow Postgresql
- SERIAL to be used on a column that specifies Sequence() for usage on
- other backends.
-
- supports_native_enum
- Indicates if the dialect supports a native ENUM construct.
- This will prevent types.Enum from generating a CHECK
- constraint when that type is used.
-
- supports_native_boolean
- Indicates if the dialect supports a native boolean construct.
- This will prevent types.Boolean from generating a CHECK
- constraint when that type is used.
-
- """
-
- def create_connect_args(self, url):
- """Build DB-API compatible connection arguments.
-
- Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
- consisting of a `*args`/`**kwargs` suitable to send directly
- to the dbapi's connect function.
-
- """
-
- raise NotImplementedError()
-
- @classmethod
- def type_descriptor(cls, typeobj):
- """Transform a generic type to a dialect-specific type.
-
- Dialect classes will usually use the
- :func:`~sqlalchemy.types.adapt_type` function in the types module to
- make this job easy.
-
- The returned result is cached *per dialect class*, so it can
- contain no dialect-instance state.
-
- """
-
- raise NotImplementedError()
-
- def initialize(self, connection):
- """Called during strategized creation of the dialect with a
- connection.
-
- Allows dialects to configure options based on server version info or
- other properties.
-
- The connection passed here is a SQLAlchemy Connection object,
- with full capabilities.
-
- The initialize() method of the base dialect should be called via
- super().
-
- """
-
- pass
-
- def reflecttable(self, connection, table, include_columns=None):
- """Load table description from the database.
-
- Given a :class:`.Connection` and a
- :class:`~sqlalchemy.schema.Table` object, reflect its columns and
- properties from the database. If include_columns (a list or
- set) is specified, limit the autoload to the given column
- names.
-
- The default implementation uses the
- :class:`~sqlalchemy.engine.reflection.Inspector` interface to
- provide the output, building upon the granular table/column/
- constraint etc. methods of :class:`.Dialect`.
-
- """
-
- raise NotImplementedError()
-
- def get_columns(self, connection, table_name, schema=None, **kw):
- """Return information about columns in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return column
- information as a list of dictionaries with these keys:
-
- name
- the column's name
-
- type
- [sqlalchemy.types#TypeEngine]
-
- nullable
- boolean
-
- default
- the column's default value
-
- autoincrement
- boolean
-
- sequence
- a dictionary of the form
- {'name' : str, 'start' :int, 'increment': int}
-
- Additional column attributes may be present.
- """
-
- raise NotImplementedError()
-
- def get_primary_keys(self, connection, table_name, schema=None, **kw):
- """Return information about primary keys in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return primary
- key information as a list of column names.
-
- """
- raise NotImplementedError()
-
- def get_pk_constraint(self, table_name, schema=None, **kw):
- """Return information about the primary key constraint on
- table_name`.
-
- Given a string `table_name`, and an optional string `schema`, return
- primary key information as a dictionary with these keys:
-
- constrained_columns
- a list of column names that make up the primary key
-
- name
- optional name of the primary key constraint.
-
- """
- raise NotImplementedError()
-
- def get_foreign_keys(self, connection, table_name, schema=None, **kw):
- """Return information about foreign_keys in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name`, and an optional string `schema`, return foreign
- key information as a list of dicts with these keys:
-
- name
- the constraint's name
-
- constrained_columns
- a list of column names that make up the foreign key
-
- referred_schema
- the name of the referred schema
-
- referred_table
- the name of the referred table
-
- referred_columns
- a list of column names in the referred table that correspond to
- constrained_columns
- """
-
- raise NotImplementedError()
-
- def get_table_names(self, connection, schema=None, **kw):
- """Return a list of table names for `schema`."""
-
- raise NotImplementedError
-
- def get_view_names(self, connection, schema=None, **kw):
- """Return a list of all view names available in the database.
-
- schema:
- Optional, retrieve names from a non-default schema.
- """
-
- raise NotImplementedError()
-
- def get_view_definition(self, connection, view_name, schema=None, **kw):
- """Return view definition.
-
- Given a :class:`.Connection`, a string
- `view_name`, and an optional string `schema`, return the view
- definition.
- """
-
- raise NotImplementedError()
-
- def get_indexes(self, connection, table_name, schema=None, **kw):
- """Return information about indexes in `table_name`.
-
- Given a :class:`.Connection`, a string
- `table_name` and an optional string `schema`, return index
- information as a list of dictionaries with these keys:
-
- name
- the index's name
-
- column_names
- list of column names in order
-
- unique
- boolean
- """
-
- raise NotImplementedError()
-
- def normalize_name(self, name):
- """convert the given name to lowercase if it is detected as
- case insensitive.
-
- this method is only used if the dialect defines
- requires_name_normalize=True.
-
- """
- raise NotImplementedError()
-
- def denormalize_name(self, name):
- """convert the given name to a case insensitive identifier
- for the backend if it is an all-lowercase name.
-
- this method is only used if the dialect defines
- requires_name_normalize=True.
-
- """
- raise NotImplementedError()
-
- def has_table(self, connection, table_name, schema=None):
- """Check the existence of a particular table in the database.
-
- Given a :class:`.Connection` object and a string
- `table_name`, return True if the given table (possibly within
- the specified `schema`) exists in the database, False
- otherwise.
- """
-
- raise NotImplementedError()
-
- def has_sequence(self, connection, sequence_name, schema=None):
- """Check the existence of a particular sequence in the database.
-
- Given a :class:`.Connection` object and a string
- `sequence_name`, return True if the given sequence exists in
- the database, False otherwise.
- """
-
- raise NotImplementedError()
-
- def _get_server_version_info(self, connection):
- """Retrieve the server version info from the given connection.
-
- This is used by the default implementation to populate the
- "server_version_info" attribute and is called exactly
- once upon first connect.
-
- """
-
- raise NotImplementedError()
-
- def _get_default_schema_name(self, connection):
- """Return the string name of the currently selected schema from
- the given connection.
-
- This is used by the default implementation to populate the
- "default_schema_name" attribute and is called exactly
- once upon first connect.
-
- """
-
- raise NotImplementedError()
-
- def do_begin(self, connection):
- """Provide an implementation of *connection.begin()*, given a
- DB-API connection."""
-
- raise NotImplementedError()
-
- def do_rollback(self, connection):
- """Provide an implementation of *connection.rollback()*, given
- a DB-API connection."""
-
- raise NotImplementedError()
-
- def create_xid(self):
- """Create a two-phase transaction ID.
-
- This id will be passed to do_begin_twophase(),
- do_rollback_twophase(), do_commit_twophase(). Its format is
- unspecified.
- """
-
- raise NotImplementedError()
-
- def do_commit(self, connection):
- """Provide an implementation of *connection.commit()*, given a
- DB-API connection."""
-
- raise NotImplementedError()
-
- def do_savepoint(self, connection, name):
- """Create a savepoint with the given name on a SQLAlchemy
- connection."""
-
- raise NotImplementedError()
-
- def do_rollback_to_savepoint(self, connection, name):
- """Rollback a SQL Alchemy connection to the named savepoint."""
-
- raise NotImplementedError()
-
- def do_release_savepoint(self, connection, name):
- """Release the named savepoint on a SQL Alchemy connection."""
-
- raise NotImplementedError()
-
- def do_begin_twophase(self, connection, xid):
- """Begin a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_prepare_twophase(self, connection, xid):
- """Prepare a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_rollback_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- """Rollback a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_commit_twophase(self, connection, xid, is_prepared=True,
- recover=False):
- """Commit a two phase transaction on the given connection."""
-
- raise NotImplementedError()
-
- def do_recover_twophase(self, connection):
- """Recover list of uncommited prepared two phase transaction
- identifiers on the given connection."""
-
- raise NotImplementedError()
-
- def do_executemany(self, cursor, statement, parameters, context=None):
- """Provide an implementation of *cursor.executemany(statement,
- parameters)*."""
-
- raise NotImplementedError()
-
- def do_execute(self, cursor, statement, parameters, context=None):
- """Provide an implementation of *cursor.execute(statement,
- parameters)*."""
-
- raise NotImplementedError()
-
- def is_disconnect(self, e, connection, cursor):
- """Return True if the given DB-API error indicates an invalid
- connection"""
-
- raise NotImplementedError()
-
- def connect(self):
- """return a callable which sets up a newly created DBAPI connection.
-
- The callable accepts a single argument "conn" which is the
- DBAPI connection itself. It has no return value.
-
- This is used to set dialect-wide per-connection options such as
- isolation modes, unicode modes, etc.
-
- If a callable is returned, it will be assembled into a pool listener
- that receives the direct DBAPI connection, with all wrappers removed.
-
- If None is returned, no listener will be generated.
-
- """
- return None
-
- def reset_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, revert its isolation to the default."""
-
- raise NotImplementedError()
-
- def set_isolation_level(self, dbapi_conn, level):
- """Given a DBAPI connection, set its isolation level."""
-
- raise NotImplementedError()
-
- def get_isolation_level(self, dbapi_conn):
- """Given a DBAPI connection, return its isolation level."""
-
- raise NotImplementedError()
-
-
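As a hypothetical minimal sketch (no such driver exists; all names below are illustrative), the subset of this interface most third-party dialects implement first::

    from sqlalchemy.engine import default

    class AcmeDialect(default.DefaultDialect):
        name = 'acmedb'
        driver = 'acmepy'

        @classmethod
        def dbapi(cls):
            import acmepy          # hypothetical DBAPI module
            return acmepy

        def create_connect_args(self, url):
            # map URL fields to the DBAPI's connect() signature
            opts = url.translate_connect_args(username='user')
            return ([], opts)

        def is_disconnect(self, e, connection, cursor):
            # recognize this DBAPI's "dead connection" errors
            return 'connection closed' in str(e)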
-class ExecutionContext(object):
- """A messenger object for a Dialect that corresponds to a single
- execution.
-
- ExecutionContext should have these data members:
-
- connection
- Connection object which can be freely used by default value
- generators to execute SQL. This Connection should reference the
- same underlying connection/transactional resources of
- root_connection.
-
- root_connection
- Connection object which is the source of this ExecutionContext. This
- Connection may have close_with_result=True set, in which case it can
- only be used once.
-
- dialect
- dialect which created this ExecutionContext.
-
- cursor
- DB-API cursor procured from the connection.
-
- compiled
- if passed to constructor, the sqlalchemy.engine.base.Compiled object
- being executed.
-
- statement
- string version of the statement to be executed. Is either
- passed to the constructor, or must be created from the
- sql.Compiled object by the time pre_exec() has completed.
-
- parameters
- bind parameters passed to the execute() method. For compiled
- statements, this is a dictionary or list of dictionaries. For
- textual statements, it should be in a format suitable for the
- dialect's paramstyle (i.e. dict or list of dicts for
- non-positional, list or list of lists/tuples for positional).
-
- isinsert
- True if the statement is an INSERT.
-
- isupdate
- True if the statement is an UPDATE.
-
- should_autocommit
- True if the statement is a "committable" statement.
-
- postfetch_cols
- a list of Column objects for which a server-side default or
- inline SQL expression value was fired off. Applies to inserts
- and updates.
- """
-
- def create_cursor(self):
- """Return a new cursor generated from this ExecutionContext's
- connection.
-
- Some dialects may wish to change the behavior of
- connection.cursor(), such as postgresql which may return a PG
- "server side" cursor.
- """
-
- raise NotImplementedError()
-
- def pre_exec(self):
- """Called before an execution of a compiled statement.
-
- If a compiled statement was passed to this ExecutionContext,
- the `statement` and `parameters` datamembers must be
- initialized after this statement is complete.
- """
-
- raise NotImplementedError()
-
- def post_exec(self):
- """Called after the execution of a compiled statement.
-
- If a compiled statement was passed to this ExecutionContext,
- the `last_insert_ids`, `last_inserted_params`, etc.
- datamembers should be available after this method completes.
- """
-
- raise NotImplementedError()
-
- def result(self):
- """Return a result object corresponding to this ExecutionContext.
-
- Returns a ResultProxy.
- """
-
- raise NotImplementedError()
-
- def handle_dbapi_exception(self, e):
- """Receive a DBAPI exception which occurred upon execute, result
- fetch, etc."""
-
- raise NotImplementedError()
-
- def should_autocommit_text(self, statement):
- """Parse the given textual statement and return True if it refers to
- a "committable" statement"""
-
- raise NotImplementedError()
-
- def lastrow_has_defaults(self):
- """Return True if the last INSERT or UPDATE row contained
- inlined or database-side defaults.
- """
-
- raise NotImplementedError()
-
- def get_rowcount(self):
- """Return the number of rows produced (by a SELECT query)
- or affected (by an INSERT/UPDATE/DELETE statement).
-
- Note that this row count may not be properly implemented
- in some dialects; this is indicated by the
- ``supports_sane_rowcount`` and ``supports_sane_multi_rowcount``
- dialect attributes.
-
- """
-
- raise NotImplementedError()
-
-
-class Compiled(object):
- """Represent a compiled SQL or DDL expression.
-
- The ``__str__`` method of the ``Compiled`` object should produce
- the actual text of the statement. ``Compiled`` objects are
- specific to their underlying database dialect, and also may
- or may not be specific to the columns referenced within a
- particular set of bind parameters. In no case should the
- ``Compiled`` object be dependent on the actual values of those
- bind parameters, even though it may reference those values as
- defaults.
- """
-
- def __init__(self, dialect, statement, bind=None):
- """Construct a new ``Compiled`` object.
-
- :param dialect: ``Dialect`` to compile against.
-
- :param statement: ``ClauseElement`` to be compiled.
-
- :param bind: Optional Engine or Connection to compile this
- statement against.
- """
-
- self.dialect = dialect
- self.bind = bind
- if statement is not None:
- self.statement = statement
- self.can_execute = statement.supports_execution
- self.string = self.process(self.statement)
-
- @util.deprecated("0.7", ":class:`.Compiled` objects now compile "
- "within the constructor.")
- def compile(self):
- """Produce the internal string representation of this element."""
- pass
-
- @property
- def sql_compiler(self):
- """Return a Compiled that is capable of processing SQL expressions.
-
- If this compiler is one, it would likely just return 'self'.
-
- """
-
- raise NotImplementedError()
-
- def process(self, obj, **kwargs):
- return obj._compiler_dispatch(self, **kwargs)
-
- def __str__(self):
- """Return the string text of the generated SQL or DDL."""
-
- return self.string or ''
-
- def construct_params(self, params=None):
- """Return the bind params for this compiled object.
-
- :param params: a dict of string/object pairs whose values will
- override bind values compiled in to the
- statement.
- """
-
- raise NotImplementedError()
-
- @property
- def params(self):
- """Return the bind params for this compiled object."""
- return self.construct_params()
-
- def execute(self, *multiparams, **params):
- """Execute this compiled object."""
-
- e = self.bind
- if e is None:
- raise exc.UnboundExecutionError(
- "This Compiled object is not bound to any Engine "
- "or Connection.")
- return e._execute_compiled(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Execute this compiled object and return the result's
- scalar value."""
-
- return self.execute(*multiparams, **params).scalar()
-
-
-class TypeCompiler(object):
- """Produces DDL specification for TypeEngine objects."""
-
- def __init__(self, dialect):
- self.dialect = dialect
-
- def process(self, type_):
- return type_._compiler_dispatch(self)
-
-
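process() double-dispatches on the type's __visit_name__, landing in the matching visit_XYZ() method; the Sybase compiler earlier in this diff is a concrete producer. Using the names as that file defines them::

    from sqlalchemy.dialects.sybase.base import (
        SybaseDialect, SybaseTypeCompiler, BIT, UNICHAR)

    tc = SybaseTypeCompiler(SybaseDialect())
    print(tc.process(BIT()))         # BIT
    print(tc.process(UNICHAR(10)))   # UNICHAR(10)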
-class Connectable(object):
- """Interface for an object which supports execution of SQL constructs.
-
- The two implementations of ``Connectable`` are :class:`.Connection` and
- :class:`.Engine`.
-
- Connectable must also implement the 'dialect' member which references a
- :class:`.Dialect` instance.
- """
-
- def contextual_connect(self):
- """Return a Connection object which may be part of an ongoing
- context."""
-
- raise NotImplementedError()
-
- def create(self, entity, **kwargs):
- """Create a table or index given an appropriate schema object."""
-
- raise NotImplementedError()
-
- def drop(self, entity, **kwargs):
- """Drop a table or index given an appropriate schema object."""
-
- raise NotImplementedError()
-
- def execute(self, object, *multiparams, **params):
- """Executes the given construct and returns a :class:`.ResultProxy`."""
- raise NotImplementedError()
-
- def scalar(self, object, *multiparams, **params):
- """Executes and returns the first column of the first row.
-
- The underlying cursor is closed after execution.
- """
- raise NotImplementedError()
-
- def _execute_clauseelement(self, elem, multiparams=None, params=None):
- raise NotImplementedError()
-
-
-class Connection(Connectable):
- """Provides high-level functionality for a wrapped DB-API connection.
-
- Provides execution support for string-based SQL statements as well as
- :class:`.ClauseElement`, :class:`.Compiled` and :class:`.DefaultGenerator`
- objects. Provides a :meth:`begin` method to return :class:`.Transaction`
- objects.
-
- The Connection object is **not** thread-safe. While a Connection can be
- shared among threads using properly synchronized access, it is still
- possible that the underlying DBAPI connection may not support shared
- access between threads. Check the DBAPI documentation for details.
-
- The Connection object represents a single dbapi connection checked out
- from the connection pool. In this state, the connection pool has no effect
- upon the connection, including its expiration or timeout state. For the
- connection pool to properly manage connections, connections should be
- returned to the connection pool (i.e. ``connection.close()``) whenever the
- connection is not in use.
-
- .. index::
- single: thread safety; Connection
-
- """
-
- def __init__(self, engine, connection=None, close_with_result=False,
- _branch=False, _execution_options=None):
- """Construct a new Connection.
-
- The constructor here is not public and is called only by an
- :class:`.Engine`. See :meth:`.Engine.connect` and
- :meth:`.Engine.contextual_connect` methods.
-
- """
- self.engine = engine
- self.dialect = engine.dialect
- self.__connection = connection or engine.raw_connection()
- self.__transaction = None
- self.should_close_with_result = close_with_result
- self.__savepoint_seq = 0
- self.__branch = _branch
- self.__invalid = False
- self._has_events = engine._has_events
- self._echo = self.engine._should_log_info()
- if _execution_options:
- self._execution_options =\
- engine._execution_options.union(_execution_options)
- else:
- self._execution_options = engine._execution_options
-
- def _branch(self):
- """Return a new Connection which references this Connection's
- engine and connection; but does not have close_with_result enabled,
- and also whose close() method does nothing.
-
- This is used to execute "sub" statements within a single execution,
- usually an INSERT statement.
- """
-
- return self.engine._connection_cls(
- self.engine,
- self.__connection, _branch=True)
-
- def _clone(self):
- """Create a shallow copy of this Connection.
-
- """
- c = self.__class__.__new__(self.__class__)
- c.__dict__ = self.__dict__.copy()
- return c
-
- def execution_options(self, **opt):
- """ Set non-SQL options for the connection which take effect
- during execution.
-
- The method returns a copy of this :class:`.Connection` which references
- the same underlying DBAPI connection, but also defines the given
- execution options which will take effect for a call to
- :meth:`execute`. As the new :class:`.Connection` references the same
- underlying resource, it is probably best to ensure that the copies
- are discarded immediately, which is implicit if used as in::
-
- result = connection.execution_options(stream_results=True).\\
- execute(stmt)
-
- :meth:`.Connection.execution_options` accepts all options as those
- accepted by :meth:`.Executable.execution_options`. Additionally,
- it includes options that are applicable only to
- :class:`.Connection`.
-
- :param autocommit: Available on: Connection, statement.
- When True, a COMMIT will be invoked after execution
- when executed in 'autocommit' mode, i.e. when an explicit
- transaction is not begun on the connection. Note that DBAPI
- connections by default are always in a transaction - SQLAlchemy uses
- rules applied to different kinds of statements to determine if
- COMMIT will be invoked in order to provide its "autocommit" feature.
- Typically, all INSERT/UPDATE/DELETE statements as well as
- CREATE/DROP statements have autocommit behavior enabled; SELECT
- constructs do not. Use this option when invoking a SELECT or other
- specific SQL construct where COMMIT is desired (typically when
- calling stored procedures and such), and an explicit
- transaction is not in progress.
-
- :param compiled_cache: Available on: Connection.
- A dictionary where :class:`.Compiled` objects
- will be cached when the :class:`.Connection` compiles a clause
- expression into a :class:`.Compiled` object.
- It is the user's responsibility to
- manage the size of this dictionary, which will have keys
- corresponding to the dialect, clause element, the column
- names within the VALUES or SET clause of an INSERT or UPDATE,
- as well as the "batch" mode for an INSERT or UPDATE statement.
- The format of this dictionary is not guaranteed to stay the
- same in future releases.
-
- Note that the ORM makes use of its own "compiled" caches for
- some operations, including flush operations. The caching
- used by the ORM internally supersedes a cache dictionary
- specified here.
-
- :param isolation_level: Available on: Connection.
- Set the transaction isolation level for
- the lifespan of this connection. Valid values include
- those string values accepted by the ``isolation_level``
- parameter passed to :func:`.create_engine`, and are
- database specific, including those for :ref:`sqlite_toplevel` and
- :ref:`postgresql_toplevel` - see those dialects' documentation
- for further info.
-
- Note that this option necessarily affects the underlying
- DBAPI connection for the lifespan of the originating
- :class:`.Connection`, and is not per-execution. This
- setting is not removed until the underlying DBAPI connection
- is returned to the connection pool, i.e.
- the :meth:`.Connection.close` method is called.
-
- :param stream_results: Available on: Connection, statement.
- Indicate to the dialect that results should be
- "streamed" and not pre-buffered, if possible. This is a limitation
- of many DBAPIs. The flag is currently understood only by the
- psycopg2 dialect.
-
- """
- c = self._clone()
- c._execution_options = c._execution_options.union(opt)
- if 'isolation_level' in opt:
- c._set_isolation_level()
- return c
-
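# --- Editorial sketch, not part of the original file: illustrates the
# execution options documented above. The in-memory SQLite URL and the
# statement are assumptions chosen for illustration.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
conn = engine.connect()

# A caller-managed compiled_cache dictionary; repeated executions of the
# same ClauseElement reuse the cached Compiled object.
cache = {}
caching_conn = conn.execution_options(compiled_cache=cache)

# stream_results is honored only by dialects that understand it
# (psycopg2 at this release); on SQLite it is simply ignored.
result = conn.execution_options(stream_results=True).execute("SELECT 1")
print result.scalar()    # 1
conn.close()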
- def _set_isolation_level(self):
- self.dialect.set_isolation_level(self.connection,
- self._execution_options['isolation_level'])
- self.connection._connection_record.finalize_callback = \
- self.dialect.reset_isolation_level
-
- @property
- def closed(self):
- """Return True if this connection is closed."""
-
- return not self.__invalid and '_Connection__connection' \
- not in self.__dict__
-
- @property
- def invalidated(self):
- """Return True if this connection was invalidated."""
-
- return self.__invalid
-
- @property
- def connection(self):
- "The underlying DB-API connection managed by this Connection."
-
- try:
- return self.__connection
- except AttributeError:
- return self._revalidate_connection()
-
- def _revalidate_connection(self):
- if self.__invalid:
- if self.__transaction is not None:
- raise exc.InvalidRequestError(
- "Can't reconnect until invalid "
- "transaction is rolled back")
- self.__connection = self.engine.raw_connection()
- self.__invalid = False
- return self.__connection
- raise exc.ResourceClosedError("This Connection is closed")
-
- @property
- def _connection_is_valid(self):
- # use getattr() for is_valid to support exceptions raised in
- # dialect initializer, where the connection is not wrapped in
- # _ConnectionFairy
-
- return getattr(self.__connection, 'is_valid', False)
-
- @property
- def info(self):
- """A collection of per-DB-API connection instance properties."""
-
- return self.connection.info
-
- def connect(self):
- """Returns self.
-
- This ``Connectable`` interface method returns self, allowing
- Connections to be used interchangeably with Engines in most
- situations that require a bind.
- """
-
- return self
-
- def contextual_connect(self, **kwargs):
- """Returns self.
-
- This ``Connectable`` interface method returns self, allowing
- Connections to be used interchangeably with Engines in most
- situations that require a bind.
- """
-
- return self
-
- def invalidate(self, exception=None):
- """Invalidate the underlying DBAPI connection associated with
- this Connection.
-
- The underlying DB-API connection is literally closed (if
- possible), and is discarded. Its source connection pool will
- typically lazily create a new connection to replace it.
-
- Upon the next usage, this Connection will attempt to reconnect
- to the pool with a new connection.
-
- Transactions in progress remain in an "opened" state (even though the
- actual transaction is gone); these must be explicitly rolled back
- before a reconnect on this Connection can proceed. This is to prevent
- applications from accidentally continuing their transactional
- operations in a non-transactional state.
-
- """
- if self.invalidated:
- return
-
- if self.closed:
- raise exc.ResourceClosedError("This Connection is closed")
-
- if self._connection_is_valid:
- self.__connection.invalidate(exception)
- del self.__connection
- self.__invalid = True
-
- def detach(self):
- """Detach the underlying DB-API connection from its connection pool.
-
- This Connection instance will remain usable. When closed,
- the DB-API connection will be literally closed and not
- returned to its pool. The pool will typically lazily create a
- new connection to replace the detached connection.
-
- This method can be used to insulate the rest of an application
- from a modified state on a connection (such as a transaction
- isolation level or similar). Also see
- :class:`~sqlalchemy.interfaces.PoolListener` for a mechanism to modify
- connection state when connections leave and return to their
- connection pool.
- """
-
- self.__connection.detach()
-
- def begin(self):
- """Begin a transaction and return a Transaction handle.
-
- Repeated calls to ``begin`` on the same Connection will create
- a lightweight, emulated nested transaction. Only the
- outermost transaction may ``commit``. Calls to ``commit`` on
- inner transactions are ignored. Any transaction in the
- hierarchy may ``rollback``, however.
- """
-
- if self.__transaction is None:
- self.__transaction = RootTransaction(self)
- return self.__transaction
- else:
- return Transaction(self, self.__transaction)
-
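# --- Editorial sketch, not part of the original file: repeated begin()
# calls create the emulated nesting described above; only the outermost
# commit issues the real COMMIT. Table name is an assumption.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
conn = engine.connect()
outer = conn.begin()
inner = conn.begin()       # lightweight, emulated nested transaction
conn.execute("CREATE TABLE t (x INTEGER)")
inner.commit()             # no-op; the outer transaction is still open
outer.commit()             # the actual COMMIT happens here
conn.close()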
- def begin_nested(self):
- """Begin a nested transaction and return a Transaction handle.
-
- Nested transactions require SAVEPOINT support in the
- underlying database. Any transaction in the hierarchy may
- ``commit`` and ``rollback``; however, the outermost transaction
- still controls the overall ``commit`` or ``rollback`` of the
- transaction as a whole.
- """
-
- if self.__transaction is None:
- self.__transaction = RootTransaction(self)
- else:
- self.__transaction = NestedTransaction(self, self.__transaction)
- return self.__transaction
-
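# --- Editorial sketch, not part of the original file: SAVEPOINT-based
# nesting, assuming a SQLite build with SAVEPOINT support. The inner
# rollback undoes only the work done since the savepoint.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
conn = engine.connect()
conn.execute("CREATE TABLE t (x INTEGER)")

trans = conn.begin()
conn.execute("INSERT INTO t (x) VALUES (1)")
nested = conn.begin_nested()     # issues SAVEPOINT
conn.execute("INSERT INTO t (x) VALUES (2)")
nested.rollback()                # ROLLBACK TO SAVEPOINT; x=2 is gone
trans.commit()                   # x=1 persists
print conn.execute("SELECT x FROM t").fetchall()    # [(1,)]
conn.close()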
- def begin_twophase(self, xid=None):
- """Begin a two-phase or XA transaction and return a Transaction
- handle.
-
- :param xid: the two phase transaction id. If not supplied, a
- random id will be generated.
-
- """
-
- if self.__transaction is not None:
- raise exc.InvalidRequestError(
- "Cannot start a two phase transaction when a transaction "
- "is already in progress.")
- if xid is None:
- xid = self.engine.dialect.create_xid()
- self.__transaction = TwoPhaseTransaction(self, xid)
- return self.__transaction
-
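# --- Editorial sketch, not part of the original file: the two-phase
# flow on a backend with XA support. The PostgreSQL URL and the
# "accounts" table are assumptions; SQLite would not support this.
from sqlalchemy import create_engine

engine = create_engine('postgresql://scott:tiger@localhost/test')
conn = engine.connect()
xa = conn.begin_twophase()       # xid generated by the dialect
conn.execute("UPDATE accounts SET balance = balance - 100 WHERE id = 1")
xa.prepare()                     # phase one: PREPARE TRANSACTION
xa.commit()                      # phase two: COMMIT PREPARED
conn.close()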
- def recover_twophase(self):
- return self.engine.dialect.do_recover_twophase(self)
-
- def rollback_prepared(self, xid, recover=False):
- self.engine.dialect.do_rollback_twophase(self, xid, recover=recover)
-
- def commit_prepared(self, xid, recover=False):
- self.engine.dialect.do_commit_twophase(self, xid, recover=recover)
-
- def in_transaction(self):
- """Return True if a transaction is in progress."""
-
- return self.__transaction is not None
-
- def _begin_impl(self):
- if self._echo:
- self.engine.logger.info("BEGIN (implicit)")
-
- if self._has_events:
- self.engine.dispatch.begin(self)
-
- try:
- self.engine.dialect.do_begin(self.connection)
- except Exception, e:
- self._handle_dbapi_exception(e, None, None, None, None)
- raise
-
- def _rollback_impl(self):
- if self._has_events:
- self.engine.dispatch.rollback(self)
-
- if not self.closed and not self.invalidated and \
- self._connection_is_valid:
- if self._echo:
- self.engine.logger.info("ROLLBACK")
- try:
- self.engine.dialect.do_rollback(self.connection)
- self.__transaction = None
- except Exception, e:
- self._handle_dbapi_exception(e, None, None, None, None)
- raise
- else:
- self.__transaction = None
-
- def _commit_impl(self):
- if self._has_events:
- self.engine.dispatch.commit(self)
-
- if self._echo:
- self.engine.logger.info("COMMIT")
- try:
- self.engine.dialect.do_commit(self.connection)
- self.__transaction = None
- except Exception, e:
- self._handle_dbapi_exception(e, None, None, None, None)
- raise
-
- def _savepoint_impl(self, name=None):
- if self._has_events:
- self.engine.dispatch.savepoint(self, name)
-
- if name is None:
- self.__savepoint_seq += 1
- name = 'sa_savepoint_%s' % self.__savepoint_seq
- if self._connection_is_valid:
- self.engine.dialect.do_savepoint(self, name)
- return name
-
- def _rollback_to_savepoint_impl(self, name, context):
- if self._has_events:
- self.engine.dispatch.rollback_savepoint(self, name, context)
-
- if self._connection_is_valid:
- self.engine.dialect.do_rollback_to_savepoint(self, name)
- self.__transaction = context
-
- def _release_savepoint_impl(self, name, context):
- if self._has_events:
- self.engine.dispatch.release_savepoint(self, name, context)
-
- if self._connection_is_valid:
- self.engine.dialect.do_release_savepoint(self, name)
- self.__transaction = context
-
- def _begin_twophase_impl(self, xid):
- if self._has_events:
- self.engine.dispatch.begin_twophase(self, xid)
-
- if self._connection_is_valid:
- self.engine.dialect.do_begin_twophase(self, xid)
-
- def _prepare_twophase_impl(self, xid):
- if self._has_events:
- self.engine.dispatch.prepare_twophase(self, xid)
-
- if self._connection_is_valid:
- assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_prepare_twophase(self, xid)
-
- def _rollback_twophase_impl(self, xid, is_prepared):
- if self._has_events:
- self.engine.dispatch.rollback_twophase(self, xid, is_prepared)
-
- if self._connection_is_valid:
- assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_rollback_twophase(self, xid, is_prepared)
- self.__transaction = None
-
- def _commit_twophase_impl(self, xid, is_prepared):
- if self._has_events:
- self.engine.dispatch.commit_twophase(self, xid, is_prepared)
-
- if self._connection_is_valid:
- assert isinstance(self.__transaction, TwoPhaseTransaction)
- self.engine.dialect.do_commit_twophase(self, xid, is_prepared)
- self.__transaction = None
-
- def _autorollback(self):
- if not self.in_transaction():
- self._rollback_impl()
-
- def close(self):
- """Close this Connection."""
-
- try:
- conn = self.__connection
- except AttributeError:
- return
- if not self.__branch:
- conn.close()
- self.__invalid = False
- del self.__connection
- self.__transaction = None
-
- def scalar(self, object, *multiparams, **params):
- """Executes and returns the first column of the first row.
-
- The underlying result/cursor is closed after execution.
- """
-
- return self.execute(object, *multiparams, **params).scalar()
-
- def execute(self, object, *multiparams, **params):
- """Executes the given construct and returns a :class:`.ResultProxy`.
-
- The construct can be one of:
-
- * a textual SQL string
- * any :class:`.ClauseElement` construct that is also
- a subclass of :class:`.Executable`, such as a
- :func:`expression.select` construct
- * a :class:`.FunctionElement`, such as that generated
- by :attr:`.func`, will be automatically wrapped in
- a SELECT statement, which is then executed.
- * a :class:`.DDLElement` object
- * a :class:`.DefaultGenerator` object
- * a :class:`.Compiled` object
-
- """
- for c in type(object).__mro__:
- if c in Connection.executors:
- return Connection.executors[c](
- self,
- object,
- multiparams,
- params)
- else:
- raise exc.InvalidRequestError(
- "Unexecutable object type: %s" %
- type(object))
-
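# --- Editorial sketch, not part of the original file: the construct
# types listed above all pass through the same execute() call. Table
# and column names are assumptions.
from sqlalchemy import (create_engine, MetaData, Table, Column, Integer,
                        select, func)

engine = create_engine('sqlite://')
conn = engine.connect()
meta = MetaData()
t = Table('t', meta, Column('x', Integer))
meta.create_all(conn)

conn.execute("INSERT INTO t (x) VALUES (7)")       # textual SQL string
conn.execute(t.insert(), x=8)                      # Executable ClauseElement
print conn.execute(select([t.c.x])).fetchall()     # [(7,), (8,)]
print conn.execute(func.count(t.c.x)).scalar()     # FunctionElement -> SELECT
conn.close()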
- def __distill_params(self, multiparams, params):
- """Given arguments from the calling form *multiparams, **params,
- return a list of bind parameter structures, usually a list of
- dictionaries.
-
- In the case of 'raw' execution which accepts positional parameters,
- it may be a list of tuples or lists.
-
- """
-
- if not multiparams:
- if params:
- return [params]
- else:
- return []
- elif len(multiparams) == 1:
- zero = multiparams[0]
- if isinstance(zero, (list, tuple)):
- if not zero or hasattr(zero[0], '__iter__'):
- return zero
- else:
- return [zero]
- elif hasattr(zero, 'keys'):
- return [zero]
- else:
- return [[zero]]
- else:
- if hasattr(multiparams[0], '__iter__'):
- return multiparams
- else:
- return [multiparams]
-
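# --- Editorial note, not part of the original file: the distillation
# rules above, traced through conn.execute(stmt, ...) call forms:
#
#   execute(stmt, {'x': 1})               -> [{'x': 1}]
#   execute(stmt, {'x': 1}, {'x': 2})     -> [{'x': 1}, {'x': 2}]   (executemany)
#   execute(stmt, [{'x': 1}, {'x': 2}])   -> [{'x': 1}, {'x': 2}]   (executemany)
#   execute('stmt ?', 5)                  -> [[5]]    (raw positional)
#   execute('stmt ?', (5,))               -> [(5,)]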
- def _execute_function(self, func, multiparams, params):
- """Execute a sql.FunctionElement object."""
-
- return self._execute_clauseelement(func.select(),
- multiparams, params)
-
- def _execute_default(self, default, multiparams, params):
- """Execute a schema.ColumnDefault object."""
-
- if self._has_events:
- for fn in self.engine.dispatch.before_execute:
- default, multiparams, params = \
- fn(self, default, multiparams, params)
-
- try:
- try:
- conn = self.__connection
- except AttributeError:
- conn = self._revalidate_connection()
-
- dialect = self.dialect
- ctx = dialect.execution_ctx_cls._init_default(
- dialect, self, conn)
- except Exception, e:
- self._handle_dbapi_exception(e, None, None, None, None)
- raise
-
- ret = ctx._exec_default(default, None)
- if self.should_close_with_result:
- self.close()
-
- if self._has_events:
- self.engine.dispatch.after_execute(self,
- default, multiparams, params, ret)
-
- return ret
-
- def _execute_ddl(self, ddl, multiparams, params):
- """Execute a schema.DDL object."""
-
- if self._has_events:
- for fn in self.engine.dispatch.before_execute:
- ddl, multiparams, params = \
- fn(self, ddl, multiparams, params)
-
- dialect = self.dialect
-
- compiled = ddl.compile(dialect=dialect)
- ret = self._execute_context(
- dialect,
- dialect.execution_ctx_cls._init_ddl,
- compiled,
- None,
- compiled
- )
- if self._has_events:
- self.engine.dispatch.after_execute(self,
- ddl, multiparams, params, ret)
- return ret
-
- def _execute_clauseelement(self, elem, multiparams, params):
- """Execute a sql.ClauseElement object."""
-
- if self._has_events:
- for fn in self.engine.dispatch.before_execute:
- elem, multiparams, params = \
- fn(self, elem, multiparams, params)
-
- distilled_params = self.__distill_params(multiparams, params)
- if distilled_params:
- keys = distilled_params[0].keys()
- else:
- keys = []
-
- dialect = self.dialect
- if 'compiled_cache' in self._execution_options:
- key = dialect, elem, tuple(keys), len(distilled_params) > 1
- if key in self._execution_options['compiled_cache']:
- compiled_sql = self._execution_options['compiled_cache'][key]
- else:
- compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
- self._execution_options['compiled_cache'][key] = compiled_sql
- else:
- compiled_sql = elem.compile(
- dialect=dialect, column_keys=keys,
- inline=len(distilled_params) > 1)
-
- ret = self._execute_context(
- dialect,
- dialect.execution_ctx_cls._init_compiled,
- compiled_sql,
- distilled_params,
- compiled_sql, distilled_params
- )
- if self._has_events:
- self.engine.dispatch.after_execute(self,
- elem, multiparams, params, ret)
- return ret
-
- def _execute_compiled(self, compiled, multiparams, params):
- """Execute a sql.Compiled object."""
-
- if self._has_events:
- for fn in self.engine.dispatch.before_execute:
- compiled, multiparams, params = \
- fn(self, compiled, multiparams, params)
-
- dialect = self.dialect
- parameters = self.__distill_params(multiparams, params)
- ret = self._execute_context(
- dialect,
- dialect.execution_ctx_cls._init_compiled,
- compiled,
- parameters,
- compiled, parameters
- )
- if self._has_events:
- self.engine.dispatch.after_execute(self,
- compiled, multiparams, params, ret)
- return ret
-
- def _execute_text(self, statement, multiparams, params):
- """Execute a string SQL statement."""
-
- if self._has_events:
- for fn in self.engine.dispatch.before_execute:
- statement, multiparams, params = \
- fn(self, statement, multiparams, params)
-
- dialect = self.dialect
- parameters = self.__distill_params(multiparams, params)
- ret = self._execute_context(
- dialect,
- dialect.execution_ctx_cls._init_statement,
- statement,
- parameters,
- statement, parameters
- )
- if self._has_events:
- self.engine.dispatch.after_execute(self,
- statement, multiparams, params, ret)
- return ret
-
- def _execute_context(self, dialect, constructor,
- statement, parameters,
- *args):
- """Create an :class:`.ExecutionContext` and execute, returning
- a :class:`.ResultProxy`."""
-
- try:
- try:
- conn = self.__connection
- except AttributeError:
- conn = self._revalidate_connection()
-
- context = constructor(dialect, self, conn, *args)
- except Exception, e:
- self._handle_dbapi_exception(e,
- str(statement), parameters,
- None, None)
- raise
-
- if context.compiled:
- context.pre_exec()
-
- cursor, statement, parameters = context.cursor, \
- context.statement, \
- context.parameters
-
- if not context.executemany:
- parameters = parameters[0]
-
- if self._has_events:
- for fn in self.engine.dispatch.before_cursor_execute:
- statement, parameters = \
- fn(self, cursor, statement, parameters,
- context, context.executemany)
-
- if self._echo:
- self.engine.logger.info(statement)
- self.engine.logger.info("%r", parameters)
- try:
- if context.executemany:
- self.dialect.do_executemany(
- cursor,
- statement,
- parameters,
- context)
- else:
- self.dialect.do_execute(
- cursor,
- statement,
- parameters,
- context)
- except Exception, e:
- self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- context)
- raise
-
- if self._has_events:
- self.engine.dispatch.after_cursor_execute(self, cursor,
- statement,
- parameters,
- context,
- context.executemany)
-
- if context.compiled:
- context.post_exec()
-
- if context.isinsert and not context.executemany:
- context.post_insert()
-
- # create a resultproxy, get rowcount/implicit RETURNING
- # rows, close cursor if no further results pending
- result = context.get_result_proxy()
-
- if context.isinsert:
- if context._is_implicit_returning:
- context._fetch_implicit_returning(result)
- result.close(_autoclose_connection=False)
- elif not context._is_explicit_returning:
- result.close(_autoclose_connection=False)
- elif result._metadata is None:
- # no results, get rowcount
- # (which requires open cursor on some drivers
- # such as kinterbasdb, mxodbc),
- result.rowcount
- result.close(_autoclose_connection=False)
-
- if self.__transaction is None and context.should_autocommit:
- self._commit_impl()
-
- if result.closed and self.should_close_with_result:
- self.close()
-
- return result
-
- def _cursor_execute(self, cursor, statement, parameters):
- """Execute a statement + params on the given cursor.
-
- Adds appropriate logging and exception handling.
-
- This method is used by DefaultDialect for special-case
- executions, such as for sequences and column defaults.
- The path of statement execution in the majority of cases
- terminates at _execute_context().
-
- """
- if self._echo:
- self.engine.logger.info(statement)
- self.engine.logger.info("%r", parameters)
- try:
- self.dialect.do_execute(
- cursor,
- statement,
- parameters)
- except Exception, e:
- self._handle_dbapi_exception(
- e,
- statement,
- parameters,
- cursor,
- None)
- raise
-
- def _safe_close_cursor(self, cursor):
- """Close the given cursor, catching exceptions
- and turning into log warnings.
-
- """
- try:
- cursor.close()
- except Exception, e:
- try:
- ex_text = str(e)
- except TypeError:
- ex_text = repr(e)
- self.connection._logger.warn("Error closing cursor: %s", ex_text)
-
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
- raise
-
- def _handle_dbapi_exception(self,
- e,
- statement,
- parameters,
- cursor,
- context):
- if getattr(self, '_reentrant_error', False):
- # Py3K
- #raise exc.DBAPIError.instance(statement, parameters, e,
- # self.dialect.dbapi.Error) from e
- # Py2K
- raise exc.DBAPIError.instance(statement,
- parameters,
- e,
- self.dialect.dbapi.Error), \
- None, sys.exc_info()[2]
- # end Py2K
- self._reentrant_error = True
- try:
- # non-DBAPI error - if we already got a context,
- # or there's no string statement, don't wrap it
- should_wrap = isinstance(e, self.dialect.dbapi.Error) or \
- (statement is not None and context is None)
-
- if should_wrap and context:
- context.handle_dbapi_exception(e)
-
- is_disconnect = isinstance(e, self.dialect.dbapi.Error) and \
- self.dialect.is_disconnect(e, self.__connection, cursor)
- if is_disconnect:
- self.invalidate(e)
- self.engine.dispose()
- else:
- if cursor:
- self._safe_close_cursor(cursor)
- self._autorollback()
- if self.should_close_with_result:
- self.close()
-
- if not should_wrap:
- return
-
- # Py3K
- #raise exc.DBAPIError.instance(
- # statement,
- # parameters,
- # e,
- # self.dialect.dbapi.Error,
- # connection_invalidated=is_disconnect) \
- # from e
- # Py2K
- raise exc.DBAPIError.instance(
- statement,
- parameters,
- e,
- self.dialect.dbapi.Error,
- connection_invalidated=is_disconnect), \
- None, sys.exc_info()[2]
- # end Py2K
-
- finally:
- del self._reentrant_error
-
- # poor man's multimethod/generic function thingy
- executors = {
- expression.FunctionElement: _execute_function,
- expression.ClauseElement: _execute_clauseelement,
- Compiled: _execute_compiled,
- schema.SchemaItem: _execute_default,
- schema.DDLElement: _execute_ddl,
- basestring: _execute_text
- }
-
- def create(self, entity, **kwargs):
- """Create a Table or Index given an appropriate Schema object."""
-
- return self.engine.create(entity, connection=self, **kwargs)
-
- def drop(self, entity, **kwargs):
- """Drop a Table or Index given an appropriate Schema object."""
-
- return self.engine.drop(entity, connection=self, **kwargs)
-
- def reflecttable(self, table, include_columns=None):
- """Reflect the columns in the given string table name from the
- database."""
-
- return self.engine.reflecttable(table, self, include_columns)
-
- def default_schema_name(self):
- return self.engine.dialect.get_default_schema_name(self)
-
- def transaction(self, callable_, *args, **kwargs):
- """Execute the given function within a transaction boundary.
-
- This is a shortcut for explicitly calling `begin()` and `commit()`
- and optionally `rollback()` when exceptions are raised. The
- given `*args` and `**kwargs` will be passed to the function.
-
- See also transaction() on engine.
-
- """
-
- trans = self.begin()
- try:
- ret = self.run_callable(callable_, *args, **kwargs)
- trans.commit()
- return ret
- except:
- trans.rollback()
- raise
-
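# --- Editorial sketch, not part of the original file: the transactional
# calling style described above; the callable receives the Connection as
# its first argument. Names and SQL are assumptions.
from sqlalchemy import create_engine

def populate(conn, value):
    conn.execute("CREATE TABLE t (x INTEGER)")
    conn.execute("INSERT INTO t (x) VALUES (?)", value)

engine = create_engine('sqlite://')
conn = engine.connect()
conn.transaction(populate, 5)     # begin + commit; rollback on exception
print conn.execute("SELECT x FROM t").scalar()    # 5
conn.close()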
- def run_callable(self, callable_, *args, **kwargs):
- return callable_(self, *args, **kwargs)
-
-
-class Transaction(object):
- """Represent a Transaction in progress.
-
- The object provides :meth:`.rollback` and :meth:`.commit`
- methods in order to control transaction boundaries. It
- also implements a context manager interface so that
- the Python ``with`` statement can be used with the
- :meth:`.Connection.begin` method.
-
- The Transaction object is **not** threadsafe.
-
- .. index::
- single: thread safety; Transaction
- """
-
- def __init__(self, connection, parent):
- """The constructor for :class:`.Transaction` is private
- and is called from within the :class:`.Connection.begin`
- implementation.
-
- """
- self.connection = connection
- self._parent = parent or self
- self.is_active = True
-
- def close(self):
- """Close this :class:`.Transaction`.
-
- If this transaction is the base transaction in a begin/commit
- nesting, the transaction will rollback(). Otherwise, the
- method returns.
-
- This is used to cancel a Transaction without affecting the scope of
- an enclosing transaction.
-
- """
- if not self._parent.is_active:
- return
- if self._parent is self:
- self.rollback()
-
- def rollback(self):
- """Roll back this :class:`.Transaction`.
-
- """
- if not self._parent.is_active:
- return
- self._do_rollback()
- self.is_active = False
-
- def _do_rollback(self):
- self._parent.rollback()
-
- def commit(self):
- """Commit this :class:`.Transaction`."""
-
- if not self._parent.is_active:
- raise exc.InvalidRequestError("This transaction is inactive")
- self._do_commit()
- self.is_active = False
-
- def _do_commit(self):
- pass
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- if type is None and self.is_active:
- self.commit()
- else:
- self.rollback()
-
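# --- Editorial sketch, not part of the original file: the context
# manager protocol above in use; __exit__ commits on success and rolls
# back if an exception propagates.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
conn = engine.connect()
with conn.begin():
    conn.execute("CREATE TABLE t (x INTEGER)")
    conn.execute("INSERT INTO t (x) VALUES (1)")
# committed here; a raise inside the block would have rolled back
conn.close()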
-
-class RootTransaction(Transaction):
- def __init__(self, connection):
- super(RootTransaction, self).__init__(connection, None)
- self.connection._begin_impl()
-
- def _do_rollback(self):
- if self.is_active:
- self.connection._rollback_impl()
-
- def _do_commit(self):
- if self.is_active:
- self.connection._commit_impl()
-
-
-class NestedTransaction(Transaction):
- def __init__(self, connection, parent):
- super(NestedTransaction, self).__init__(connection, parent)
- self._savepoint = self.connection._savepoint_impl()
-
- def _do_rollback(self):
- if self.is_active:
- self.connection._rollback_to_savepoint_impl(
- self._savepoint, self._parent)
-
- def _do_commit(self):
- if self.is_active:
- self.connection._release_savepoint_impl(
- self._savepoint, self._parent)
-
-
-class TwoPhaseTransaction(Transaction):
- def __init__(self, connection, xid):
- super(TwoPhaseTransaction, self).__init__(connection, None)
- self._is_prepared = False
- self.xid = xid
- self.connection._begin_twophase_impl(self.xid)
-
- def prepare(self):
- if not self._parent.is_active:
- raise exc.InvalidRequestError("This transaction is inactive")
- self.connection._prepare_twophase_impl(self.xid)
- self._is_prepared = True
-
- def _do_rollback(self):
- self.connection._rollback_twophase_impl(self.xid, self._is_prepared)
-
- def _do_commit(self):
- self.connection._commit_twophase_impl(self.xid, self._is_prepared)
-
-
-class Engine(Connectable, log.Identified):
- """
- Connects a :class:`~sqlalchemy.pool.Pool` and
- :class:`~sqlalchemy.engine.base.Dialect` together to provide a source
- of database connectivity and behavior.
-
- An :class:`.Engine` object is instantiated publicly using the
- :func:`~sqlalchemy.create_engine` function.
-
- """
-
- _execution_options = util.immutabledict()
- _has_events = False
- _connection_cls = Connection
-
- def __init__(self, pool, dialect, url,
- logging_name=None, echo=None, proxy=None,
- execution_options=None
- ):
- self.pool = pool
- self.url = url
- self.dialect = dialect
- if logging_name:
- self.logging_name = logging_name
- self.echo = echo
- self.engine = self
- log.instance_logger(self, echoflag=echo)
- if proxy:
- interfaces.ConnectionProxy._adapt_listener(self, proxy)
- if execution_options:
- if 'isolation_level' in execution_options:
- raise exc.ArgumentError(
- "'isolation_level' execution option may "
- "only be specified on Connection.execution_options(). "
- "To set engine-wide isolation level, "
- "use the isolation_level argument to create_engine()."
- )
- self.update_execution_options(**execution_options)
-
- dispatch = event.dispatcher(events.ConnectionEvents)
-
- def update_execution_options(self, **opt):
- """update the execution_options dictionary of this :class:`.Engine`.
-
- For details on execution_options, see
- :meth:`Connection.execution_options` as well as
- :meth:`sqlalchemy.sql.expression.Executable.execution_options`.
-
- """
- self._execution_options = \
- self._execution_options.union(opt)
-
- @property
- def name(self):
- """String name of the :class:`~sqlalchemy.engine.Dialect` in use by
- this ``Engine``."""
-
- return self.dialect.name
-
- @property
- def driver(self):
- """Driver name of the :class:`~sqlalchemy.engine.Dialect` in use by
- this ``Engine``."""
-
- return self.dialect.driver
-
- echo = log.echo_property()
-
- def __repr__(self):
- return 'Engine(%s)' % str(self.url)
-
- def dispose(self):
- """Dispose of the connection pool used by this :class:`.Engine`.
-
- A new connection pool is created immediately after the old one has
- been disposed. This new pool, like all SQLAlchemy connection pools,
- does not make any actual connections to the database until one is
- first requested.
-
- This method has two general use cases:
-
- * When a dropped connection is detected, it is assumed that all
- connections held by the pool are potentially dropped, and
- the entire pool is replaced.
-
- * An application may want to use :meth:`dispose` within a test
- suite that is creating multiple engines.
-
- It is critical to note that :meth:`dispose` does **not** guarantee
- that the application will release all open database connections - only
- those connections that are checked into the pool are closed.
- Connections which remain checked out or have been detached from
- the engine are not affected.
-
- """
- self.pool.dispose()
- self.pool = self.pool.recreate()
-
- def create(self, entity, connection=None, **kwargs):
- """Create a table or index within this engine's database connection
- given a schema object."""
-
- from sqlalchemy.engine import ddl
-
- self._run_visitor(ddl.SchemaGenerator, entity,
- connection=connection, **kwargs)
-
- def drop(self, entity, connection=None, **kwargs):
- """Drop a table or index within this engine's database connection
- given a schema object."""
-
- from sqlalchemy.engine import ddl
-
- self._run_visitor(ddl.SchemaDropper, entity,
- connection=connection, **kwargs)
-
- def _execute_default(self, default):
- connection = self.contextual_connect()
- try:
- return connection._execute_default(default, (), {})
- finally:
- connection.close()
-
- @property
- def func(self):
- return expression._FunctionGenerator(bind=self)
-
- def text(self, text, *args, **kwargs):
- """Return a :func:`~sqlalchemy.sql.expression.text` construct,
- bound to this engine.
-
- This is equivalent to::
-
- text("SELECT * FROM table", bind=engine)
-
- """
-
- return expression.text(text, bind=self, *args, **kwargs)
-
- def _run_visitor(self, visitorcallable, element,
- connection=None, **kwargs):
- if connection is None:
- conn = self.contextual_connect(close_with_result=False)
- else:
- conn = connection
- try:
- visitorcallable(self.dialect, conn,
- **kwargs).traverse_single(element)
- finally:
- if connection is None:
- conn.close()
-
- def transaction(self, callable_, *args, **kwargs):
- """Execute the given function within a transaction boundary.
-
- This is a shortcut for explicitly calling `begin()` and `commit()`
- and optionally `rollback()` when exceptions are raised. The
- given `*args` and `**kwargs` will be passed to the function.
-
- The connection used is that of contextual_connect().
-
- See also the similar method on Connection itself.
-
- """
-
- conn = self.contextual_connect()
- try:
- return conn.transaction(callable_, *args, **kwargs)
- finally:
- conn.close()
-
- def run_callable(self, callable_, *args, **kwargs):
- conn = self.contextual_connect()
- try:
- return conn.run_callable(callable_, *args, **kwargs)
- finally:
- conn.close()
-
- def execute(self, statement, *multiparams, **params):
- """Executes the given construct and returns a :class:`.ResultProxy`.
-
- The arguments are the same as those used by
- :meth:`.Connection.execute`.
-
- Here, a :class:`.Connection` is acquired using the
- :meth:`~.Engine.contextual_connect` method, and the statement executed
- with that connection. The returned :class:`.ResultProxy` is flagged
- such that when the :class:`.ResultProxy` is exhausted and its
- underlying cursor is closed, the :class:`.Connection` created here
- will also be closed, which allows its associated DBAPI connection
- resource to be returned to the connection pool.
-
- """
-
- connection = self.contextual_connect(close_with_result=True)
- return connection.execute(statement, *multiparams, **params)
-
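# --- Editorial sketch, not part of the original file: "connectionless"
# execution; the implicitly-acquired Connection goes back to the pool
# once the result rows are exhausted.
from sqlalchemy import create_engine

engine = create_engine('sqlite://')
engine.execute("CREATE TABLE t (x INTEGER)")
engine.execute("INSERT INTO t (x) VALUES (1)")
result = engine.execute("SELECT x FROM t")
print result.fetchall()    # [(1,)]; fetching to the end closes everything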
- def scalar(self, statement, *multiparams, **params):
- return self.execute(statement, *multiparams, **params).scalar()
-
- def _execute_clauseelement(self, elem, multiparams=None, params=None):
- connection = self.contextual_connect(close_with_result=True)
- return connection._execute_clauseelement(elem, multiparams, params)
-
- def _execute_compiled(self, compiled, multiparams, params):
- connection = self.contextual_connect(close_with_result=True)
- return connection._execute_compiled(compiled, multiparams, params)
-
- def connect(self, **kwargs):
- """Return a new :class:`.Connection` object.
-
- The :class:`.Connection`, upon construction, will procure a DBAPI connection
- from the :class:`.Pool` referenced by this :class:`.Engine`,
- returning it back to the :class:`.Pool` after the :meth:`.Connection.close`
- method is called.
-
- """
-
- return self._connection_cls(self, **kwargs)
-
- def contextual_connect(self, close_with_result=False, **kwargs):
- """Return a :class:`.Connection` object which may be part of some ongoing context.
-
- By default, this method does the same thing as :meth:`.Engine.connect`.
- Subclasses of :class:`.Engine` may override this method
- to provide contextual behavior.
-
- :param close_with_result: When True, the first :class:`.ResultProxy` created
- by the :class:`.Connection` will call the :meth:`.Connection.close` method
- of that connection as soon as any pending result rows are exhausted.
- This is used to supply the "connectionless execution" behavior provided
- by the :meth:`.Engine.execute` method.
-
- """
-
- return self._connection_cls(self,
- self.pool.connect(),
- close_with_result=close_with_result,
- **kwargs)
-
- def table_names(self, schema=None, connection=None):
- """Return a list of all table names available in the database.
-
- :param schema: Optional, retrieve names from a non-default schema.
-
- :param connection: Optional, use a specified connection. Default is
- the ``contextual_connect`` for this ``Engine``.
- """
-
- if connection is None:
- conn = self.contextual_connect()
- else:
- conn = connection
- if not schema:
- schema = self.dialect.default_schema_name
- try:
- return self.dialect.get_table_names(conn, schema)
- finally:
- if connection is None:
- conn.close()
-
- def reflecttable(self, table, connection=None, include_columns=None):
- """Given a Table object, reflects its columns and properties from the
- database."""
-
- if connection is None:
- conn = self.contextual_connect()
- else:
- conn = connection
- try:
- self.dialect.reflecttable(conn, table, include_columns)
- finally:
- if connection is None:
- conn.close()
-
- def has_table(self, table_name, schema=None):
- return self.run_callable(self.dialect.has_table, table_name, schema)
-
- def raw_connection(self):
- """Return a DB-API connection."""
-
- return self.pool.unique_connection()
-
-
-# This reconstructor is necessary so that pickles created with or without
-# the C extension use the same binary format.
-try:
- # We need a different reconstructor on the C extension so that we can
- # add extra checks that fields have correctly been initialized by
- # __setstate__.
- from sqlalchemy.cresultproxy import safe_rowproxy_reconstructor
-
- # The extra function embedding is needed so that the
- # reconstructor function has the same signature whether or not
- # the extension is present.
- def rowproxy_reconstructor(cls, state):
- return safe_rowproxy_reconstructor(cls, state)
-except ImportError:
- def rowproxy_reconstructor(cls, state):
- obj = cls.__new__(cls)
- obj.__setstate__(state)
- return obj
-
-try:
- from sqlalchemy.cresultproxy import BaseRowProxy
-except ImportError:
- class BaseRowProxy(object):
- __slots__ = ('_parent', '_row', '_processors', '_keymap')
-
- def __init__(self, parent, row, processors, keymap):
- """RowProxy objects are constructed by ResultProxy objects."""
-
- self._parent = parent
- self._row = row
- self._processors = processors
- self._keymap = keymap
-
- def __reduce__(self):
- return (rowproxy_reconstructor,
- (self.__class__, self.__getstate__()))
-
- def values(self):
- """Return the values represented by this RowProxy as a list."""
- return list(self)
-
- def __iter__(self):
- for processor, value in izip(self._processors, self._row):
- if processor is None:
- yield value
- else:
- yield processor(value)
-
- def __len__(self):
- return len(self._row)
-
- def __getitem__(self, key):
- try:
- processor, index = self._keymap[key]
- except KeyError:
- processor, index = self._parent._key_fallback(key)
- except TypeError:
- if isinstance(key, slice):
- l = []
- for processor, value in izip(self._processors[key],
- self._row[key]):
- if processor is None:
- l.append(value)
- else:
- l.append(processor(value))
- return tuple(l)
- else:
- raise
- if index is None:
- raise exc.InvalidRequestError(
- "Ambiguous column name '%s' in result set! "
- "try 'use_labels' option on select statement." % key)
- if processor is not None:
- return processor(self._row[index])
- else:
- return self._row[index]
-
- def __getattr__(self, name):
- try:
- # TODO: no test coverage here
- return self[name]
- except KeyError, e:
- raise AttributeError(e.args[0])
-
-
-class RowProxy(BaseRowProxy):
- """Proxy values from a single cursor row.
-
- Mostly follows "ordered dictionary" behavior, mapping result
- values to the string-based column name, the integer position of
- the result in the row, as well as Column instances which can be
- mapped to the original Columns that produced this result set (for
- results that correspond to constructed SQL expressions).
- """
- __slots__ = ()
-
- def __contains__(self, key):
- return self._parent._has_key(self._row, key)
-
- def __getstate__(self):
- return {
- '_parent': self._parent,
- '_row': tuple(self)
- }
-
- def __setstate__(self, state):
- self._parent = parent = state['_parent']
- self._row = state['_row']
- self._processors = parent._processors
- self._keymap = parent._keymap
-
- __hash__ = None
-
- def __eq__(self, other):
- return other is self or other == tuple(self)
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def __repr__(self):
- return repr(tuple(self))
-
- def has_key(self, key):
- """Return True if this RowProxy contains the given key."""
-
- return self._parent._has_key(self._row, key)
-
- def items(self):
- """Return a list of tuples, each tuple containing a key/value pair."""
- # TODO: no coverage here
- return [(key, self[key]) for key in self.iterkeys()]
-
- def keys(self):
- """Return the list of keys as strings represented by this RowProxy."""
-
- return self._parent.keys
-
- def iterkeys(self):
- return iter(self._parent.keys)
-
- def itervalues(self):
- return iter(self)
-
-try:
- # Register RowProxy with Sequence,
- # so sequence protocol is implemented
- from collections import Sequence
- Sequence.register(RowProxy)
-except ImportError:
- pass
-
-
-class ResultMetaData(object):
- """Handle cursor.description, applying additional info from an execution
- context."""
-
- def __init__(self, parent, metadata):
- self._processors = processors = []
-
- # We do not strictly need to store the processor in the key mapping,
- # though it is faster in the Python version (probably because of the
- # saved attribute lookup self._processors)
- self._keymap = keymap = {}
- self.keys = []
- context = parent.context
- dialect = context.dialect
- typemap = dialect.dbapi_type_map
-
- for i, rec in enumerate(metadata):
- colname = rec[0]
- coltype = rec[1]
-
- if dialect.description_encoding:
- colname = dialect._description_decoder(colname)
-
- if context.result_map:
- try:
- name, obj, type_ = context.result_map[colname.lower()]
- except KeyError:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
- else:
- name, obj, type_ = \
- colname, None, typemap.get(coltype, types.NULLTYPE)
-
- processor = type_._cached_result_processor(dialect, coltype)
-
- processors.append(processor)
- rec = (processor, i)
-
- # indexes as keys. This is only needed for the Python version of
- # RowProxy (the C version uses a faster path for integer indexes).
- keymap[i] = rec
-
- # Column names as keys
- if keymap.setdefault(name.lower(), rec) is not rec:
- # We do not raise an exception directly because several
- # columns colliding by name is not a problem as long as the
- # user does not try to access them (ie use an index directly,
- # or the more precise ColumnElement)
- keymap[name.lower()] = (processor, None)
-
- if dialect.requires_name_normalize:
- colname = dialect.normalize_name(colname)
-
- self.keys.append(colname)
- if obj:
- for o in obj:
- keymap[o] = rec
-
- if parent._echo:
- context.engine.logger.debug(
- "Col %r", tuple(x[0] for x in metadata))
-
- def _set_keymap_synonym(self, name, origname):
- """Set a synonym for the given name.
-
- Some dialects (SQLite at the moment) may use this to
- adjust the column names that are significant within a
- row.
-
- """
- rec = (processor, i) = self._keymap[origname.lower()]
- if self._keymap.setdefault(name, rec) is not rec:
- self._keymap[name] = (processor, None)
-
- def _key_fallback(self, key):
- map = self._keymap
- result = None
- if isinstance(key, basestring):
- result = map.get(key.lower())
- # fallback for targeting a ColumnElement to a textual expression
- # this is a rare use case which only occurs when matching text()
- # constructs to ColumnElements, and after a pickle/unpickle roundtrip
- elif isinstance(key, expression.ColumnElement):
- if key._label and key._label.lower() in map:
- result = map[key._label.lower()]
- elif hasattr(key, 'name') and key.name.lower() in map:
- result = map[key.name.lower()]
- if result is None:
- raise exc.NoSuchColumnError(
- "Could not locate column in row for column '%s'" % key)
- else:
- map[key] = result
- return result
-
- def _has_key(self, row, key):
- if key in self._keymap:
- return True
- else:
- try:
- self._key_fallback(key)
- return True
- except exc.NoSuchColumnError:
- return False
-
- def __getstate__(self):
- return {
- '_pickled_keymap': dict(
- (key, index)
- for key, (processor, index) in self._keymap.iteritems()
- if isinstance(key, (basestring, int))
- ),
- 'keys': self.keys
- }
-
- def __setstate__(self, state):
- # the row has been processed at pickling time so we don't need any
- # processor anymore
- self._processors = [None for _ in xrange(len(state['keys']))]
- self._keymap = keymap = {}
- for key, index in state['_pickled_keymap'].iteritems():
- keymap[key] = (None, index)
- self.keys = state['keys']
- self._echo = False
-
-
-class ResultProxy(object):
- """Wraps a DB-API cursor object to provide easier access to row columns.
-
- Individual columns may be accessed by their integer position,
- case-insensitive column name, or by ``schema.Column``
- object. e.g.::
-
- row = fetchone()
-
- col1 = row[0] # access via integer position
-
- col2 = row['col2'] # access via name
-
- col3 = row[mytable.c.mycol] # access via Column object.
-
- ``ResultProxy`` also handles post-processing of result column
- data using ``TypeEngine`` objects, which are referenced from
- the originating SQL statement that produced this result set.
-
- """
-
- _process_row = RowProxy
- out_parameters = None
- _can_close_connection = False
-
- def __init__(self, context):
- self.context = context
- self.dialect = context.dialect
- self.closed = False
- self.cursor = self._saved_cursor = context.cursor
- self.connection = context.root_connection
- self._echo = self.connection._echo and \
- context.engine._should_log_debug()
- self._init_metadata()
-
- def _init_metadata(self):
- metadata = self._cursor_description()
- if metadata is None:
- self._metadata = None
- else:
- self._metadata = ResultMetaData(self, metadata)
-
- def keys(self):
- """Return the current set of string keys for rows."""
- if self._metadata:
- return self._metadata.keys
- else:
- return []
-
- @util.memoized_property
- def rowcount(self):
- """Return the 'rowcount' for this result.
-
- The 'rowcount' reports the number of rows affected
- by an UPDATE or DELETE statement. It has *no* other
- uses and is not intended to provide the number of rows
- present from a SELECT.
-
- Note that this row count may not be properly implemented in some
- dialects; this is indicated by
- :meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_rowcount()`
- and
- :meth:`~sqlalchemy.engine.base.ResultProxy.supports_sane_multi_rowcount()`.
- ``rowcount`` may also not work at this time for a statement that
- uses ``returning()``.
-
- """
- try:
- return self.context.rowcount
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context)
- raise
-
- @property
- def lastrowid(self):
- """return the 'lastrowid' accessor on the DBAPI cursor.
-
- This is a DBAPI specific method and is only functional
- for those backends which support it, for statements
- where it is appropriate. Its behavior is not
- consistent across backends.
-
- Usage of this method is normally unnecessary; the
- :attr:`~ResultProxy.inserted_primary_key` attribute provides a
- tuple of primary key values for a newly inserted row,
- regardless of database backend.
-
- """
- try:
- return self._saved_cursor.lastrowid
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self._saved_cursor, self.context)
- raise
-
- @property
- def returns_rows(self):
- """True if this :class:`.ResultProxy` returns rows.
-
- I.e. if it is legal to call the methods
- :meth:`~.ResultProxy.fetchone`,
- :meth:`~.ResultProxy.fetchmany`
- :meth:`~.ResultProxy.fetchall`.
-
- """
- return self._metadata is not None
-
- @property
- def is_insert(self):
- """True if this :class:`.ResultProxy` is the result
- of executing an expression language compiled
- :func:`.expression.insert` construct.
-
- When True, this implies that the
- :attr:`inserted_primary_key` attribute is accessible,
- assuming the statement did not include
- a user defined "returning" construct.
-
- """
- return self.context.isinsert
-
- def _cursor_description(self):
- """May be overridden by subclasses."""
-
- return self._saved_cursor.description
-
- def close(self, _autoclose_connection=True):
- """Close this ResultProxy.
-
- Closes the underlying DBAPI cursor corresponding to the execution.
-
- Note that any data cached within this ResultProxy is still available.
- For some types of results, this may include buffered rows.
-
- If this ResultProxy was generated from an implicit execution,
- the underlying Connection will also be closed (returns the
- underlying DBAPI connection to the connection pool.)
-
- This method is called automatically when:
-
- * all result rows are exhausted using the fetchXXX() methods.
- * cursor.description is None.
-
- """
-
- if not self.closed:
- self.closed = True
- self.connection._safe_close_cursor(self.cursor)
- if _autoclose_connection and \
- self.connection.should_close_with_result:
- self.connection.close()
- # allow consistent errors
- self.cursor = None
-
- def __iter__(self):
- while True:
- row = self.fetchone()
- if row is None:
- raise StopIteration
- else:
- yield row
-
- @util.memoized_property
- def inserted_primary_key(self):
- """Return the primary key for the row just inserted.
-
- The return value is a list of scalar values
- corresponding to the list of primary key columns
- in the target table.
-
- This only applies to single row :func:`.insert`
- constructs which did not explicitly specify
- :meth:`.Insert.returning`.
-
- Note that primary key columns which specify a
- server_default clause,
- or otherwise do not qualify as "autoincrement"
- columns (see the notes at :class:`.Column`), and were
- generated using the database-side default, will
- appear in this list as ``None`` unless the backend
- supports "returning" and the insert statement executed
- with the "implicit returning" enabled.
-
- """
-
- if not self.context.isinsert:
- raise exc.InvalidRequestError(
- "Statement is not an insert() expression construct.")
- elif self.context._is_explicit_returning:
- raise exc.InvalidRequestError(
- "Can't call inserted_primary_key when returning() "
- "is used.")
-
- return self.context.inserted_primary_key
-
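# --- Editorial sketch, not part of the original file: reading the
# generated primary key of a single-row insert. Table and column names
# are assumptions.
from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

engine = create_engine('sqlite://')
meta = MetaData()
users = Table('users', meta,
              Column('id', Integer, primary_key=True),
              Column('name', String(50)))
meta.create_all(engine)

result = engine.execute(users.insert(), name='ed')
print result.inserted_primary_key    # [1] - a list of scalar PK values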
- @util.deprecated("0.6", "Use :attr:`.ResultProxy.inserted_primary_key`")
- def last_inserted_ids(self):
- """Return the primary key for the row just inserted."""
-
- return self.inserted_primary_key
-
- def last_updated_params(self):
- """Return the collection of updated parameters from this
- execution.
-
- """
- if self.context.executemany:
- return self.context.compiled_parameters
- else:
- return self.context.compiled_parameters[0]
-
- def last_inserted_params(self):
- """Return the collection of inserted parameters from this
- execution.
-
- """
- if self.context.executemany:
- return self.context.compiled_parameters
- else:
- return self.context.compiled_parameters[0]
-
- def lastrow_has_defaults(self):
- """Return ``lastrow_has_defaults()`` from the underlying
- ExecutionContext.
-
- See ExecutionContext for details.
- """
-
- return self.context.lastrow_has_defaults()
-
- def postfetch_cols(self):
- """Return ``postfetch_cols()`` from the underlying ExecutionContext.
-
- See ExecutionContext for details.
- """
-
- return self.context.postfetch_cols
-
- def prefetch_cols(self):
- return self.context.prefetch_cols
-
- def supports_sane_rowcount(self):
- """Return ``supports_sane_rowcount`` from the dialect."""
-
- return self.dialect.supports_sane_rowcount
-
- def supports_sane_multi_rowcount(self):
- """Return ``supports_sane_multi_rowcount`` from the dialect."""
-
- return self.dialect.supports_sane_multi_rowcount
-
- def _fetchone_impl(self):
- try:
- return self.cursor.fetchone()
- except AttributeError:
- self._non_result()
-
- def _fetchmany_impl(self, size=None):
- try:
- if size is None:
- return self.cursor.fetchmany()
- else:
- return self.cursor.fetchmany(size)
- except AttributeError:
- self._non_result()
-
- def _fetchall_impl(self):
- try:
- return self.cursor.fetchall()
- except AttributeError:
- self._non_result()
-
- def _non_result(self):
- if self._metadata is None:
- raise exc.ResourceClosedError(
- "This result object does not return rows. "
- "It has been closed automatically.",
- )
- else:
- raise exc.ResourceClosedError("This result object is closed.")
-
- def process_rows(self, rows):
- process_row = self._process_row
- metadata = self._metadata
- keymap = metadata._keymap
- processors = metadata._processors
- if self._echo:
- log = self.context.engine.logger.debug
- l = []
- for row in rows:
- log("Row %r", row)
- l.append(process_row(metadata, row, processors, keymap))
- return l
- else:
- return [process_row(metadata, row, processors, keymap)
- for row in rows]
-
- def fetchall(self):
- """Fetch all rows, just like DB-API ``cursor.fetchall()``."""
-
- try:
- l = self.process_rows(self._fetchall_impl())
- self.close()
- return l
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def fetchmany(self, size=None):
- """Fetch many rows, just like DB-API
- ``cursor.fetchmany(size=cursor.arraysize)``.
-
- If rows are present, the cursor remains open after this is called.
- Else the cursor is automatically closed and an empty list is returned.
-
- """
-
- try:
- l = self.process_rows(self._fetchmany_impl(size))
- if len(l) == 0:
- self.close()
- return l
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def fetchone(self):
- """Fetch one row, just like DB-API ``cursor.fetchone()``.
-
- If a row is present, the cursor remains open after this is called.
- Else the cursor is automatically closed and None is returned.
-
- """
- try:
- row = self._fetchone_impl()
- if row is not None:
- return self.process_rows([row])[0]
- else:
- self.close()
- return None
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- def first(self):
- """Fetch the first row and then close the result set unconditionally.
-
- Returns None if no row is present.
-
- """
- if self._metadata is None:
- self._non_result()
-
- try:
- row = self._fetchone_impl()
- except Exception, e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
- raise
-
- try:
- if row is not None:
- return self.process_rows([row])[0]
- else:
- return None
- finally:
- self.close()
-
- def scalar(self):
- """Fetch the first column of the first row, and close the result set.
-
- Returns None if no row is present.
-
- """
- row = self.first()
- if row is not None:
- return row[0]
- else:
- return None
-
-class BufferedRowResultProxy(ResultProxy):
- """A ResultProxy with row buffering behavior.
-
- ``ResultProxy`` that buffers the contents of a selection of rows
- before ``fetchone()`` is called. This is to allow the results of
- ``cursor.description`` to be available immediately, when
- interfacing with a DB-API that requires rows to be consumed before
- this information is available (currently psycopg2, when used with
- server-side cursors).
-
- The pre-fetching behavior fetches only one row initially, and then
- grows its buffer size according to a fixed growth chart with each
- successive need for additional rows, up to a maximum of 1000.
- """
-
- def _init_metadata(self):
- self.__buffer_rows()
- super(BufferedRowResultProxy, self)._init_metadata()
-
- # this is a "growth chart" for the buffering of rows.
- # each successive __buffer_rows call will use the next
- # value in the list for the buffer size until the max
- # is reached
- size_growth = {
- 1 : 5,
- 5 : 10,
- 10 : 20,
- 20 : 50,
- 50 : 100,
- 100 : 250,
- 250 : 500,
- 500 : 1000
- }
-
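# --- Editorial note, not part of the original file: successive calls to
# __buffer_rows() therefore fetch 1, 5, 10, 20, 50, 100, 250, 500, then
# 1000 rows at a time; .get(size, size) pins the size at 1000 once the
# current value is absent from the chart.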
- def __buffer_rows(self):
- size = getattr(self, '_bufsize', 1)
- self.__rowbuffer = collections.deque(self.cursor.fetchmany(size))
- self._bufsize = self.size_growth.get(size, size)
-
- def _fetchone_impl(self):
- if self.closed:
- return None
- if not self.__rowbuffer:
- self.__buffer_rows()
- if not self.__rowbuffer:
- return None
- return self.__rowbuffer.popleft()
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- self.__rowbuffer.extend(self.cursor.fetchall())
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
-
-class FullyBufferedResultProxy(ResultProxy):
- """A result proxy that buffers rows fully upon creation.
-
- Used for operations where a result is to be delivered
- after the database conversation can not be continued,
- such as MSSQL INSERT...OUTPUT after an autocommit.
-
- """
- def _init_metadata(self):
- super(FullyBufferedResultProxy, self)._init_metadata()
- self.__rowbuffer = self._buffer_rows()
-
- def _buffer_rows(self):
- return collections.deque(self.cursor.fetchall())
-
- def _fetchone_impl(self):
- if self.__rowbuffer:
- return self.__rowbuffer.popleft()
- else:
- return None
-
- def _fetchmany_impl(self, size=None):
- if size is None:
- return self._fetchall_impl()
- result = []
- for x in range(0, size):
- row = self._fetchone_impl()
- if row is None:
- break
- result.append(row)
- return result
-
- def _fetchall_impl(self):
- ret = self.__rowbuffer
- self.__rowbuffer = collections.deque()
- return ret
-
-class BufferedColumnRow(RowProxy):
- def __init__(self, parent, row, processors, keymap):
- # preprocess row
- row = list(row)
- # this is a tad faster than using enumerate
- index = 0
- for processor in parent._orig_processors:
- if processor is not None:
- row[index] = processor(row[index])
- index += 1
- row = tuple(row)
- super(BufferedColumnRow, self).__init__(parent, row,
- processors, keymap)
-
-class BufferedColumnResultProxy(ResultProxy):
- """A ResultProxy with column buffering behavior.
-
- ``ResultProxy`` that loads all columns into memory each time
- fetchone() is called. If fetchmany() or fetchall() are called,
- the full grid of results is fetched. This is to operate with
- databases where result rows contain "live" results that fall out
- of scope unless explicitly fetched. Currently this includes
- cx_Oracle LOB objects.
-
- """
-
- _process_row = BufferedColumnRow
-
- def _init_metadata(self):
- super(BufferedColumnResultProxy, self)._init_metadata()
- metadata = self._metadata
- # orig_processors will be used to preprocess each row when they are
- # constructed.
- metadata._orig_processors = metadata._processors
- # replace all the type processors with None processors.
- metadata._processors = [None for _ in xrange(len(metadata.keys))]
- keymap = {}
- for k, (func, index) in metadata._keymap.iteritems():
- keymap[k] = (None, index)
- self._metadata._keymap = keymap
-
- def fetchall(self):
- # can't call cursor.fetchall(), since rows must be
- # fully processed before requesting more from the DBAPI.
- l = []
- while True:
- row = self.fetchone()
- if row is None:
- break
- l.append(row)
- return l
-
- def fetchmany(self, size=None):
- # can't call cursor.fetchmany(), since rows must be
- # fully processed before requesting more from the DBAPI.
- if size is None:
- return self.fetchall()
- l = []
- for i in xrange(size):
- row = self.fetchone()
- if row is None:
- break
- l.append(row)
- return l
-
-def connection_memoize(key):
- """Decorator, memoize a function in a connection.info stash.
-
- Only applicable to functions which take no arguments other than a
- connection. The memo will be stored in ``connection.info[key]``.
- """
-
- @util.decorator
- def decorated(fn, self, connection):
- connection = connection.connect()
- try:
- return connection.info[key]
- except KeyError:
- connection.info[key] = val = fn(self, connection)
- return val
-
- return decorated
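
As a usage sketch (the dialect method and cache key here are hypothetical),
``connection_memoize`` decorates a method taking only a connection; the first
call stores the result in ``connection.info`` and later calls return it::

    from sqlalchemy.engine.base import connection_memoize

    class MyDialect(object):
        @connection_memoize('_mydialect_server_version')
        def _server_version(self, connection):
            # runs once per connection; cached in connection.info afterwards
            return connection.execute("SELECT version()").scalar()
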
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/ddl.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/ddl.py
deleted file mode 100755
index 79958baa..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/ddl.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# engine/ddl.py
-# Copyright (C) 2009-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Routines to handle CREATE/DROP workflow."""
-
-from sqlalchemy import engine, schema
-from sqlalchemy.sql import util as sql_util
-
-
-class DDLBase(schema.SchemaVisitor):
- def __init__(self, connection):
- self.connection = connection
-
-class SchemaGenerator(DDLBase):
- def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
- super(SchemaGenerator, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables and set(tables) or None
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
-
- def _can_create_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or \
- not self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_create_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- (
- (not self.dialect.sequences_optional or
- not sequence.optional) and
- (
- not self.checkfirst or
- not self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema)
- )
- )
-
- def visit_metadata(self, metadata):
- if self.tables:
- tables = self.tables
- else:
- tables = metadata.tables.values()
- collection = [t for t in sql_util.sort_tables(tables)
- if self._can_create_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_create_sequence(s)]
-
- metadata.dispatch.before_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst)
-
- for seq in seq_coll:
- self.traverse_single(seq, create_ok=True)
-
- for table in collection:
- self.traverse_single(table, create_ok=True)
-
- metadata.dispatch.after_create(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst)
-
- def visit_table(self, table, create_ok=False):
- if not create_ok and not self._can_create_table(table):
- return
-
- table.dispatch.before_create(table, self.connection,
- checkfirst=self.checkfirst)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.CreateTable(table))
-
- if hasattr(table, 'indexes'):
- for index in table.indexes:
- self.traverse_single(index)
-
- table.dispatch.after_create(table, self.connection,
- checkfirst=self.checkfirst)
-
- def visit_sequence(self, sequence, create_ok=False):
- if not create_ok and not self._can_create_sequence(sequence):
- return
- self.connection.execute(schema.CreateSequence(sequence))
-
- def visit_index(self, index):
- self.connection.execute(schema.CreateIndex(index))
-
-
-class SchemaDropper(DDLBase):
- def __init__(self, dialect, connection, checkfirst=False, tables=None, **kwargs):
- super(SchemaDropper, self).__init__(connection, **kwargs)
- self.checkfirst = checkfirst
- self.tables = tables
- self.preparer = dialect.identifier_preparer
- self.dialect = dialect
-
- def visit_metadata(self, metadata):
- if self.tables:
- tables = self.tables
- else:
- tables = metadata.tables.values()
- collection = [t for t in reversed(sql_util.sort_tables(tables))
- if self._can_drop_table(t)]
- seq_coll = [s for s in metadata._sequences.values()
- if s.column is None and self._can_drop_sequence(s)]
-
- metadata.dispatch.before_drop(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst)
-
- for table in collection:
- self.traverse_single(table, drop_ok=True)
-
- for seq in seq_coll:
- self.traverse_single(seq, drop_ok=True)
-
- metadata.dispatch.after_drop(metadata, self.connection,
- tables=collection,
- checkfirst=self.checkfirst)
-
- def _can_drop_table(self, table):
- self.dialect.validate_identifier(table.name)
- if table.schema:
- self.dialect.validate_identifier(table.schema)
- return not self.checkfirst or self.dialect.has_table(self.connection,
- table.name, schema=table.schema)
-
- def _can_drop_sequence(self, sequence):
- return self.dialect.supports_sequences and \
- ((not self.dialect.sequences_optional or
- not sequence.optional) and
- (not self.checkfirst or
- self.dialect.has_sequence(
- self.connection,
- sequence.name,
- schema=sequence.schema))
- )
-
- def visit_index(self, index):
- self.connection.execute(schema.DropIndex(index))
-
- def visit_table(self, table, drop_ok=False):
- if not drop_ok and not self._can_drop_table(table):
- return
-
- table.dispatch.before_drop(table, self.connection,
- checkfirst=self.checkfirst)
-
- for column in table.columns:
- if column.default is not None:
- self.traverse_single(column.default)
-
- self.connection.execute(schema.DropTable(table))
-
- table.dispatch.after_drop(table, self.connection,
- checkfirst=self.checkfirst)
-
- def visit_sequence(self, sequence, drop_ok=False):
- if not drop_ok and not self._can_drop_sequence(sequence):
- return
- self.connection.execute(schema.DropSequence(sequence))
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/default.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/default.py
deleted file mode 100755
index 9f6c5010..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/default.py
+++ /dev/null
@@ -1,801 +0,0 @@
-# engine/default.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Default implementations of per-dialect sqlalchemy.engine classes.
-
-These are semi-private implementation classes which are only of importance
-to database dialect authors; dialects will usually use the classes here
-as the base class for their own corresponding classes.
-
-"""
-
-import re, random
-from sqlalchemy.engine import base, reflection
-from sqlalchemy.sql import compiler, expression
-from sqlalchemy import exc, types as sqltypes, util, pool, processors
-import codecs
-import weakref
-
-AUTOCOMMIT_REGEXP = re.compile(
- r'\s*(?:UPDATE|INSERT|CREATE|DELETE|DROP|ALTER)',
- re.I | re.UNICODE)
-
-
-class DefaultDialect(base.Dialect):
- """Default implementation of Dialect"""
-
- statement_compiler = compiler.SQLCompiler
- ddl_compiler = compiler.DDLCompiler
- type_compiler = compiler.GenericTypeCompiler
- preparer = compiler.IdentifierPreparer
- supports_alter = True
-
- # most DBAPIs happy with this for execute().
- # not cx_oracle.
- execute_sequence_format = tuple
-
- supports_views = True
- supports_sequences = False
- sequences_optional = False
- preexecute_autoincrement_sequences = False
- postfetch_lastrowid = True
- implicit_returning = False
-
- supports_native_enum = False
- supports_native_boolean = False
-
- # if the NUMERIC type
- # returns decimal.Decimal.
- # *not* the FLOAT type however.
- supports_native_decimal = False
-
- # Py3K
- #supports_unicode_statements = True
- #supports_unicode_binds = True
- #returns_unicode_strings = True
- #description_encoding = None
- # Py2K
- supports_unicode_statements = False
- supports_unicode_binds = False
- returns_unicode_strings = False
- description_encoding = 'use_encoding'
- # end Py2K
-
-
- name = 'default'
-
- # length at which to truncate
- # any identifier.
- max_identifier_length = 9999
-
- # length at which to truncate
- # the name of an index.
- # Usually None to indicate
- # 'use max_identifier_length'.
- # thanks to MySQL, sigh
- max_index_name_length = None
-
- supports_sane_rowcount = True
- supports_sane_multi_rowcount = True
- dbapi_type_map = {}
- colspecs = {}
- default_paramstyle = 'named'
- supports_default_values = False
- supports_empty_insert = True
-
- server_version_info = None
-
- # indicates symbol names are
- # UPPERCASEd if they are case insensitive
- # within the database.
- # if this is True, the methods normalize_name()
- # and denormalize_name() must be provided.
- requires_name_normalize = False
-
- reflection_options = ()
-
- def __init__(self, convert_unicode=False, assert_unicode=False,
- encoding='utf-8', paramstyle=None, dbapi=None,
- implicit_returning=None,
- label_length=None, **kwargs):
-
- if not getattr(self, 'ported_sqla_06', True):
- util.warn(
- "The %s dialect is not yet ported to SQLAlchemy 0.6/0.7" %
- self.name)
-
- self.convert_unicode = convert_unicode
- if assert_unicode:
- util.warn_deprecated(
- "assert_unicode is deprecated. "
- "SQLAlchemy emits a warning in all cases where it "
- "would otherwise like to encode a Python unicode object "
- "into a specific encoding but a plain bytestring is "
- "received. "
- "This does *not* apply to DBAPIs that coerce Unicode "
- "natively.")
-
- self.encoding = encoding
- self.positional = False
- self._ischema = None
- self.dbapi = dbapi
- if paramstyle is not None:
- self.paramstyle = paramstyle
- elif self.dbapi is not None:
- self.paramstyle = self.dbapi.paramstyle
- else:
- self.paramstyle = self.default_paramstyle
- if implicit_returning is not None:
- self.implicit_returning = implicit_returning
- self.positional = self.paramstyle in ('qmark', 'format', 'numeric')
- self.identifier_preparer = self.preparer(self)
- self.type_compiler = self.type_compiler(self)
-
- if label_length and label_length > self.max_identifier_length:
- raise exc.ArgumentError(
- "Label length of %d is greater than this dialect's"
- " maximum identifier length of %d" %
- (label_length, self.max_identifier_length))
- self.label_length = label_length
-
- if self.description_encoding == 'use_encoding':
- self._description_decoder = processors.to_unicode_processor_factory(
- encoding
- )
- elif self.description_encoding is not None:
- self._description_decoder = processors.to_unicode_processor_factory(
- self.description_encoding
- )
- self._encoder = codecs.getencoder(self.encoding)
- self._decoder = processors.to_unicode_processor_factory(self.encoding)
-
- @util.memoized_property
- def _type_memos(self):
- return weakref.WeakKeyDictionary()
-
- @property
- def dialect_description(self):
- return self.name + "+" + self.driver
-
- @classmethod
- def get_pool_class(cls, url):
- return getattr(cls, 'poolclass', pool.QueuePool)
-
- def initialize(self, connection):
- try:
- self.server_version_info = \
- self._get_server_version_info(connection)
- except NotImplementedError:
- self.server_version_info = None
- try:
- self.default_schema_name = \
- self._get_default_schema_name(connection)
- except NotImplementedError:
- self.default_schema_name = None
-
- try:
- self.default_isolation_level = \
- self.get_isolation_level(connection.connection)
- except NotImplementedError:
- self.default_isolation_level = None
-
- self.returns_unicode_strings = self._check_unicode_returns(connection)
-
- self.do_rollback(connection.connection)
-
- def on_connect(self):
- """return a callable which sets up a newly created DBAPI connection.
-
- This is used to set dialect-wide per-connection options such as
- isolation modes, unicode modes, etc.
-
- If a callable is returned, it will be assembled into a pool listener
- that receives the direct DBAPI connection, with all wrappers removed.
-
- If None is returned, no listener will be generated.
-
- """
- return None
-
- def _check_unicode_returns(self, connection):
- # Py2K
- if self.supports_unicode_statements:
- cast_to = unicode
- else:
- cast_to = str
- # end Py2K
- # Py3K
- #cast_to = str
- def check_unicode(type_):
- cursor = connection.connection.cursor()
- try:
- cursor.execute(
- cast_to(
- expression.select(
- [expression.cast(
- expression.literal_column(
- "'test unicode returns'"), type_)
- ]).compile(dialect=self)
- )
- )
- row = cursor.fetchone()
-
- return isinstance(row[0], unicode)
- finally:
- cursor.close()
-
- # detect plain VARCHAR
- unicode_for_varchar = check_unicode(sqltypes.VARCHAR(60))
-
- # detect if there's an NVARCHAR type with different behavior available
- unicode_for_unicode = check_unicode(sqltypes.Unicode(60))
-
- if unicode_for_unicode and not unicode_for_varchar:
- return "conditional"
- else:
- return unicode_for_varchar
-
- def type_descriptor(self, typeobj):
- """Provide a database-specific ``TypeEngine`` object, given
- the generic object which comes from the types module.
-
- This method looks for a dictionary called
- ``colspecs`` as a class or instance-level variable,
- and passes the given type object on to ``types.adapt_type()``.
-
- """
- return sqltypes.adapt_type(typeobj, self.colspecs)
-
- def reflecttable(self, connection, table, include_columns):
- insp = reflection.Inspector.from_engine(connection)
- return insp.reflecttable(table, include_columns)
-
- def get_pk_constraint(self, conn, table_name, schema=None, **kw):
- """Compatiblity method, adapts the result of get_primary_keys()
- for those dialects which don't implement get_pk_constraint().
-
- """
- return {
- 'constrained_columns':
- self.get_primary_keys(conn, table_name,
- schema=schema, **kw)
- }
-
- def validate_identifier(self, ident):
- if len(ident) > self.max_identifier_length:
- raise exc.IdentifierError(
- "Identifier '%s' exceeds maximum length of %d characters" %
- (ident, self.max_identifier_length)
- )
-
- def connect(self, *cargs, **cparams):
- return self.dbapi.connect(*cargs, **cparams)
-
- def create_connect_args(self, url):
- opts = url.translate_connect_args()
- opts.update(url.query)
- return [[], opts]
-
- def do_begin(self, connection):
- """Implementations might want to put logic here for turning
- autocommit on/off, etc.
- """
-
- pass
-
- def do_rollback(self, connection):
- """Implementations might want to put logic here for turning
- autocommit on/off, etc.
- """
-
- connection.rollback()
-
- def do_commit(self, connection):
- """Implementations might want to put logic here for turning
- autocommit on/off, etc.
- """
-
- connection.commit()
-
- def create_xid(self):
- """Create a random two-phase transaction ID.
-
- This id will be passed to do_begin_twophase(), do_rollback_twophase(),
- do_commit_twophase(). Its format is unspecified.
- """
-
- return "_sa_%032x" % random.randint(0, 2 ** 128)
-
- def do_savepoint(self, connection, name):
- connection.execute(expression.SavepointClause(name))
-
- def do_rollback_to_savepoint(self, connection, name):
- connection.execute(expression.RollbackToSavepointClause(name))
-
- def do_release_savepoint(self, connection, name):
- connection.execute(expression.ReleaseSavepointClause(name))
-
- def do_executemany(self, cursor, statement, parameters, context=None):
- cursor.executemany(statement, parameters)
-
- def do_execute(self, cursor, statement, parameters, context=None):
- cursor.execute(statement, parameters)
-
- def is_disconnect(self, e, connection, cursor):
- return False
-
- def reset_isolation_level(self, dbapi_conn):
- # default_isolation_level is read from the first connection
- # after the initial set of 'isolation_level', if any, and so
- # represents the configured default of this dialect.
- self.set_isolation_level(dbapi_conn, self.default_isolation_level)
-
-class DefaultExecutionContext(base.ExecutionContext):
- isinsert = False
- isupdate = False
- isdelete = False
- isddl = False
- executemany = False
- result_map = None
- compiled = None
- statement = None
- _is_implicit_returning = False
- _is_explicit_returning = False
-
- @classmethod
- def _init_ddl(cls, dialect, connection, dbapi_connection, compiled_ddl):
- """Initialize execution context for a DDLElement construct."""
-
- self = cls.__new__(cls)
- self.dialect = dialect
- self.root_connection = connection
- self._dbapi_connection = dbapi_connection
- self.engine = connection.engine
-
- self.compiled = compiled = compiled_ddl
- self.isddl = True
-
- self.execution_options = compiled.statement._execution_options
- if connection._execution_options:
- self.execution_options = dict(self.execution_options)
- self.execution_options.update(connection._execution_options)
-
- if not dialect.supports_unicode_statements:
- self.unicode_statement = unicode(compiled)
- self.statement = dialect._encoder(self.unicode_statement)[0]
- else:
- self.statement = self.unicode_statement = unicode(compiled)
-
- self.cursor = self.create_cursor()
- self.compiled_parameters = []
-
- if dialect.positional:
- self.parameters = [dialect.execute_sequence_format()]
- else:
- self.parameters = [{}]
-
- return self
-
- @classmethod
- def _init_compiled(cls, dialect, connection, dbapi_connection, compiled, parameters):
- """Initialize execution context for a Compiled construct."""
-
- self = cls.__new__(cls)
- self.dialect = dialect
- self.root_connection = connection
- self._dbapi_connection = dbapi_connection
- self.engine = connection.engine
-
- self.compiled = compiled
-
- if not compiled.can_execute:
- raise exc.ArgumentError("Not an executable clause")
-
- self.execution_options = compiled.statement._execution_options
- if connection._execution_options:
- self.execution_options = dict(self.execution_options)
- self.execution_options.update(connection._execution_options)
-
- # compiled clauseelement. process bind params, process table defaults,
- # track collections used by ResultProxy to target and process results
-
- self.result_map = compiled.result_map
-
- self.unicode_statement = unicode(compiled)
- if not dialect.supports_unicode_statements:
- self.statement = self.unicode_statement.encode(self.dialect.encoding)
- else:
- self.statement = self.unicode_statement
-
- self.isinsert = compiled.isinsert
- self.isupdate = compiled.isupdate
- self.isdelete = compiled.isdelete
-
- if self.isinsert or self.isupdate or self.isdelete:
- self._is_explicit_returning = compiled.statement._returning
- self._is_implicit_returning = compiled.returning and \
- not compiled.statement._returning
-
- if not parameters:
- self.compiled_parameters = [compiled.construct_params()]
- else:
- self.compiled_parameters = \
- [compiled.construct_params(m, _group_number=grp) for
- grp,m in enumerate(parameters)]
-
- self.executemany = len(parameters) > 1
-
- self.cursor = self.create_cursor()
- if self.isinsert or self.isupdate:
- self.postfetch_cols = self.compiled.postfetch
- self.prefetch_cols = self.compiled.prefetch
- self.__process_defaults()
-
- processors = compiled._bind_processors
-
- # Convert the dictionary of bind parameter values
- # into a dict or list to be sent to the DBAPI's
- # execute() or executemany() method.
- parameters = []
- if dialect.positional:
- for compiled_params in self.compiled_parameters:
- param = []
- for key in self.compiled.positiontup:
- if key in processors:
- param.append(processors[key](compiled_params[key]))
- else:
- param.append(compiled_params[key])
- parameters.append(dialect.execute_sequence_format(param))
- else:
- encode = not dialect.supports_unicode_statements
- for compiled_params in self.compiled_parameters:
- param = {}
- if encode:
- for key in compiled_params:
- if key in processors:
- param[dialect._encoder(key)[0]] = \
- processors[key](compiled_params[key])
- else:
- param[dialect._encoder(key)[0]] = compiled_params[key]
- else:
- for key in compiled_params:
- if key in processors:
- param[key] = processors[key](compiled_params[key])
- else:
- param[key] = compiled_params[key]
- parameters.append(param)
- self.parameters = dialect.execute_sequence_format(parameters)
-
- return self
-
- @classmethod
- def _init_statement(cls, dialect, connection, dbapi_connection, statement, parameters):
- """Initialize execution context for a string SQL statement."""
-
- self = cls.__new__(cls)
- self.dialect = dialect
- self.root_connection = connection
- self._dbapi_connection = dbapi_connection
- self.engine = connection.engine
-
- # plain text statement
- self.execution_options = connection._execution_options
-
- if not parameters:
- if self.dialect.positional:
- self.parameters = [dialect.execute_sequence_format()]
- else:
- self.parameters = [{}]
- elif isinstance(parameters[0], dialect.execute_sequence_format):
- self.parameters = parameters
- elif isinstance(parameters[0], dict):
- if dialect.supports_unicode_statements:
- self.parameters = parameters
- else:
- self.parameters = [
- dict((dialect._encoder(k)[0], d[k]) for k in d)
- for d in parameters
- ] or [{}]
- else:
- self.parameters = [dialect.execute_sequence_format(p)
- for p in parameters]
-
- self.executemany = len(parameters) > 1
-
- if not dialect.supports_unicode_statements and isinstance(statement, unicode):
- self.unicode_statement = statement
- self.statement = dialect._encoder(statement)[0]
- else:
- self.statement = self.unicode_statement = statement
-
- self.cursor = self.create_cursor()
- return self
-
- @classmethod
- def _init_default(cls, dialect, connection, dbapi_connection):
- """Initialize execution context for a ColumnDefault construct."""
-
- self = cls.__new__(cls)
- self.dialect = dialect
- self.root_connection = connection
- self._dbapi_connection = dbapi_connection
- self.engine = connection.engine
- self.execution_options = connection._execution_options
- self.cursor = self.create_cursor()
- return self
-
- @util.memoized_property
- def is_crud(self):
- return self.isinsert or self.isupdate or self.isdelete
-
- @util.memoized_property
- def should_autocommit(self):
- autocommit = self.execution_options.get('autocommit',
- not self.compiled and
- self.statement and
- expression.PARSE_AUTOCOMMIT
- or False)
-
- if autocommit is expression.PARSE_AUTOCOMMIT:
- return self.should_autocommit_text(self.unicode_statement)
- else:
- return autocommit
-
- def _execute_scalar(self, stmt, type_):
- """Execute a string statement on the current cursor, returning a
- scalar result.
-
- Used to fire off sequences, default phrases, and "select lastrowid"
- types of statements individually or in the context of a parent INSERT
- or UPDATE statement.
-
- """
-
- conn = self.root_connection
- if isinstance(stmt, unicode) and \
- not self.dialect.supports_unicode_statements:
- stmt = self.dialect._encoder(stmt)[0]
-
- if self.dialect.positional:
- default_params = self.dialect.execute_sequence_format()
- else:
- default_params = {}
-
- conn._cursor_execute(self.cursor, stmt, default_params)
- r = self.cursor.fetchone()[0]
- if type_ is not None:
- # apply type post processors to the result
- proc = type_._cached_result_processor(
- self.dialect,
- self.cursor.description[0][1]
- )
- if proc:
- return proc(r)
- return r
-
- @property
- def connection(self):
- return self.root_connection._branch()
-
- def should_autocommit_text(self, statement):
- return AUTOCOMMIT_REGEXP.match(statement)
-
- def create_cursor(self):
- return self._dbapi_connection.cursor()
-
- def pre_exec(self):
- pass
-
- def post_exec(self):
- pass
-
- def get_lastrowid(self):
- """return self.cursor.lastrowid, or equivalent, after an INSERT.
-
- This may involve calling special cursor functions,
- issuing a new SELECT on the cursor (or a new one),
- or returning a stored value that was
- calculated within post_exec().
-
- This function will only be called for dialects
- which support "implicit" primary key generation,
- keep preexecute_autoincrement_sequences set to False,
- and when no explicit id value was bound to the
- statement.
-
- The function is called once, directly after
- post_exec() and before the transaction is committed
- or ResultProxy is generated. If the post_exec()
- method assigns a value to `self._lastrowid`, the
- value is used in place of calling get_lastrowid().
-
- Note that this method is *not* equivalent to the
- ``lastrowid`` method on ``ResultProxy``, which is a
- direct proxy to the DBAPI ``lastrowid`` accessor
- in all cases.
-
- """
- return self.cursor.lastrowid
-
- def handle_dbapi_exception(self, e):
- pass
-
- def get_result_proxy(self):
- return base.ResultProxy(self)
-
- @property
- def rowcount(self):
- return self.cursor.rowcount
-
- def supports_sane_rowcount(self):
- return self.dialect.supports_sane_rowcount
-
- def supports_sane_multi_rowcount(self):
- return self.dialect.supports_sane_multi_rowcount
-
- def post_insert(self):
- if not self._is_implicit_returning and \
- self.dialect.postfetch_lastrowid and \
- (not self.inserted_primary_key or \
- None in self.inserted_primary_key):
-
- table = self.compiled.statement.table
- lastrowid = self.get_lastrowid()
-
- autoinc_col = table._autoincrement_column
- if autoinc_col is not None:
- # apply type post processors to the lastrowid
- proc = autoinc_col.type._cached_result_processor(self.dialect, None)
- if proc is not None:
- lastrowid = proc(lastrowid)
-
- self.inserted_primary_key = [
- c is autoinc_col and lastrowid or v
- for c, v in zip(
- table.primary_key,
- self.inserted_primary_key)
- ]
-
- def _fetch_implicit_returning(self, resultproxy):
- table = self.compiled.statement.table
- row = resultproxy.fetchone()
-
- ipk = []
- for c, v in zip(table.primary_key, self.inserted_primary_key):
- if v is not None:
- ipk.append(v)
- else:
- ipk.append(row[c])
-
- self.inserted_primary_key = ipk
-
- def lastrow_has_defaults(self):
- return (self.isinsert or self.isupdate) and \
- bool(self.postfetch_cols)
-
- def set_input_sizes(self, translate=None, exclude_types=None):
- """Given a cursor and ClauseParameters, call the appropriate
- style of ``setinputsizes()`` on the cursor, using DB-API types
- from the bind parameter's ``TypeEngine`` objects.
-
- This method is only called by those dialects which require it,
- currently cx_oracle.
-
- """
-
- if not hasattr(self.compiled, 'bind_names'):
- return
-
- types = dict(
- (self.compiled.bind_names[bindparam], bindparam.type)
- for bindparam in self.compiled.bind_names)
-
- if self.dialect.positional:
- inputsizes = []
- for key in self.compiled.positiontup:
- typeengine = types[key]
- dbtype = typeengine.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
- if dbtype is not None and (not exclude_types or dbtype not in exclude_types):
- inputsizes.append(dbtype)
- try:
- self.cursor.setinputsizes(*inputsizes)
- except Exception, e:
- self.root_connection._handle_dbapi_exception(e, None, None, None, self)
- raise
- else:
- inputsizes = {}
- for key in self.compiled.bind_names.values():
- typeengine = types[key]
- dbtype = typeengine.dialect_impl(self.dialect).get_dbapi_type(self.dialect.dbapi)
- if dbtype is not None and (not exclude_types or dbtype not in exclude_types):
- if translate:
- key = translate.get(key, key)
- inputsizes[self.dialect._encoder(key)[0]] = dbtype
- try:
- self.cursor.setinputsizes(**inputsizes)
- except Exception, e:
- self.root_connection._handle_dbapi_exception(e, None, None, None, self)
- raise
-
- def _exec_default(self, default, type_):
- if default.is_sequence:
- return self.fire_sequence(default, type_)
- elif default.is_callable:
- return default.arg(self)
- elif default.is_clause_element:
- # TODO: expensive branching here should be
- # pulled into _exec_scalar()
- conn = self.connection
- c = expression.select([default.arg]).compile(bind=conn)
- return conn._execute_compiled(c, (), {}).scalar()
- else:
- return default.arg
-
- def get_insert_default(self, column):
- if column.default is None:
- return None
- else:
- return self._exec_default(column.default, column.type)
-
- def get_update_default(self, column):
- if column.onupdate is None:
- return None
- else:
- return self._exec_default(column.onupdate, column.type)
-
- def __process_defaults(self):
- """Generate default values for compiled insert/update statements,
- and generate inserted_primary_key collection.
- """
-
- if self.executemany:
- if len(self.compiled.prefetch):
- scalar_defaults = {}
-
- # pre-determine scalar Python-side defaults
- # to avoid many calls of get_insert_default()/
- # get_update_default()
- for c in self.prefetch_cols:
- if self.isinsert and c.default and c.default.is_scalar:
- scalar_defaults[c] = c.default.arg
- elif self.isupdate and c.onupdate and c.onupdate.is_scalar:
- scalar_defaults[c] = c.onupdate.arg
-
- for param in self.compiled_parameters:
- self.current_parameters = param
- for c in self.prefetch_cols:
- if c in scalar_defaults:
- val = scalar_defaults[c]
- elif self.isinsert:
- val = self.get_insert_default(c)
- else:
- val = self.get_update_default(c)
- if val is not None:
- param[c.key] = val
- del self.current_parameters
- else:
- self.current_parameters = compiled_parameters = \
- self.compiled_parameters[0]
-
- for c in self.compiled.prefetch:
- if self.isinsert:
- val = self.get_insert_default(c)
- else:
- val = self.get_update_default(c)
-
- if val is not None:
- compiled_parameters[c.key] = val
- del self.current_parameters
-
- if self.isinsert:
- self.inserted_primary_key = [
- self.compiled_parameters[0].get(c.key, None)
- for c in self.compiled.\
- statement.table.primary_key
- ]
-
-
-DefaultDialect.execution_ctx_cls = DefaultExecutionContext
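
For illustration only, a minimal sketch of a dialect built on
``DefaultDialect`` (the name and paramstyle below are invented); class-level
flags describe the target DBAPI, and ``positional`` is derived from the
paramstyle in ``__init__``::

    from sqlalchemy.engine import default

    class ToyDialect(default.DefaultDialect):
        name = 'toy'
        default_paramstyle = 'qmark'

    d = ToyDialect()
    assert d.paramstyle == 'qmark'
    assert d.positional        # 'qmark' is a positional paramstyle
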
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/reflection.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/reflection.py
deleted file mode 100755
index ca436032..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/reflection.py
+++ /dev/null
@@ -1,477 +0,0 @@
-# engine/reflection.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides an abstraction for obtaining database schema information.
-
-Usage Notes:
-
-Here are some general conventions when accessing the low-level inspector
-methods such as get_table_names, get_columns, etc.
-
-1. Inspector methods return lists of dicts in most cases for the following
- reasons:
-
- * They're both standard types that can be serialized.
- * Using a dict instead of a tuple allows easy expansion of attributes.
- * Using a list for the outer structure maintains order and is easy to work
- with (e.g. list comprehension [d['name'] for d in cols]).
-
-2. Records that contain a name, such as the column name in a column record,
- use the key 'name'. So for most return values, each record will have a
- 'name' attribute.
-"""
-
-import sqlalchemy
-from sqlalchemy import exc, sql
-from sqlalchemy import util
-from sqlalchemy.types import TypeEngine
-from sqlalchemy import schema as sa_schema
-
-
-@util.decorator
-def cache(fn, self, con, *args, **kw):
- info_cache = kw.get('info_cache', None)
- if info_cache is None:
- return fn(self, con, *args, **kw)
- key = (
- fn.__name__,
- tuple(a for a in args if isinstance(a, basestring)),
- tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float)))
- )
- ret = info_cache.get(key)
- if ret is None:
- ret = fn(self, con, *args, **kw)
- info_cache[key] = ret
- return ret
-
-
-class Inspector(object):
- """Performs database schema inspection.
-
- The Inspector acts as a proxy to the reflection methods of the
- :class:`~sqlalchemy.engine.base.Dialect`, providing a
- consistent interface as well as caching support for previously
- fetched metadata.
-
- The preferred method to construct an :class:`.Inspector` is via the
- :meth:`Inspector.from_engine` method. I.e.::
-
- engine = create_engine('...')
- insp = Inspector.from_engine(engine)
-
- Where above, the :class:`~sqlalchemy.engine.base.Dialect` may opt
- to return an :class:`.Inspector` subclass that provides additional
- methods specific to the dialect's target database.
-
- """
-
- def __init__(self, bind):
- """Initialize a new :class:`.Inspector`.
-
- :param bind: a :class:`~sqlalchemy.engine.base.Connectable`,
- which is typically an instance of
- :class:`~sqlalchemy.engine.base.Engine` or
- :class:`~sqlalchemy.engine.base.Connection`.
-
- For a dialect-specific instance of :class:`.Inspector`, see
- :meth:`Inspector.from_engine`
-
- """
- # this might not be a connection, it could be an engine.
- self.bind = bind
-
- # set the engine
- if hasattr(bind, 'engine'):
- self.engine = bind.engine
- else:
- self.engine = bind
-
- if self.engine is bind:
- # if engine, ensure initialized
- bind.connect().close()
-
- self.dialect = self.engine.dialect
- self.info_cache = {}
-
- @classmethod
- def from_engine(cls, bind):
- """Construct a new dialect-specific Inspector object from the given engine or connection.
-
- :param bind: a :class:`~sqlalchemy.engine.base.Connectable`,
- which is typically an instance of
- :class:`~sqlalchemy.engine.base.Engine` or
- :class:`~sqlalchemy.engine.base.Connection`.
-
- This method differs from a direct constructor call of :class:`.Inspector`
- in that the :class:`~sqlalchemy.engine.base.Dialect` is given a chance to provide
- a dialect-specific :class:`.Inspector` instance, which may provide additional
- methods.
-
- See the example at :class:`.Inspector`.
-
- """
- if hasattr(bind.dialect, 'inspector'):
- return bind.dialect.inspector(bind)
- return Inspector(bind)
-
- @property
- def default_schema_name(self):
- """Return the default schema name presented by the dialect
- for the current engine's database user.
-
- E.g. this is typically ``public`` for Postgresql and ``dbo``
- for SQL Server.
-
- """
- return self.dialect.default_schema_name
-
- def get_schema_names(self):
- """Return all schema names.
- """
-
- if hasattr(self.dialect, 'get_schema_names'):
- return self.dialect.get_schema_names(self.bind,
- info_cache=self.info_cache)
- return []
-
- def get_table_names(self, schema=None, order_by=None):
- """Return all table names in `schema`.
-
- :param schema: Optional, retrieve names from a non-default schema.
- :param order_by: Optional, may be the string "foreign_key" to sort
- the result on foreign key dependencies.
-
- This should probably not return view names, or perhaps it should
- return them with an indicator of 't' or 'v'.
- """
-
- if hasattr(self.dialect, 'get_table_names'):
- tnames = self.dialect.get_table_names(self.bind,
- schema,
- info_cache=self.info_cache)
- else:
- tnames = self.engine.table_names(schema)
- if order_by == 'foreign_key':
- ordered_tnames = tnames[:]
- # Order based on foreign key dependencies.
- for tname in tnames:
- table_pos = tnames.index(tname)
- fkeys = self.get_foreign_keys(tname, schema)
- for fkey in fkeys:
- rtable = fkey['referred_table']
- if rtable in ordered_tnames:
- ref_pos = ordered_tnames.index(rtable)
- # Make sure it's lower in the list than anything it
- # references.
- if table_pos > ref_pos:
- ordered_tnames.pop(table_pos) # rtable moves up 1
- # insert just below rtable
- ordered_tnames.insert(ref_pos, tname)
- tnames = ordered_tnames
- return tnames
-
- def get_table_options(self, table_name, schema=None, **kw):
- """Return a dictionary of options specified when the table of the given name was created.
-
- This currently includes some options that apply to MySQL tables.
-
- """
- if hasattr(self.dialect, 'get_table_options'):
- return self.dialect.get_table_options(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
- return {}
-
- def get_view_names(self, schema=None):
- """Return all view names in `schema`.
-
- :param schema: Optional, retrieve names from a non-default schema.
- """
-
- return self.dialect.get_view_names(self.bind, schema,
- info_cache=self.info_cache)
-
- def get_view_definition(self, view_name, schema=None):
- """Return definition for `view_name`.
-
- :param schema: Optional, retrieve names from a non-default schema.
- """
-
- return self.dialect.get_view_definition(
- self.bind, view_name, schema, info_cache=self.info_cache)
-
- def get_columns(self, table_name, schema=None, **kw):
- """Return information about columns in `table_name`.
-
- Given a string `table_name` and an optional string `schema`, return
- column information as a list of dicts with these keys:
-
- name
- the column's name
-
- type
- :class:`~sqlalchemy.types.TypeEngine`
-
- nullable
- boolean
-
- default
- the column's default value
-
- attrs
- dict containing optional column attributes
- """
-
- col_defs = self.dialect.get_columns(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
- for col_def in col_defs:
- # make this easy and only return instances for coltype
- coltype = col_def['type']
- if not isinstance(coltype, TypeEngine):
- col_def['type'] = coltype()
- return col_defs
-
- def get_primary_keys(self, table_name, schema=None, **kw):
- """Return information about primary keys in `table_name`.
-
- Given a string `table_name`, and an optional string `schema`, return
- primary key information as a list of column names.
- """
-
- pkeys = self.dialect.get_primary_keys(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
-
- return pkeys
-
- def get_pk_constraint(self, table_name, schema=None, **kw):
- """Return information about primary key constraint on `table_name`.
-
- Given a string `table_name`, and an optional string `schema`, return
- primary key information as a dictionary with these keys:
-
- constrained_columns
- a list of column names that make up the primary key
-
- name
- optional name of the primary key constraint.
-
- """
- pkeys = self.dialect.get_pk_constraint(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
-
- return pkeys
-
-
- def get_foreign_keys(self, table_name, schema=None, **kw):
- """Return information about foreign_keys in `table_name`.
-
- Given a string `table_name`, and an optional string `schema`, return
- foreign key information as a list of dicts with these keys:
-
- constrained_columns
- a list of column names that make up the foreign key
-
- referred_schema
- the name of the referred schema
-
- referred_table
- the name of the referred table
-
- referred_columns
- a list of column names in the referred table that correspond to
- constrained_columns
-
- name
- optional name of the foreign key constraint.
-
- \**kw
- other options passed to the dialect's get_foreign_keys() method.
-
- """
-
- fk_defs = self.dialect.get_foreign_keys(self.bind, table_name, schema,
- info_cache=self.info_cache,
- **kw)
- return fk_defs
-
- def get_indexes(self, table_name, schema=None, **kw):
- """Return information about indexes in `table_name`.
-
- Given a string `table_name` and an optional string `schema`, return
- index information as a list of dicts with these keys:
-
- name
- the index's name
-
- column_names
- list of column names in order
-
- unique
- boolean
-
- \**kw
- other options passed to the dialect's get_indexes() method.
- """
-
- indexes = self.dialect.get_indexes(self.bind, table_name,
- schema,
- info_cache=self.info_cache, **kw)
- return indexes
-
- def reflecttable(self, table, include_columns):
- """Given a Table object, load its internal constructs based on introspection.
-
- This is the underlying method used by most dialects to produce
- table reflection. Direct usage is like::
-
- from sqlalchemy import create_engine, MetaData, Table
- from sqlalchemy.engine import reflection
-
- engine = create_engine('...')
- meta = MetaData()
- user_table = Table('user', meta)
- insp = Inspector.from_engine(engine)
- insp.reflecttable(user_table, None)
-
- :param table: a :class:`~sqlalchemy.schema.Table` instance.
- :param include_columns: a list of string column names to include
- in the reflection process. If ``None``, all columns are reflected.
-
- """
- dialect = self.bind.dialect
-
- # MySQL dialect does this. Applicable with other dialects?
- if hasattr(dialect, '_connection_charset') \
- and hasattr(dialect, '_adjust_casing'):
- charset = dialect._connection_charset
- dialect._adjust_casing(table)
-
- # table attributes we might need.
- reflection_options = dict(
- (k, table.kwargs.get(k)) for k in dialect.reflection_options if k in table.kwargs)
-
- schema = table.schema
- table_name = table.name
-
- # apply table options
- tbl_opts = self.get_table_options(table_name, schema, **table.kwargs)
- if tbl_opts:
- table.kwargs.update(tbl_opts)
-
- # table.kwargs will need to be passed to each reflection method. Make
- # sure keywords are strings.
- tblkw = table.kwargs.copy()
- for (k, v) in tblkw.items():
- del tblkw[k]
- tblkw[str(k)] = v
-
- # Py2K
- if isinstance(schema, str):
- schema = schema.decode(dialect.encoding)
- if isinstance(table_name, str):
- table_name = table_name.decode(dialect.encoding)
- # end Py2K
-
- # columns
- found_table = False
- for col_d in self.get_columns(table_name, schema, **tblkw):
- found_table = True
- table.dispatch.column_reflect(table, col_d)
-
- name = col_d['name']
- if include_columns and name not in include_columns:
- continue
-
- coltype = col_d['type']
- col_kw = {
- 'nullable':col_d['nullable'],
- }
- for k in ('autoincrement', 'quote', 'info', 'key'):
- if k in col_d:
- col_kw[k] = col_d[k]
-
- colargs = []
- if col_d.get('default') is not None:
- # the "default" value is assumed to be a literal SQL expression,
- # so is wrapped in text() so that no quoting occurs on re-issuance.
- colargs.append(
- sa_schema.DefaultClause(
- sql.text(col_d['default']), _reflected=True
- )
- )
-
- if 'sequence' in col_d:
- # TODO: mssql, maxdb and sybase are using this.
- seq = col_d['sequence']
- sequence = sa_schema.Sequence(seq['name'], 1, 1)
- if 'start' in seq:
- sequence.start = seq['start']
- if 'increment' in seq:
- sequence.increment = seq['increment']
- colargs.append(sequence)
-
- col = sa_schema.Column(name, coltype, *colargs, **col_kw)
- table.append_column(col)
-
- if not found_table:
- raise exc.NoSuchTableError(table.name)
-
- # Primary keys
- pk_cons = self.get_pk_constraint(table_name, schema, **tblkw)
- if pk_cons:
- primary_key_constraint = sa_schema.PrimaryKeyConstraint(name=pk_cons.get('name'),
- *[table.c[pk] for pk in pk_cons['constrained_columns']
- if pk in table.c]
- )
-
- table.append_constraint(primary_key_constraint)
-
- # Foreign keys
- fkeys = self.get_foreign_keys(table_name, schema, **tblkw)
- for fkey_d in fkeys:
- conname = fkey_d['name']
- constrained_columns = fkey_d['constrained_columns']
- referred_schema = fkey_d['referred_schema']
- referred_table = fkey_d['referred_table']
- referred_columns = fkey_d['referred_columns']
- refspec = []
- if referred_schema is not None:
- sa_schema.Table(referred_table, table.metadata,
- autoload=True, schema=referred_schema,
- autoload_with=self.bind,
- **reflection_options
- )
- for column in referred_columns:
- refspec.append(".".join(
- [referred_schema, referred_table, column]))
- else:
- sa_schema.Table(referred_table, table.metadata, autoload=True,
- autoload_with=self.bind,
- **reflection_options
- )
- for column in referred_columns:
- refspec.append(".".join([referred_table, column]))
- table.append_constraint(
- sa_schema.ForeignKeyConstraint(constrained_columns, refspec,
- conname, link_to_name=True))
- # Indexes
- indexes = self.get_indexes(table_name, schema)
- for index_d in indexes:
- name = index_d['name']
- columns = index_d['column_names']
- unique = index_d['unique']
- flavor = index_d.get('type', 'unknown type')
- if include_columns and \
- not set(columns).issubset(include_columns):
- util.warn(
- "Omitting %s KEY for (%s), key covers omitted columns." %
- (flavor, ', '.join(columns)))
- continue
- sa_schema.Index(name, *[table.columns[c] for c in columns],
- **dict(unique=unique))
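
A runnable sketch of the Inspector in action, using an in-memory SQLite
database so that no external setup is assumed::

    from sqlalchemy import create_engine
    from sqlalchemy.engine import reflection

    engine = create_engine('sqlite://')
    engine.execute(
        "CREATE TABLE user (id INTEGER PRIMARY KEY, name VARCHAR(30))")

    insp = reflection.Inspector.from_engine(engine)
    print insp.get_table_names()                          # e.g. [u'user']
    print [c['name'] for c in insp.get_columns('user')]   # [u'id', u'name']
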
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/strategies.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/strategies.py
deleted file mode 100755
index eee19ee1..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/strategies.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# engine/strategies.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Strategies for creating new instances of Engine types.
-
-These are semi-private implementation classes which provide the
-underlying behavior for the "strategy" keyword argument available on
-:func:`~sqlalchemy.engine.create_engine`. Currently available options are
-``plain``, ``threadlocal``, and ``mock``.
-
-New strategies can be added via new ``EngineStrategy`` classes.
-"""
-
-from operator import attrgetter
-
-from sqlalchemy.engine import base, threadlocal, url
-from sqlalchemy import util, exc, event
-from sqlalchemy import pool as poollib
-
-strategies = {}
-
-
-class EngineStrategy(object):
- """An adaptor that processes input arguements and produces an Engine.
-
- Provides a ``create`` method that receives input arguments and
- produces an instance of base.Engine or a subclass.
-
- """
-
- def __init__(self):
- strategies[self.name] = self
-
- def create(self, *args, **kwargs):
- """Given arguments, returns a new Engine instance."""
-
- raise NotImplementedError()
-
-
-class DefaultEngineStrategy(EngineStrategy):
- """Base class for built-in stratgies."""
-
- def create(self, name_or_url, **kwargs):
- # create url.URL object
- u = url.make_url(name_or_url)
-
- dialect_cls = u.get_dialect()
-
- dialect_args = {}
- # consume dialect arguments from kwargs
- for k in util.get_cls_kwargs(dialect_cls):
- if k in kwargs:
- dialect_args[k] = kwargs.pop(k)
-
- dbapi = kwargs.pop('module', None)
- if dbapi is None:
- dbapi_args = {}
- for k in util.get_func_kwargs(dialect_cls.dbapi):
- if k in kwargs:
- dbapi_args[k] = kwargs.pop(k)
- dbapi = dialect_cls.dbapi(**dbapi_args)
-
- dialect_args['dbapi'] = dbapi
-
- # create dialect
- dialect = dialect_cls(**dialect_args)
-
- # assemble connection arguments
- (cargs, cparams) = dialect.create_connect_args(u)
- cparams.update(kwargs.pop('connect_args', {}))
-
- # look for existing pool or create
- pool = kwargs.pop('pool', None)
- if pool is None:
- def connect():
- try:
- return dialect.connect(*cargs, **cparams)
- except Exception, e:
- # Py3K
- #raise exc.DBAPIError.instance(None, None,
- # e, dialect.dbapi.Error) from e
- # Py2K
- import sys
- raise exc.DBAPIError.instance(
- None, None, e, dialect.dbapi.Error), \
- None, sys.exc_info()[2]
- # end Py2K
-
- creator = kwargs.pop('creator', connect)
-
- poolclass = kwargs.pop('poolclass', None)
- if poolclass is None:
- poolclass = dialect_cls.get_pool_class(u)
- pool_args = {}
-
- # consume pool arguments from kwargs, translating a few of
- # the arguments
- translate = {'logging_name': 'pool_logging_name',
- 'echo': 'echo_pool',
- 'timeout': 'pool_timeout',
- 'recycle': 'pool_recycle',
- 'events':'pool_events',
- 'use_threadlocal':'pool_threadlocal'}
- for k in util.get_cls_kwargs(poolclass):
- tk = translate.get(k, k)
- if tk in kwargs:
- pool_args[k] = kwargs.pop(tk)
- pool = poolclass(creator, **pool_args)
- else:
- if isinstance(pool, poollib._DBProxy):
- pool = pool.get_pool(*cargs, **cparams)
- else:
- pool = pool
-
- # create engine.
- engineclass = self.engine_cls
- engine_args = {}
- for k in util.get_cls_kwargs(engineclass):
- if k in kwargs:
- engine_args[k] = kwargs.pop(k)
-
- _initialize = kwargs.pop('_initialize', True)
-
- # all kwargs should be consumed
- if kwargs:
- raise TypeError(
- "Invalid argument(s) %s sent to create_engine(), "
- "using configuration %s/%s/%s. Please check that the "
- "keyword arguments are appropriate for this combination "
- "of components." % (','.join("'%s'" % k for k in kwargs),
- dialect.__class__.__name__,
- pool.__class__.__name__,
- engineclass.__name__))
-
- engine = engineclass(pool, dialect, u, **engine_args)
-
- if _initialize:
- do_on_connect = dialect.on_connect()
- if do_on_connect:
- def on_connect(dbapi_connection, connection_record):
- conn = getattr(dbapi_connection, '_sqla_unwrap', dbapi_connection)
- if conn is None:
- return
- do_on_connect(conn)
-
- event.listen(pool, 'first_connect', on_connect)
- event.listen(pool, 'connect', on_connect)
-
- def first_connect(dbapi_connection, connection_record):
- c = base.Connection(engine, connection=dbapi_connection)
-
- # TODO: removing this allows the on connect activities
- # to generate events. tests currently assume these aren't
- # sent. do we want users to get all the initial connect
- # activities as events ?
- c._has_events = False
-
- dialect.initialize(c)
- event.listen(pool, 'first_connect', first_connect)
-
- return engine
-
-
-class PlainEngineStrategy(DefaultEngineStrategy):
- """Strategy for configuring a regular Engine."""
-
- name = 'plain'
- engine_cls = base.Engine
-
-PlainEngineStrategy()
-
-
-class ThreadLocalEngineStrategy(DefaultEngineStrategy):
- """Strategy for configuring an Engine with thredlocal behavior."""
-
- name = 'threadlocal'
- engine_cls = threadlocal.TLEngine
-
-ThreadLocalEngineStrategy()
-
-
-class MockEngineStrategy(EngineStrategy):
- """Strategy for configuring an Engine-like object with mocked execution.
-
- Produces a single mock Connectable object which dispatches
- statement execution to a passed-in function.
-
- """
-
- name = 'mock'
-
- def create(self, name_or_url, executor, **kwargs):
- # create url.URL object
- u = url.make_url(name_or_url)
-
- dialect_cls = u.get_dialect()
-
- dialect_args = {}
- # consume dialect arguments from kwargs
- for k in util.get_cls_kwargs(dialect_cls):
- if k in kwargs:
- dialect_args[k] = kwargs.pop(k)
-
- # create dialect
- dialect = dialect_cls(**dialect_args)
-
- return MockEngineStrategy.MockConnection(dialect, executor)
-
- class MockConnection(base.Connectable):
- def __init__(self, dialect, execute):
- self._dialect = dialect
- self.execute = execute
-
- engine = property(lambda s: s)
- dialect = property(attrgetter('_dialect'))
- name = property(lambda s: s._dialect.name)
-
- def contextual_connect(self, **kwargs):
- return self
-
- def compiler(self, statement, parameters, **kwargs):
- return self._dialect.compiler(
- statement, parameters, engine=self, **kwargs)
-
- def create(self, entity, **kwargs):
- kwargs['checkfirst'] = False
- from sqlalchemy.engine import ddl
-
- ddl.SchemaGenerator(self.dialect, self, **kwargs).traverse(entity)
-
- def drop(self, entity, **kwargs):
- kwargs['checkfirst'] = False
- from sqlalchemy.engine import ddl
- ddl.SchemaDropper(self.dialect, self, **kwargs).traverse(entity)
-
- def execute(self, object, *multiparams, **params):
- raise NotImplementedError()
-
-MockEngineStrategy()
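
A short sketch of the ``mock`` strategy, commonly used to dump DDL as SQL
strings rather than executing it; the ``dump`` callable is a stand-in for
any executor::

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    def dump(sql, *multiparams, **params):
        # each DDL construct arrives here; compile it against the dialect
        print sql.compile(dialect=engine.dialect)

    engine = create_engine('sqlite://', strategy='mock', executor=dump)

    meta = MetaData()
    Table('t', meta, Column('id', Integer, primary_key=True))
    meta.create_all(engine)   # prints CREATE TABLE t (...), no database used
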
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/threadlocal.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/threadlocal.py
deleted file mode 100755
index 45780ad0..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/threadlocal.py
+++ /dev/null
@@ -1,126 +0,0 @@
-# engine/threadlocal.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides a thread-local transactional wrapper around the root Engine class.
-
-The ``threadlocal`` module is invoked when the ``strategy="threadlocal"`` flag is
-used with :func:`~sqlalchemy.engine.create_engine`. It is semi-private and is
-selected automatically when that engine strategy is in use.
-"""
-
-from sqlalchemy import util, event
-from sqlalchemy.engine import base
-import weakref
-
-class TLConnection(base.Connection):
- def __init__(self, *arg, **kw):
- super(TLConnection, self).__init__(*arg, **kw)
- self.__opencount = 0
-
- def _increment_connect(self):
- self.__opencount += 1
- return self
-
- def close(self):
- if self.__opencount == 1:
- base.Connection.close(self)
- self.__opencount -= 1
-
- def _force_close(self):
- self.__opencount = 0
- base.Connection.close(self)
-
-class TLEngine(base.Engine):
- """An Engine that includes support for thread-local managed transactions."""
-
- _tl_connection_cls = TLConnection
-
- def __init__(self, *args, **kwargs):
- super(TLEngine, self).__init__(*args, **kwargs)
- self._connections = util.threading.local()
-
-
- def contextual_connect(self, **kw):
- if not hasattr(self._connections, 'conn'):
- connection = None
- else:
- connection = self._connections.conn()
-
- if connection is None or connection.closed:
- # guards against pool-level reapers, if desired.
- # or not connection.connection.is_valid:
- connection = self._tl_connection_cls(self, self.pool.connect(), **kw)
- self._connections.conn = conn = weakref.ref(connection)
-
- return connection._increment_connect()
-
- def begin_twophase(self, xid=None):
- if not hasattr(self._connections, 'trans'):
- self._connections.trans = []
- self._connections.trans.append(self.contextual_connect().begin_twophase(xid=xid))
- return self
-
- def begin_nested(self):
- if not hasattr(self._connections, 'trans'):
- self._connections.trans = []
- self._connections.trans.append(self.contextual_connect().begin_nested())
- return self
-
- def begin(self):
- if not hasattr(self._connections, 'trans'):
- self._connections.trans = []
- self._connections.trans.append(self.contextual_connect().begin())
- return self
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- if type is None:
- self.commit()
- else:
- self.rollback()
-
- def prepare(self):
- if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
- return
- self._connections.trans[-1].prepare()
-
- def commit(self):
- if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
- return
- trans = self._connections.trans.pop(-1)
- trans.commit()
-
- def rollback(self):
- if not hasattr(self._connections, 'trans') or \
- not self._connections.trans:
- return
- trans = self._connections.trans.pop(-1)
- trans.rollback()
-
- def dispose(self):
- self._connections = util.threading.local()
- super(TLEngine, self).dispose()
-
- @property
- def closed(self):
- return not hasattr(self._connections, 'conn') or \
- self._connections.conn() is None or \
- self._connections.conn().closed
-
- def close(self):
- if not self.closed:
- self.contextual_connect().close()
- connection = self._connections.conn()
- connection._force_close()
- del self._connections.conn
- self._connections.trans = []
-
- def __repr__(self):
- return 'TLEngine(%s)' % str(self.url)
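
A sketch of the transaction pattern ``TLEngine`` enables; since ``begin()``
returns the engine itself, the ``__enter__``/``__exit__`` hooks above let it
act as a context manager::

    from sqlalchemy import create_engine

    engine = create_engine('sqlite://', strategy='threadlocal')

    with engine.begin():
        # both statements share the same thread-local connection/transaction
        engine.execute("CREATE TABLE t (id INTEGER)")
        engine.execute("INSERT INTO t (id) VALUES (1)")
    # committed on normal exit, rolled back if an exception propagates
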
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/url.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/url.py
deleted file mode 100755
index 7d5e0692..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/engine/url.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# engine/url.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
-information about a database connection specification.
-
-The URL object is created automatically when :func:`~sqlalchemy.engine.create_engine` is called
-with a string argument; alternatively, the URL is a public-facing construct which can
-be used directly and is also accepted directly by ``create_engine()``.
-"""
-
-import re, cgi, sys, urllib
-from sqlalchemy import exc
-
-
-class URL(object):
- """
- Represent the components of a URL used to connect to a database.
-
- This object is suitable to be passed directly to a
- ``create_engine()`` call. The fields of the URL are parsed from a
- string by the module-level ``make_url()`` function. The string
- format of the URL is an RFC-1738-style string.
-
- All initialization parameters are available as public attributes.
-
- :param drivername: the name of the database backend.
- This name will correspond to a module in sqlalchemy/databases
- or a third party plug-in.
-
- :param username: The user name.
-
- :param password: The database password.
-
- :param host: The name of the host.
-
- :param port: The port number.
-
- :param database: The database name.
-
- :param query: A dictionary of options to be passed to the
- dialect and/or the DBAPI upon connect.
-
- """
-
- def __init__(self, drivername, username=None, password=None,
- host=None, port=None, database=None, query=None):
- self.drivername = drivername
- self.username = username
- self.password = password
- self.host = host
- if port is not None:
- self.port = int(port)
- else:
- self.port = None
- self.database = database
- self.query = query or {}
-
- def __str__(self):
- s = self.drivername + "://"
- if self.username is not None:
- s += self.username
- if self.password is not None:
- s += ':' + urllib.quote_plus(self.password)
- s += "@"
- if self.host is not None:
- s += self.host
- if self.port is not None:
- s += ':' + str(self.port)
- if self.database is not None:
- s += '/' + self.database
- if self.query:
- keys = self.query.keys()
- keys.sort()
- s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
- return s
-
- def __hash__(self):
- return hash(str(self))
-
- def __eq__(self, other):
- return \
- isinstance(other, URL) and \
- self.drivername == other.drivername and \
- self.username == other.username and \
- self.password == other.password and \
- self.host == other.host and \
- self.port == other.port and \
- self.database == other.database and \
- self.query == other.query
-
- def get_dialect(self):
- """Return the SQLAlchemy database dialect class corresponding
- to this URL's driver name.
- """
-
- try:
- if '+' in self.drivername:
- dialect, driver = self.drivername.split('+')
- else:
- dialect, driver = self.drivername, 'base'
-
- module = __import__('sqlalchemy.dialects.%s' % (dialect, )).dialects
- module = getattr(module, dialect)
- module = getattr(module, driver)
-
- return module.dialect
- except ImportError:
- module = self._load_entry_point()
- if module is not None:
- return module
- else:
- raise exc.ArgumentError(
- "Could not determine dialect for '%s'." % self.drivername)
-
- def _load_entry_point(self):
- """attempt to load this url's dialect from entry points, or return None
- if pkg_resources is not installed or there is no matching entry point.
-
- Raise ImportError if the actual load fails.
-
- """
- try:
- import pkg_resources
- except ImportError:
- return None
-
- for res in pkg_resources.iter_entry_points('sqlalchemy.dialects'):
- if res.name == self.drivername:
- return res.load()
- else:
- return None
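A third-party dialect can advertise itself under the ``sqlalchemy.dialects`` entry point group so that ``get_dialect()`` can resolve its drivername; a hedged sketch of the packaging side (package and class names are invented for illustration):

    # setup.py of a hypothetical external dialect package
    from setuptools import setup

    setup(
        name='sqlalchemy-mydb',
        entry_points={
            'sqlalchemy.dialects': [
                # drivername "mydb" -> dialect class to load
                'mydb = sqlalchemy_mydb.base:MyDBDialect',
            ],
        },
    )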
-
- def translate_connect_args(self, names=None, **kw):
- """Translate url attributes into a dictionary of connection arguments.
-
- Returns attributes of this url (`host`, `database`, `username`,
- `password`, `port`) as a plain dictionary. The attribute names are
- used as the keys by default. Unset or false attributes are omitted
- from the final dictionary.
-
- :param \**kw: Optional, alternate key names for url attributes.
-
- :param names: Deprecated. Same purpose as the keyword-based alternate names,
- but correlates the name to the original positionally.
- """
-
- translated = {}
- attribute_names = ['host', 'database', 'username', 'password', 'port']
- for sname in attribute_names:
- if names:
- name = names.pop(0)
- elif sname in kw:
- name = kw[sname]
- else:
- name = sname
- if name is not None and getattr(self, sname, False):
- translated[name] = getattr(self, sname)
- return translated
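A hedged sketch of the translation (the URL string is arbitrary): unset attributes such as ``port`` are simply omitted from the result.

    from sqlalchemy.engine.url import make_url

    url = make_url("mysql://scott:tiger@localhost/test")
    args = url.translate_connect_args(username='user', password='passwd')
    # -> {'host': 'localhost', 'database': 'test',
    #     'user': 'scott', 'passwd': 'tiger'}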
-
-def make_url(name_or_url):
- """Given a string or unicode instance, produce a new URL instance.
-
- The given string is parsed according to the RFC 1738 spec. If an
- existing URL object is passed, just returns the object.
- """
-
- if isinstance(name_or_url, basestring):
- return _parse_rfc1738_args(name_or_url)
- else:
- return name_or_url
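For example (the connection string is illustrative), the parsed components come back as attributes of the URL object:

    from sqlalchemy.engine.url import make_url

    url = make_url("postgresql://scott:tiger@localhost:5432/test?sslmode=require")
    assert url.drivername == 'postgresql'
    assert url.host == 'localhost' and url.port == 5432
    assert url.query == {'sslmode': 'require'}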
-
-def _parse_rfc1738_args(name):
- pattern = re.compile(r'''
- (?P<name>[\w\+]+)://
- (?:
- (?P<username>[^:/]*)
- (?::(?P<password>[^/]*))?
- @)?
- (?:
- (?P<host>[^/:]*)
- (?::(?P<port>[^/]*))?
- )?
- (?:/(?P<database>.*))?
- '''
- , re.X)
-
- m = pattern.match(name)
- if m is not None:
- components = m.groupdict()
- if components['database'] is not None:
- tokens = components['database'].split('?', 2)
- components['database'] = tokens[0]
- query = (len(tokens) > 1 and dict(cgi.parse_qsl(tokens[1]))) or None
- # Py2K
- if query is not None:
- query = dict((k.encode('ascii'), query[k]) for k in query)
- # end Py2K
- else:
- query = None
- components['query'] = query
-
- if components['password'] is not None:
- components['password'] = urllib.unquote_plus(components['password'])
-
- name = components.pop('name')
- return URL(name, **components)
- else:
- raise exc.ArgumentError(
- "Could not parse rfc1738 URL from string '%s'" % name)
-
-def _parse_keyvalue_args(name):
- m = re.match(r'(\w+)://(.*)', name)
- if m is not None:
- (name, args) = m.group(1, 2)
- opts = dict(cgi.parse_qsl(args))
- return URL(name, **opts)
- else:
- return None
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/event.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/event.py
deleted file mode 100755
index 4be227c5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/event.py
+++ /dev/null
@@ -1,347 +0,0 @@
-# sqlalchemy/event.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Base event API."""
-
-from sqlalchemy import util, exc
-
-CANCEL = util.symbol('CANCEL')
-NO_RETVAL = util.symbol('NO_RETVAL')
-
-def listen(target, identifier, fn, *args, **kw):
- """Register a listener function for the given target.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- event.listen(
- UniqueConstraint,
- "after_parent_attach",
- unique_constraint_name)
-
- """
-
- for evt_cls in _registrars[identifier]:
- tgt = evt_cls._accept_with(target)
- if tgt is not None:
- tgt.dispatch._listen(tgt, identifier, fn, *args, **kw)
- return
- raise exc.InvalidRequestError("No such event '%s' for target '%s'" %
- (identifier, target))
-
-def listens_for(target, identifier, *args, **kw):
- """Decorate a function as a listener for the given target + identifier.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.schema import UniqueConstraint
-
- @event.listens_for(UniqueConstraint, "after_parent_attach")
- def unique_constraint_name(const, table):
- const.name = "uq_%s_%s" % (
- table.name,
- list(const.columns)[0].name
- )
- """
- def decorate(fn):
- listen(target, identifier, fn, *args, **kw)
- return fn
- return decorate
-
-def remove(target, identifier, fn):
- """Remove an event listener.
-
- Note that some event removals, particularly for those event dispatchers
- which create wrapper functions and secondary event listeners, may not yet
- be supported.
-
- """
- for evt_cls in _registrars[identifier]:
- tgt = evt_cls._accept_with(target)
- if tgt is not None:
- tgt.dispatch._remove(tgt, identifier, fn)
- return
-
-_registrars = util.defaultdict(list)
-
-def _is_event_name(name):
- return not name.startswith('_') and name != 'dispatch'
-
-class _UnpickleDispatch(object):
- """Serializable callable that re-generates an instance of :class:`_Dispatch`
- given a particular :class:`.Events` subclass.
-
- """
- def __call__(self, _parent_cls):
- for cls in _parent_cls.__mro__:
- if 'dispatch' in cls.__dict__:
- return cls.__dict__['dispatch'].dispatch_cls(_parent_cls)
- else:
- raise AttributeError("No class with a 'dispatch' member present.")
-
-class _Dispatch(object):
- """Mirror the event listening definitions of an Events class with
- listener collections.
-
- Classes which define a "dispatch" member will return a
- non-instantiated :class:`._Dispatch` subclass when the member
- is accessed at the class level. When the "dispatch" member is
- accessed at the instance level of its owner, an instance
- of the :class:`._Dispatch` class is returned.
-
- A :class:`._Dispatch` class is generated for each :class:`.Events`
- class defined, by the :func:`._create_dispatcher_class` function.
- The original :class:`.Events` classes remain untouched.
- This decouples the construction of :class:`.Events` subclasses from
- the implementation used by the event internals, and allows
- inspecting tools like Sphinx to work in an unsurprising
- way against the public API.
-
- """
-
- def __init__(self, _parent_cls):
- self._parent_cls = _parent_cls
-
- def __reduce__(self):
- return _UnpickleDispatch(), (self._parent_cls, )
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- for ls in _event_descriptors(other):
- getattr(self, ls.name)._update(ls, only_propagate=only_propagate)
-
-def _event_descriptors(target):
- return [getattr(target, k) for k in dir(target) if _is_event_name(k)]
-
-class _EventMeta(type):
- """Intercept new Event subclasses and create
- associated _Dispatch classes."""
-
- def __init__(cls, classname, bases, dict_):
- _create_dispatcher_class(cls, classname, bases, dict_)
- return type.__init__(cls, classname, bases, dict_)
-
-def _create_dispatcher_class(cls, classname, bases, dict_):
- """Create a :class:`._Dispatch` class corresponding to an
- :class:`.Events` class."""
-
- # there's all kinds of ways to do this,
- # i.e. make a Dispatch class that shares the '_listen' method
- # of the Event class, this is the straight monkeypatch.
- dispatch_base = getattr(cls, 'dispatch', _Dispatch)
- cls.dispatch = dispatch_cls = type("%sDispatch" % classname,
- (dispatch_base, ), {})
- dispatch_cls._listen = cls._listen
- dispatch_cls._clear = cls._clear
- dispatch_cls._remove = cls._remove
-
- for k in dict_:
- if _is_event_name(k):
- setattr(dispatch_cls, k, _DispatchDescriptor(dict_[k]))
- _registrars[k].append(cls)
-
-def _remove_dispatcher(cls):
- for k in dir(cls):
- if _is_event_name(k):
- _registrars[k].remove(cls)
- if not _registrars[k]:
- del _registrars[k]
-
-class Events(object):
- """Define event listening functions for a particular target type."""
-
-
- __metaclass__ = _EventMeta
-
- @classmethod
- def _accept_with(cls, target):
- # Mapper, ClassManager, Session override this to
- # also accept classes, scoped_sessions, sessionmakers, etc.
- if hasattr(target, 'dispatch') and (
- isinstance(target.dispatch, cls.dispatch) or \
- isinstance(target.dispatch, type) and \
- issubclass(target.dispatch, cls.dispatch)
- ):
- return target
- else:
- return None
-
- @classmethod
- def _listen(cls, target, identifier, fn, propagate=False, insert=False):
- if insert:
- getattr(target.dispatch, identifier).insert(fn, target, propagate)
- else:
- getattr(target.dispatch, identifier).append(fn, target, propagate)
-
- @classmethod
- def _remove(cls, target, identifier, fn):
- getattr(target.dispatch, identifier).remove(fn, target)
-
- @classmethod
- def _clear(cls):
- for attr in dir(cls.dispatch):
- if _is_event_name(attr):
- getattr(cls.dispatch, attr).clear()
-
-class _DispatchDescriptor(object):
- """Class-level attributes on :class:`._Dispatch` classes."""
-
- def __init__(self, fn):
- self.__name__ = fn.__name__
- self.__doc__ = fn.__doc__
- self._clslevel = util.defaultdict(list)
-
- def insert(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
-
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- self._clslevel[cls].insert(0, obj)
-
- def append(self, obj, target, propagate):
- assert isinstance(target, type), \
- "Class-level Event targets must be classes."
-
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- self._clslevel[cls].append(obj)
-
- def remove(self, obj, target):
- stack = [target]
- while stack:
- cls = stack.pop(0)
- stack.extend(cls.__subclasses__())
- self._clslevel[cls].remove(obj)
-
- def clear(self):
- """Clear all class level listeners"""
-
- for dispatcher in self._clslevel.values():
- dispatcher[:] = []
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- obj.__dict__[self.__name__] = result = \
- _ListenerCollection(self, obj._parent_cls)
- return result
-
-class _ListenerCollection(object):
- """Instance-level attributes on instances of :class:`._Dispatch`.
-
- Represents a collection of listeners.
-
- """
-
- _exec_once = False
-
- def __init__(self, parent, target_cls):
- self.parent_listeners = parent._clslevel[target_cls]
- self.name = parent.__name__
- self.listeners = []
- self.propagate = set()
-
- def exec_once(self, *args, **kw):
- """Execute this event, but only if it has not been
- executed already for this collection."""
-
- if not self._exec_once:
- self(*args, **kw)
- self._exec_once = True
-
- def __call__(self, *args, **kw):
- """Execute this event."""
-
- for fn in self.parent_listeners:
- fn(*args, **kw)
- for fn in self.listeners:
- fn(*args, **kw)
-
- # I'm not entirely thrilled about the overhead here,
- # but this allows class-level listeners to be added
- # at any point.
- #
- # alternatively, _DispatchDescriptor could notify
- # all _ListenerCollection objects, but then we move
- # to a higher memory model, i.e.weakrefs to all _ListenerCollection
- # objects, the _DispatchDescriptor collection repeated
- # for all instances.
-
- def __len__(self):
- return len(self.parent_listeners + self.listeners)
-
- def __iter__(self):
- return iter(self.parent_listeners + self.listeners)
-
- def __getitem__(self, index):
- return (self.parent_listeners + self.listeners)[index]
-
- def __nonzero__(self):
- return bool(self.listeners or self.parent_listeners)
-
- def _update(self, other, only_propagate=True):
- """Populate from the listeners in another :class:`_Dispatch`
- object."""
-
- existing_listeners = self.listeners
- existing_listener_set = set(existing_listeners)
- self.propagate.update(other.propagate)
- existing_listeners.extend([l for l
- in other.listeners
- if l not in existing_listener_set
- and (not only_propagate or l in self.propagate)
- ])
-
- def insert(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.insert(0, obj)
- if propagate:
- self.propagate.add(obj)
-
- def append(self, obj, target, propagate):
- if obj not in self.listeners:
- self.listeners.append(obj)
- if propagate:
- self.propagate.add(obj)
-
- def remove(self, obj, target):
- if obj in self.listeners:
- self.listeners.remove(obj)
- self.propagate.discard(obj)
-
- def clear(self):
- self.listeners[:] = []
- self.propagate.clear()
-
-class dispatcher(object):
- """Descriptor used by target classes to
- deliver the _Dispatch class at the class level
- and produce new _Dispatch instances for target
- instances.
-
- """
- def __init__(self, events):
- self.dispatch_cls = events.dispatch
- self.events = events
-
- def __get__(self, obj, cls):
- if obj is None:
- return self.dispatch_cls
- obj.__dict__['dispatch'] = disp = self.dispatch_cls(cls)
- return disp
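Putting the pieces together, a hedged sketch of the wiring pattern these classes implement (the target class and event name are invented for illustration): an ``Events`` subclass defines the event names, the ``dispatcher`` descriptor attaches the machinery to a target class, and listeners registered via ``event.listen()`` land in the per-event listener collections.

    from sqlalchemy import event

    class MyEvents(event.Events):
        # each method defines an event name and its listener signature
        def on_thing(self, target):
            """Called when a thing happens."""

    class MyTarget(object):
        # the dispatcher descriptor hands out the _Dispatch machinery
        dispatch = event.dispatcher(MyEvents)

    @event.listens_for(MyTarget, 'on_thing')
    def handler(target):
        print "got %r" % target

    t = MyTarget()
    t.dispatch.on_thing(t)      # invokes handler(t)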
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/events.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/events.py
deleted file mode 100755
index 50637705..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/events.py
+++ /dev/null
@@ -1,429 +0,0 @@
-# sqlalchemy/events.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Core event interfaces."""
-
-from sqlalchemy import event, exc
-
-class DDLEvents(event.Events):
- """
- Define event listeners for schema objects,
- that is, :class:`.SchemaItem` and :class:`.SchemaEvent`
- subclasses, including :class:`.MetaData`, :class:`.Table`,
- :class:`.Column`.
-
- :class:`.MetaData` and :class:`.Table` support events
- specifically regarding when CREATE and DROP
- DDL is emitted to the database.
-
- Attachment events are also provided to customize
- behavior whenever a child schema element is associated
- with a parent, such as, when a :class:`.Column` is associated
- with its :class:`.Table`, when a :class:`.ForeignKeyConstraint`
- is associated with a :class:`.Table`, etc.
-
- Example using the ``after_create`` event::
-
- from sqlalchemy import event
- from sqlalchemy import Table, Column, MetaData, Integer
-
- m = MetaData()
- some_table = Table('some_table', m, Column('data', Integer))
-
- def after_create(target, connection, **kw):
- connection.execute("ALTER TABLE %s SET name=foo_%s" %
- (target.name, target.name))
-
- event.listen(some_table, "after_create", after_create)
-
- DDL events integrate closely with the
- :class:`.DDL` class and the :class:`.DDLElement` hierarchy
- of DDL clause constructs, which are themselves appropriate
- as listener callables::
-
- from sqlalchemy import DDL
- event.listen(
- some_table,
- "after_create",
- DDL("ALTER TABLE %(table)s SET name=foo_%(table)s")
- )
-
- The methods here define the name of an event as well
- as the names of members that are passed to listener
- functions.
-
- See also:
-
- :ref:`event_toplevel`
-
- :class:`.DDLElement`
-
- :class:`.DDL`
-
- :ref:`schema_ddl_sequences`
-
- """
-
- def before_create(self, target, connection, **kw):
- """Called before CREATE statments are emitted.
-
- :param target: the :class:`.MetaData` or :class:`.Table`
- object which is the target of the event.
- :param connection: the :class:`.Connection` where the
- CREATE statement or statements will be emitted.
- :param \**kw: additional keyword arguments relevant
- to the event. Currently this includes the ``tables``
- argument in the case of a :class:`.MetaData` object,
- which is the list of :class:`.Table` objects for which
- CREATE will be emitted.
-
- """
-
- def after_create(self, target, connection, **kw):
- """Called after CREATE statments are emitted.
-
- :param target: the :class:`.MetaData` or :class:`.Table`
- object which is the target of the event.
- :param connection: the :class:`.Connection` where the
- CREATE statement or statements have been emitted.
- :param \**kw: additional keyword arguments relevant
- to the event. Currently this includes the ``tables``
- argument in the case of a :class:`.MetaData` object,
- which is the list of :class:`.Table` objects for which
- CREATE has been emitted.
-
- """
-
- def before_drop(self, target, connection, **kw):
- """Called before DROP statments are emitted.
-
- :param target: the :class:`.MetaData` or :class:`.Table`
- object which is the target of the event.
- :param connection: the :class:`.Connection` where the
- DROP statement or statements will be emitted.
- :param \**kw: additional keyword arguments relevant
- to the event. Currently this includes the ``tables``
- argument in the case of a :class:`.MetaData` object,
- which is the list of :class:`.Table` objects for which
- DROP will be emitted.
-
- """
-
- def after_drop(self, target, connection, **kw):
- """Called after DROP statments are emitted.
-
- :param target: the :class:`.MetaData` or :class:`.Table`
- object which is the target of the event.
- :param connection: the :class:`.Connection` where the
- DROP statement or statements have been emitted.
- :param \**kw: additional keyword arguments relevant
- to the event. Currently this includes the ``tables``
- argument in the case of a :class:`.MetaData` object,
- which is the list of :class:`.Table` objects for which
- DROP has been emitted.
-
- """
-
- def before_parent_attach(self, target, parent):
- """Called before a :class:`.SchemaItem` is associated with
- a parent :class:`.SchemaItem`.
-
- :param target: the target object
- :param parent: the parent to which the target is being attached.
-
- :func:`.event.listen` also accepts a modifier for this event:
-
- :param propagate=False: When True, the listener function will
- be established for any copies made of the target object,
- i.e. those copies that are generated when
- :meth:`.Table.tometadata` is used.
-
- """
-
- def after_parent_attach(self, target, parent):
- """Called after a :class:`.SchemaItem` is associated with
- a parent :class:`.SchemaItem`.
-
- :param target: the target object
- :param parent: the parent to which the target is being attached.
-
- :func:`.event.listen` also accepts a modifier for this event:
-
- :param propagate=False: When True, the listener function will
- be established for any copies made of the target object,
- i.e. those copies that are generated when
- :meth:`.Table.tometadata` is used.
-
- """
-
- def column_reflect(self, table, column_info):
- """Called for each unit of 'column info' retrieved when
- a :class:`.Table` is being reflected.
-
- The dictionary of column information as returned by the
- dialect is passed, and can be modified. The dictionary
- is that returned in each element of the list returned
- by :meth:`.reflection.Inspector.get_columns`.
-
- The event is called before any action is taken against
- this dictionary, and the contents can be modified.
- The :class:`.Column` specific arguments ``info``, ``key``,
- and ``quote`` can also be added to the dictionary and
- will be passed to the constructor of :class:`.Column`.
-
- Note that this event is only meaningful if associated
- either with the :class:`.Table` class across the
- board, e.g.::
-
- from sqlalchemy.schema import Table
- from sqlalchemy import event
-
- def listen_for_reflect(table, column_info):
- "receive a column_reflect event"
- # ...
-
- event.listen(
- Table,
- 'column_reflect',
- listen_for_reflect)
-
- ...or with a specific :class:`.Table` instance using
- the ``listeners`` argument::
-
- def listen_for_reflect(table, column_info):
- "receive a column_reflect event"
- # ...
-
- t = Table(
- 'sometable',
- autoload=True,
- listeners=[
- ('column_reflect', listen_for_reflect)
- ])
-
- This is because the reflection process initiated by ``autoload=True``
- completes within the scope of the constructor for :class:`.Table`.
-
- """
-
-class SchemaEventTarget(object):
- """Base class for elements that are the targets of :class:`.DDLEvents` events.
-
- This includes :class:`.SchemaItem` as well as :class:`.SchemaType`.
-
- """
- dispatch = event.dispatcher(DDLEvents)
-
- def _set_parent(self, parent):
- """Associate with this SchemaEvent's parent object."""
-
- raise NotImplementedError()
-
- def _set_parent_with_dispatch(self, parent):
- self.dispatch.before_parent_attach(self, parent)
- self._set_parent(parent)
- self.dispatch.after_parent_attach(self, parent)
-
-class PoolEvents(event.Events):
- """Available events for :class:`.Pool`.
-
- The methods here define the name of an event as well
- as the names of members that are passed to listener
- functions.
-
- e.g.::
-
- from sqlalchemy import event
-
- def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
- "handle an on checkout event"
-
- event.listen(Pool, 'checkout', my_on_checkout)
-
- In addition to accepting the :class:`.Pool` class and :class:`.Pool` instances,
- :class:`.PoolEvents` also accepts :class:`.Engine` objects and
- the :class:`.Engine` class as targets, which will be resolved
- to the ``.pool`` attribute of the given engine or the :class:`.Pool`
- class::
-
- engine = create_engine("postgresql://scott:tiger@localhost/test")
-
- # will associate with engine.pool
- event.listen(engine, 'checkout', my_on_checkout)
-
- """
-
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.engine import Engine
- from sqlalchemy.pool import Pool
-
- if isinstance(target, type):
- if issubclass(target, Engine):
- return Pool
- elif issubclass(target, Pool):
- return target
- elif isinstance(target, Engine):
- return target.pool
- else:
- return target
-
- def connect(self, dbapi_connection, connection_record):
- """Called once for each new DB-API connection or Pool's ``creator()``.
-
- :param dbapi_connection:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- :param connection_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def first_connect(self, dbapi_connection, connection_record):
- """Called exactly once for the first DB-API connection.
-
- :param dbapi_connection:
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- :param connection_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def checkout(self, dbapi_connection, connection_record, connection_proxy):
- """Called when a connection is retrieved from the Pool.
-
- :param dbapi_connection:
- A raw DB-API connection
-
- :param connection_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- :param connection_proxy:
- The ``_ConnectionFairy`` which manages the connection for the span of
- the current checkout.
-
- If you raise an ``exc.DisconnectionError``, the current
- connection will be disposed and a fresh connection retrieved.
- Processing of all checkout listeners will abort and restart
- using the new connection.
- """
-
- def checkin(self, dbapi_connection, connection_record):
- """Called when a connection returns to the pool.
-
- Note that the connection may be closed, and may be None if the
- connection has been invalidated. ``checkin`` will not be called
- for detached connections. (They do not return to the pool.)
-
- :param dbapi_connection:
- A raw DB-API connection
-
- :param connection_record:
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
-class ConnectionEvents(event.Events):
- """Available events for :class:`.Connection`.
-
- The methods here define the name of an event as well
- as the names of members that are passed to listener
- functions.
-
- e.g.::
-
- from sqlalchemy import event, create_engine
-
- def before_execute(conn, clauseelement, multiparams, params):
- log.info("Received statement: %s" % clauseelement)
-
- engine = create_engine('postgresql://scott:tiger@localhost/test')
- event.listen(engine, "before_execute", before_execute)
-
- Some events allow modifiers to the listen() function.
-
- :param retval=False: Applies to the :meth:`.before_execute` and
- :meth:`.before_cursor_execute` events only. When True, the
- user-defined event function must have a return value, which
- is a tuple of parameters that replace the given statement
- and parameters. See those methods for a description of
- specific return arguments.
-
- """
-
- @classmethod
- def _listen(cls, target, identifier, fn, retval=False):
- target._has_events = True
-
- if not retval:
- if identifier == 'before_execute':
- orig_fn = fn
- def wrap(conn, clauseelement, multiparams, params):
- orig_fn(conn, clauseelement, multiparams, params)
- return clauseelement, multiparams, params
- fn = wrap
- elif identifier == 'before_cursor_execute':
- orig_fn = fn
- def wrap(conn, cursor, statement,
- parameters, context, executemany):
- orig_fn(conn, cursor, statement,
- parameters, context, executemany)
- return statement, parameters
- fn = wrap
-
- elif identifier not in ('before_execute', 'before_cursor_execute'):
- raise exc.ArgumentError(
- "Only the 'before_execute' and "
- "'before_cursor_execute' engine "
- "event listeners accept the 'retval=True' "
- "argument.")
- event.Events._listen(target, identifier, fn)
-
- def before_execute(self, conn, clauseelement, multiparams, params):
- """Intercept high level execute() events."""
-
- def after_execute(self, conn, clauseelement, multiparams, params, result):
- """Intercept high level execute() events."""
-
- def before_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
- """Intercept low-level cursor execute() events."""
-
- def after_cursor_execute(self, conn, cursor, statement,
- parameters, context, executemany):
- """Intercept low-level cursor execute() events."""
-
- def begin(self, conn):
- """Intercept begin() events."""
-
- def rollback(self, conn):
- """Intercept rollback() events."""
-
- def commit(self, conn):
- """Intercept commit() events."""
-
- def savepoint(self, conn, name=None):
- """Intercept savepoint() events."""
-
- def rollback_savepoint(self, conn, name, context):
- """Intercept rollback_savepoint() events."""
-
- def release_savepoint(self, conn, name, context):
- """Intercept release_savepoint() events."""
-
- def begin_twophase(self, conn, xid):
- """Intercept begin_twophase() events."""
-
- def prepare_twophase(self, conn, xid):
- """Intercept prepare_twophase() events."""
-
- def rollback_twophase(self, conn, xid, is_prepared):
- """Intercept rollback_twophase() events."""
-
- def commit_twophase(self, conn, xid, is_prepared):
- """Intercept commit_twophase() events."""
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/exc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/exc.py
deleted file mode 100755
index 3e88ee3a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/exc.py
+++ /dev/null
@@ -1,238 +0,0 @@
-# sqlalchemy/exc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Exceptions used with SQLAlchemy.
-
-The base exception class is :class:`.SQLAlchemyError`. Exceptions which are raised as a
-result of DBAPI exceptions are all subclasses of
-:class:`.DBAPIError`.
-
-"""
-
-
-class SQLAlchemyError(Exception):
- """Generic error class."""
-
-
-class ArgumentError(SQLAlchemyError):
- """Raised when an invalid or conflicting function argument is supplied.
-
- This error generally corresponds to construction time state errors.
-
- """
-
-
-class CircularDependencyError(SQLAlchemyError):
- """Raised by topological sorts when a circular dependency is detected"""
- def __init__(self, message, cycles, edges):
- message += ": cycles: %r all edges: %r" % (cycles, edges)
- SQLAlchemyError.__init__(self, message)
- self.cycles = cycles
- self.edges = edges
-
-class CompileError(SQLAlchemyError):
- """Raised when an error occurs during SQL compilation"""
-
-class IdentifierError(SQLAlchemyError):
- """Raised when a schema name is beyond the max character limit"""
-
-# Moved to orm.exc; compatibility definition installed by orm import until 0.6
-ConcurrentModificationError = None
-
-class DisconnectionError(SQLAlchemyError):
- """A disconnect is detected on a raw DB-API connection.
-
- This error is raised and consumed internally by a connection pool. It can
- be raised by a ``PoolListener`` so that the host pool forces a disconnect.
-
- """
-
-
-# Moved to orm.exc; compatibility definition installed by orm import until 0.6
-FlushError = None
-
-class TimeoutError(SQLAlchemyError):
- """Raised when a connection pool times out on getting a connection."""
-
-
-class InvalidRequestError(SQLAlchemyError):
- """SQLAlchemy was asked to do something it can't do.
-
- This error generally corresponds to runtime state errors.
-
- """
-
-class ResourceClosedError(InvalidRequestError):
- """An operation was requested from a connection, cursor, or other
- object that's in a closed state."""
-
-class NoSuchColumnError(KeyError, InvalidRequestError):
- """A nonexistent column is requested from a ``RowProxy``."""
-
-class NoReferenceError(InvalidRequestError):
- """Raised by ``ForeignKey`` to indicate a reference cannot be resolved."""
-
-class NoReferencedTableError(NoReferenceError):
- """Raised by ``ForeignKey`` when the referred ``Table`` cannot be located."""
-
- def __init__(self, message, tname):
- NoReferenceError.__init__(self, message)
- self.table_name = tname
-
-class NoReferencedColumnError(NoReferenceError):
- """Raised by ``ForeignKey`` when the referred ``Column`` cannot be located."""
-
- def __init__(self, message, tname, cname):
- NoReferenceError.__init__(self, message)
- self.table_name = tname
- self.column_name = cname
-
-class NoSuchTableError(InvalidRequestError):
- """Table does not exist or is not visible to a connection."""
-
-
-class UnboundExecutionError(InvalidRequestError):
- """SQL was attempted without a database connection to execute it on."""
-
-
-# Moved to orm.exc; compatibility definition installed by orm import until 0.6
-UnmappedColumnError = None
-
-class StatementError(SQLAlchemyError):
- """An error occurred during execution of a SQL statement.
-
- :class:`.StatementError` wraps the exception raised
- during execution, and features :attr:`.statement`
- and :attr:`.params` attributes which supply context regarding
- the specifics of the statement which had an issue.
-
- The wrapped exception object is available in
- the :attr:`.orig` attribute.
-
- """
-
- def __init__(self, message, statement, params, orig):
- SQLAlchemyError.__init__(self, message)
- self.statement = statement
- self.params = params
- self.orig = orig
-
- def __str__(self):
- if isinstance(self.params, (list, tuple)) and \
- len(self.params) > 10 and \
- isinstance(self.params[0], (list, dict, tuple)):
- return ' '.join((SQLAlchemyError.__str__(self),
- repr(self.statement),
- repr(self.params[:2]),
- '... and a total of %i bound parameter sets' % len(self.params)))
- return ' '.join((SQLAlchemyError.__str__(self),
- repr(self.statement), repr(self.params)))
-
-class DBAPIError(StatementError):
- """Raised when the execution of a database operation fails.
-
- ``DBAPIError`` wraps exceptions raised by the DB-API underlying the
- database operation. Driver-specific implementations of the standard
- DB-API exception types are wrapped by matching sub-types of SQLAlchemy's
- ``DBAPIError`` when possible. DB-API's ``Error`` type maps to
- ``DBAPIError`` in SQLAlchemy, otherwise the names are identical. Note
- that there is no guarantee that different DB-API implementations will
- raise the same exception type for any given error condition.
-
- :class:`.DBAPIError` features :attr:`.statement`
- and :attr:`.params` attributes which supply context regarding
- the specifics of the statement which had an issue, for the
- typical case when the error was raised within the context of
- emitting a SQL statement.
-
- The wrapped exception object is available in the :attr:`.orig` attribute.
- Its type and properties are DB-API implementation specific.
-
- """
-
- @classmethod
- def instance(cls, statement, params,
- orig,
- dbapi_base_err,
- connection_invalidated=False):
- # Don't ever wrap these, just return them directly as if
- # DBAPIError didn't exist.
- if isinstance(orig, (KeyboardInterrupt, SystemExit)):
- return orig
-
- if orig is not None:
- # not a DBAPI error, statement is present.
- # raise a StatementError
- if not isinstance(orig, dbapi_base_err) and statement:
- return StatementError(str(orig), statement, params, orig)
-
- name, glob = orig.__class__.__name__, globals()
- if name in glob and issubclass(glob[name], DBAPIError):
- cls = glob[name]
-
- return cls(statement, params, orig, connection_invalidated)
-
- def __init__(self, statement, params, orig, connection_invalidated=False):
- try:
- text = str(orig)
- except (KeyboardInterrupt, SystemExit):
- raise
- except Exception, e:
- text = 'Error in str() of DB-API-generated exception: ' + str(e)
- StatementError.__init__(
- self,
- '(%s) %s' % (orig.__class__.__name__, text),
- statement,
- params,
- orig
- )
- self.connection_invalidated = connection_invalidated
-
-
-class InterfaceError(DBAPIError):
- """Wraps a DB-API InterfaceError."""
-
-
-class DatabaseError(DBAPIError):
- """Wraps a DB-API DatabaseError."""
-
-
-class DataError(DatabaseError):
- """Wraps a DB-API DataError."""
-
-
-class OperationalError(DatabaseError):
- """Wraps a DB-API OperationalError."""
-
-
-class IntegrityError(DatabaseError):
- """Wraps a DB-API IntegrityError."""
-
-
-class InternalError(DatabaseError):
- """Wraps a DB-API InternalError."""
-
-
-class ProgrammingError(DatabaseError):
- """Wraps a DB-API ProgrammingError."""
-
-
-class NotSupportedError(DatabaseError):
- """Wraps a DB-API NotSupportedError."""
-
-
-# Warnings
-
-class SADeprecationWarning(DeprecationWarning):
- """Issued once per usage of a deprecated API."""
-
-
-class SAPendingDeprecationWarning(PendingDeprecationWarning):
- """Issued once per usage of a deprecated API."""
-
-
-class SAWarning(RuntimeWarning):
- """Issued at runtime."""
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/__init__.py
deleted file mode 100755
index a66421b2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# ext/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/associationproxy.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/associationproxy.py
deleted file mode 100755
index 31bfa90f..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/associationproxy.py
+++ /dev/null
@@ -1,912 +0,0 @@
-# ext/associationproxy.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Contain the ``AssociationProxy`` class.
-
-The ``AssociationProxy`` is a Python property object which provides
-transparent proxied access to the endpoint of an association object.
-
-See the example ``examples/association/proxied_association.py``.
-
-"""
-import itertools
-import operator
-import weakref
-from sqlalchemy import exceptions
-from sqlalchemy import orm
-from sqlalchemy import util
-from sqlalchemy.orm import collections
-from sqlalchemy.sql import not_
-
-
-def association_proxy(target_collection, attr, **kw):
- """Return a Python property implementing a view of a target
- attribute which references an attribute on members of the
- target.
-
- Implements a read/write view over an instance's *target_collection*,
- extracting *attr* from each member of the collection. The property acts
- somewhat like this list comprehension::
-
- [getattr(member, *attr*)
- for member in getattr(instance, *target_collection*)]
-
- Unlike the list comprehension, the collection returned by the property is
- always in sync with *target_collection*, and mutations made to either
- collection will be reflected in both.
-
- The association proxy also works with scalar attributes, which in
- turn reference scalar attributes or collections.
-
- The proxied property will mimic the collection type of the target
- (list, dict or set), or, in the case of a one-to-one relationship,
- a simple scalar value.
-
- :param target_collection: Name of the relationship attribute we'll proxy to,
- usually created with :func:`~sqlalchemy.orm.relationship`.
-
- :param attr: Attribute on the associated instance or instances we'll proxy for.
-
- For example, given a target collection of [obj1, obj2], a list created
- by this proxy property would look like [getattr(obj1, *attr*),
- getattr(obj2, *attr*)]
-
- If the relationship is one-to-one or otherwise uselist=False, then simply:
- getattr(obj, *attr*)
-
- :param creator: optional.
-
- When new items are added to this proxied collection, new instances of
- the class collected by the target collection will be created. For list
- and set collections, the target class constructor will be called with
- the 'value' for the new instance. For dict types, two arguments are
- passed: key and value.
-
- If you want to construct instances differently, supply a *creator*
- function that takes arguments as above and returns instances.
-
- For scalar relationships, creator() will be called if the target is None.
- If the target is present, set operations are proxied to setattr() on the
- associated object.
-
- If you have an associated object with multiple attributes, you may set
- up multiple association proxies mapping to different attributes. See
- the unit tests for examples, and for examples of how creator() functions
- can be used to construct the scalar relationship on-demand in this
- situation.
-
- :param \*\*kw: Passes along any other keyword arguments to
- :class:`.AssociationProxy`.
-
- """
- return AssociationProxy(target_collection, attr, **kw)
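A hedged end-to-end sketch of the proxy in action (model and table names are invented; the declarative extension is assumed):

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.associationproxy import association_proxy
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Keyword(Base):
        __tablename__ = 'keyword'
        id = Column(Integer, primary_key=True)
        user_id = Column(Integer, ForeignKey('user.id'))
        keyword = Column(String(50))

    class User(Base):
        __tablename__ = 'user'
        id = Column(Integer, primary_key=True)
        kw = relationship(Keyword)
        # view of Keyword.keyword across the .kw collection
        keywords = association_proxy(
            'kw', 'keyword',
            creator=lambda kw: Keyword(keyword=kw))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)

    u = User()
    u.keywords.append('cheese inspector')   # creates a Keyword behind the scenes
    assert u.keywords == ['cheese inspector']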
-
-
-class AssociationProxy(object):
- """A descriptor that presents a read/write view of an object attribute."""
-
- def __init__(self, target_collection, attr, creator=None,
- getset_factory=None, proxy_factory=None,
- proxy_bulk_set=None):
- """Arguments are:
-
- target_collection
- Name of the collection we'll proxy to, usually created with
- 'relationship()' in a mapper setup.
-
- attr
- Attribute on the collected instances we'll proxy for. For example,
- given a target collection of [obj1, obj2], a list created by this
- proxy property would look like [getattr(obj1, attr), getattr(obj2,
- attr)]
-
- creator
- Optional. When new items are added to this proxied collection, new
- instances of the class collected by the target collection will be
- created. For list and set collections, the target class constructor
- will be called with the 'value' for the new instance. For dict
- types, two arguments are passed: key and value.
-
- If you want to construct instances differently, supply a 'creator'
- function that takes arguments as above and returns instances.
-
- getset_factory
- Optional. Proxied attribute access is automatically handled by
- routines that get and set values based on the `attr` argument for
- this proxy.
-
- If you would like to customize this behavior, you may supply a
- `getset_factory` callable that produces a tuple of `getter` and
- `setter` functions. The factory is called with two arguments, the
- abstract type of the underlying collection and this proxy instance.
-
- proxy_factory
- Optional. The type of collection to emulate is determined by
- sniffing the target collection. If your collection type can't be
- determined by duck typing or you'd like to use a different
- collection implementation, you may supply a factory function to
- produce those collections. Only applicable to non-scalar relationships.
-
- proxy_bulk_set
- Optional, use with proxy_factory. See the _set() method for
- details.
-
- """
- self.target_collection = target_collection
- self.value_attr = attr
- self.creator = creator
- self.getset_factory = getset_factory
- self.proxy_factory = proxy_factory
- self.proxy_bulk_set = proxy_bulk_set
-
- self.owning_class = None
- self.key = '_%s_%s_%s' % (
- type(self).__name__, target_collection, id(self))
- self.collection_class = None
-
- def _get_property(self):
- return (orm.class_mapper(self.owning_class).
- get_property(self.target_collection))
-
- @util.memoized_property
- def target_class(self):
- """The class the proxy is attached to."""
- return self._get_property().mapper.class_
-
- @util.memoized_property
- def scalar(self):
- scalar = not self._get_property().uselist
- if scalar:
- self._initialize_scalar_accessors()
- return scalar
-
- @util.memoized_property
- def _value_is_scalar(self):
- return not self._get_property().\
- mapper.get_property(self.value_attr).uselist
-
- def __get__(self, obj, class_):
- if self.owning_class is None:
- self.owning_class = class_ or type(obj)
- if obj is None:
- return self
-
- if self.scalar:
- return self._scalar_get(getattr(obj, self.target_collection))
- else:
- try:
- # If the owning instance is reborn (orm session resurrect,
- # etc.), refresh the proxy cache.
- creator_id, proxy = getattr(obj, self.key)
- if id(obj) == creator_id:
- return proxy
- except AttributeError:
- pass
- proxy = self._new(_lazy_collection(obj, self.target_collection))
- setattr(obj, self.key, (id(obj), proxy))
- return proxy
-
- def __set__(self, obj, values):
- if self.owning_class is None:
- self.owning_class = type(obj)
-
- if self.scalar:
- creator = self.creator or self.target_class
- target = getattr(obj, self.target_collection)
- if target is None:
- setattr(obj, self.target_collection, creator(values))
- else:
- self._scalar_set(target, values)
- else:
- proxy = self.__get__(obj, None)
- if proxy is not values:
- proxy.clear()
- self._set(proxy, values)
-
- def __delete__(self, obj):
- if self.owning_class is None:
- self.owning_class = type(obj)
- delattr(obj, self.key)
-
- def _initialize_scalar_accessors(self):
- if self.getset_factory:
- get, set = self.getset_factory(None, self)
- else:
- get, set = self._default_getset(None)
- self._scalar_get, self._scalar_set = get, set
-
- def _default_getset(self, collection_class):
- attr = self.value_attr
- getter = operator.attrgetter(attr)
- if collection_class is dict:
- setter = lambda o, k, v: setattr(o, attr, v)
- else:
- setter = lambda o, v: setattr(o, attr, v)
- return getter, setter
-
- def _new(self, lazy_collection):
- creator = self.creator or self.target_class
- self.collection_class = util.duck_type_collection(lazy_collection())
-
- if self.proxy_factory:
- return self.proxy_factory(lazy_collection, creator, self.value_attr, self)
-
- if self.getset_factory:
- getter, setter = self.getset_factory(self.collection_class, self)
- else:
- getter, setter = self._default_getset(self.collection_class)
-
- if self.collection_class is list:
- return _AssociationList(lazy_collection, creator, getter, setter, self)
- elif self.collection_class is dict:
- return _AssociationDict(lazy_collection, creator, getter, setter, self)
- elif self.collection_class is set:
- return _AssociationSet(lazy_collection, creator, getter, setter, self)
- else:
- raise exceptions.ArgumentError(
- 'could not guess which interface to use for '
- 'collection_class "%s" backing "%s"; specify a '
- 'proxy_factory and proxy_bulk_set manually' %
- (self.collection_class.__name__, self.target_collection))
-
- def _inflate(self, proxy):
- creator = self.creator or self.target_class
-
- if self.getset_factory:
- getter, setter = self.getset_factory(self.collection_class, self)
- else:
- getter, setter = self._default_getset(self.collection_class)
-
- proxy.creator = creator
- proxy.getter = getter
- proxy.setter = setter
-
- def _set(self, proxy, values):
- if self.proxy_bulk_set:
- self.proxy_bulk_set(proxy, values)
- elif self.collection_class is list:
- proxy.extend(values)
- elif self.collection_class is dict:
- proxy.update(values)
- elif self.collection_class is set:
- proxy.update(values)
- else:
- raise exceptions.ArgumentError(
- 'no proxy_bulk_set supplied for custom '
- 'collection_class implementation')
-
- @property
- def _comparator(self):
- return self._get_property().comparator
-
- def any(self, criterion=None, **kwargs):
- if self._value_is_scalar:
- value_expr = getattr(self.target_class, self.value_attr).has(criterion, **kwargs)
- else:
- value_expr = getattr(self.target_class, self.value_attr).any(criterion, **kwargs)
-
- # check _value_is_scalar here, otherwise
- # we're scalar->scalar - call .any() so that
- # the "can't call any() on a scalar" msg is raised.
- if self.scalar and not self._value_is_scalar:
- return self._comparator.has(
- value_expr
- )
- else:
- return self._comparator.any(
- value_expr
- )
-
- def has(self, criterion=None, **kwargs):
- return self._comparator.has(
- getattr(self.target_class, self.value_attr).has(criterion, **kwargs)
- )
-
- def contains(self, obj):
- if self.scalar and not self._value_is_scalar:
- return self._comparator.has(
- getattr(self.target_class, self.value_attr).contains(obj)
- )
- else:
- return self._comparator.any(**{self.value_attr: obj})
-
- def __eq__(self, obj):
- return self._comparator.has(**{self.value_attr: obj})
-
- def __ne__(self, obj):
- return not_(self.__eq__(obj))
-
-
-class _lazy_collection(object):
- def __init__(self, obj, target):
- self.ref = weakref.ref(obj)
- self.target = target
-
- def __call__(self):
- obj = self.ref()
- if obj is None:
- raise exceptions.InvalidRequestError(
- "stale association proxy, parent object has gone out of "
- "scope")
- return getattr(obj, self.target)
-
- def __getstate__(self):
- return {'obj':self.ref(), 'target':self.target}
-
- def __setstate__(self, state):
- self.ref = weakref.ref(state['obj'])
- self.target = state['target']
-
-class _AssociationCollection(object):
- def __init__(self, lazy_collection, creator, getter, setter, parent):
- """Constructs an _AssociationCollection.
-
- This will always be a subclass of either _AssociationList,
- _AssociationSet, or _AssociationDict.
-
- lazy_collection
- A callable returning a list-based collection of entities (usually an
- object attribute managed by a SQLAlchemy relationship())
-
- creator
- A function that creates new target entities. Given one parameter:
- value. This assertion is assumed::
-
- obj = creator(somevalue)
- assert getter(obj) == somevalue
-
- getter
- A function. Given an associated object, return the 'value'.
-
- setter
- A function. Given an associated object and a value, store that
- value on the object.
-
- """
- self.lazy_collection = lazy_collection
- self.creator = creator
- self.getter = getter
- self.setter = setter
- self.parent = parent
-
- col = property(lambda self: self.lazy_collection())
-
- def __len__(self):
- return len(self.col)
-
- def __nonzero__(self):
- return bool(self.col)
-
- def __getstate__(self):
- return {'parent':self.parent, 'lazy_collection':self.lazy_collection}
-
- def __setstate__(self, state):
- self.parent = state['parent']
- self.lazy_collection = state['lazy_collection']
- self.parent._inflate(self)
-
-class _AssociationList(_AssociationCollection):
- """Generic, converting, list-to-list proxy."""
-
- def _create(self, value):
- return self.creator(value)
-
- def _get(self, object):
- return self.getter(object)
-
- def _set(self, object, value):
- return self.setter(object, value)
-
- def __getitem__(self, index):
- return self._get(self.col[index])
-
- def __setitem__(self, index, value):
- if not isinstance(index, slice):
- self._set(self.col[index], value)
- else:
- start = index.start or 0
- if index.stop is None:
- stop = len(self)
- elif index.stop < 0:
- stop = len(self) + index.stop
- else:
- stop = index.stop
- step = index.step or 1
-
- rng = range(start, stop, step)
- if step == 1:
- for i in rng:
- del self[start]
- i = start
- for item in value:
- self.insert(i, item)
- i += 1
- else:
- if len(value) != len(rng):
- raise ValueError(
- "attempt to assign sequence of size %s to "
- "extended slice of size %s" % (len(value),
- len(rng)))
- for i, item in zip(rng, value):
- self._set(self.col[i], item)
-
- def __delitem__(self, index):
- del self.col[index]
-
- def __contains__(self, value):
- for member in self.col:
- # testlib.pragma exempt:__eq__
- if self._get(member) == value:
- return True
- return False
-
- def __getslice__(self, start, end):
- return [self._get(member) for member in self.col[start:end]]
-
- def __setslice__(self, start, end, values):
- members = [self._create(v) for v in values]
- self.col[start:end] = members
-
- def __delslice__(self, start, end):
- del self.col[start:end]
-
- def __iter__(self):
- """Iterate over proxied values.
-
- For the actual domain objects, iterate over .col instead or
- just use the underlying collection directly from its property
- on the parent.
- """
-
- for member in self.col:
- yield self._get(member)
-
- def append(self, value):
- item = self._create(value)
- self.col.append(item)
-
- def count(self, value):
- return sum([1 for _ in
- itertools.ifilter(lambda v: v == value, iter(self))])
-
- def extend(self, values):
- for v in values:
- self.append(v)
-
- def insert(self, index, value):
- self.col[index:index] = [self._create(value)]
-
- def pop(self, index=-1):
- return self.getter(self.col.pop(index))
-
- def remove(self, value):
- for i, val in enumerate(self):
- if val == value:
- del self.col[i]
- return
- raise ValueError("value not in list")
-
- def reverse(self):
- """Not supported, use reversed(mylist)"""
-
- raise NotImplementedError
-
- def sort(self):
- """Not supported, use sorted(mylist)"""
-
- raise NotImplementedError
-
- def clear(self):
- del self.col[0:len(self.col)]
-
- def __eq__(self, other):
- return list(self) == other
-
- def __ne__(self, other):
- return list(self) != other
-
- def __lt__(self, other):
- return list(self) < other
-
- def __le__(self, other):
- return list(self) <= other
-
- def __gt__(self, other):
- return list(self) > other
-
- def __ge__(self, other):
- return list(self) >= other
-
- def __cmp__(self, other):
- return cmp(list(self), other)
-
- def __add__(self, iterable):
- try:
- other = list(iterable)
- except TypeError:
- return NotImplemented
- return list(self) + other
-
- def __radd__(self, iterable):
- try:
- other = list(iterable)
- except TypeError:
- return NotImplemented
- return other + list(self)
-
- def __mul__(self, n):
- if not isinstance(n, int):
- return NotImplemented
- return list(self) * n
- __rmul__ = __mul__
-
- def __iadd__(self, iterable):
- self.extend(iterable)
- return self
-
- def __imul__(self, n):
- # unlike a regular list *=, proxied __imul__ will generate unique
- # backing objects for each copy. *= on proxied lists is a bit of
- # a stretch anyhow, and this interpretation of the __imul__ contract
- # is more plausibly useful than copying the backing objects.
- if not isinstance(n, int):
- return NotImplemented
- if n == 0:
- self.clear()
- elif n > 1:
- self.extend(list(self) * (n - 1))
- return self
-
- def copy(self):
- return list(self)
-
- def __repr__(self):
- return repr(list(self))
-
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
- not func.__doc__ and hasattr(list, func_name)):
- func.__doc__ = getattr(list, func_name).__doc__
- del func_name, func
-
-
-_NotProvided = util.symbol('_NotProvided')
-class _AssociationDict(_AssociationCollection):
- """Generic, converting, dict-to-dict proxy."""
-
- def _create(self, key, value):
- return self.creator(key, value)
-
- def _get(self, object):
- return self.getter(object)
-
- def _set(self, object, key, value):
- return self.setter(object, key, value)
-
- def __getitem__(self, key):
- return self._get(self.col[key])
-
- def __setitem__(self, key, value):
- if key in self.col:
- self._set(self.col[key], key, value)
- else:
- self.col[key] = self._create(key, value)
-
- def __delitem__(self, key):
- del self.col[key]
-
- def __contains__(self, key):
- # testlib.pragma exempt:__hash__
- return key in self.col
-
- def has_key(self, key):
- # testlib.pragma exempt:__hash__
- return key in self.col
-
- def __iter__(self):
- return self.col.iterkeys()
-
- def clear(self):
- self.col.clear()
-
- def __eq__(self, other):
- return dict(self) == other
-
- def __ne__(self, other):
- return dict(self) != other
-
- def __lt__(self, other):
- return dict(self) < other
-
- def __le__(self, other):
- return dict(self) <= other
-
- def __gt__(self, other):
- return dict(self) > other
-
- def __ge__(self, other):
- return dict(self) >= other
-
- def __cmp__(self, other):
- return cmp(dict(self), other)
-
- def __repr__(self):
- return repr(dict(self.items()))
-
- def get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
-
- def setdefault(self, key, default=None):
- if key not in self.col:
- self.col[key] = self._create(key, default)
- return default
- else:
- return self[key]
-
- def keys(self):
- return self.col.keys()
-
- def iterkeys(self):
- return self.col.iterkeys()
-
- def values(self):
-        return [self._get(member) for member in self.col.values()]
-
- def itervalues(self):
- for key in self.col:
- yield self._get(self.col[key])
- raise StopIteration
-
- def items(self):
- return [(k, self._get(self.col[k])) for k in self]
-
- def iteritems(self):
- for key in self.col:
- yield (key, self._get(self.col[key]))
- raise StopIteration
-
- def pop(self, key, default=_NotProvided):
- if default is _NotProvided:
- member = self.col.pop(key)
- else:
- member = self.col.pop(key, default)
- return self._get(member)
-
- def popitem(self):
- item = self.col.popitem()
- return (item[0], self._get(item[1]))
-
- def update(self, *a, **kw):
- if len(a) > 1:
- raise TypeError('update expected at most 1 arguments, got %i' %
- len(a))
- elif len(a) == 1:
- seq_or_map = a[0]
- for item in seq_or_map:
- if isinstance(item, tuple):
- self[item[0]] = item[1]
- else:
- self[item] = seq_or_map[item]
-
-        for key, value in kw.items():
- self[key] = value
-
- def copy(self):
- return dict(self.items())
-
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
- not func.__doc__ and hasattr(dict, func_name)):
- func.__doc__ = getattr(dict, func_name).__doc__
- del func_name, func
-
-
-class _AssociationSet(_AssociationCollection):
- """Generic, converting, set-to-set proxy."""
-
- def _create(self, value):
- return self.creator(value)
-
- def _get(self, object):
- return self.getter(object)
-
- def _set(self, object, value):
- return self.setter(object, value)
-
- def __len__(self):
- return len(self.col)
-
- def __nonzero__(self):
- if self.col:
- return True
- else:
- return False
-
- def __contains__(self, value):
- for member in self.col:
- # testlib.pragma exempt:__eq__
- if self._get(member) == value:
- return True
- return False
-
- def __iter__(self):
- """Iterate over proxied values.
-
- For the actual domain objects, iterate over .col instead or just use
- the underlying collection directly from its property on the parent.
-
- """
- for member in self.col:
- yield self._get(member)
- raise StopIteration
-
- def add(self, value):
- if value not in self:
- self.col.add(self._create(value))
-
- # for discard and remove, choosing a more expensive check strategy rather
- # than call self.creator()
- def discard(self, value):
- for member in self.col:
- if self._get(member) == value:
- self.col.discard(member)
- break
-
- def remove(self, value):
- for member in self.col:
- if self._get(member) == value:
- self.col.discard(member)
- return
- raise KeyError(value)
-
- def pop(self):
- if not self.col:
- raise KeyError('pop from an empty set')
- member = self.col.pop()
- return self._get(member)
-
- def update(self, other):
- for value in other:
- self.add(value)
-
- def __ior__(self, other):
- if not collections._set_binops_check_strict(self, other):
- return NotImplemented
- for value in other:
- self.add(value)
- return self
-
-    def _to_set(self):
-        # plain-set snapshot of the proxied values; named so as not to
-        # shadow the _set() converter defined above
-        return set(iter(self))
-
- def union(self, other):
- return set(self).union(other)
-
- __or__ = union
-
- def difference(self, other):
- return set(self).difference(other)
-
- __sub__ = difference
-
- def difference_update(self, other):
- for value in other:
- self.discard(value)
-
- def __isub__(self, other):
- if not collections._set_binops_check_strict(self, other):
- return NotImplemented
- for value in other:
- self.discard(value)
- return self
-
- def intersection(self, other):
- return set(self).intersection(other)
-
- __and__ = intersection
-
- def intersection_update(self, other):
- want, have = self.intersection(other), set(self)
-
- remove, add = have - want, want - have
-
- for value in remove:
- self.remove(value)
- for value in add:
- self.add(value)
-
- def __iand__(self, other):
- if not collections._set_binops_check_strict(self, other):
- return NotImplemented
- want, have = self.intersection(other), set(self)
-
- remove, add = have - want, want - have
-
- for value in remove:
- self.remove(value)
- for value in add:
- self.add(value)
- return self
-
- def symmetric_difference(self, other):
- return set(self).symmetric_difference(other)
-
- __xor__ = symmetric_difference
-
- def symmetric_difference_update(self, other):
- want, have = self.symmetric_difference(other), set(self)
-
- remove, add = have - want, want - have
-
- for value in remove:
- self.remove(value)
- for value in add:
- self.add(value)
-
- def __ixor__(self, other):
- if not collections._set_binops_check_strict(self, other):
- return NotImplemented
- want, have = self.symmetric_difference(other), set(self)
-
- remove, add = have - want, want - have
-
- for value in remove:
- self.remove(value)
- for value in add:
- self.add(value)
- return self
-
- def issubset(self, other):
- return set(self).issubset(other)
-
- def issuperset(self, other):
- return set(self).issuperset(other)
-
- def clear(self):
- self.col.clear()
-
- def copy(self):
- return set(self)
-
- def __eq__(self, other):
- return set(self) == other
-
- def __ne__(self, other):
- return set(self) != other
-
- def __lt__(self, other):
- return set(self) < other
-
- def __le__(self, other):
- return set(self) <= other
-
- def __gt__(self, other):
- return set(self) > other
-
- def __ge__(self, other):
- return set(self) >= other
-
- def __repr__(self):
- return repr(set(self))
-
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
- not func.__doc__ and hasattr(set, func_name)):
- func.__doc__ = getattr(set, func_name).__doc__
- del func_name, func
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/compiler.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/compiler.py
deleted file mode 100755
index 7b083774..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/compiler.py
+++ /dev/null
@@ -1,355 +0,0 @@
-# ext/compiler.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides an API for creation of custom ClauseElements and compilers.
-
-Synopsis
-========
-
-Usage involves the creation of one or more :class:`~sqlalchemy.sql.expression.ClauseElement`
-subclasses and one or more callables defining its compilation::
-
- from sqlalchemy.ext.compiler import compiles
- from sqlalchemy.sql.expression import ColumnClause
-
- class MyColumn(ColumnClause):
- pass
-
- @compiles(MyColumn)
- def compile_mycolumn(element, compiler, **kw):
- return "[%s]" % element.name
-
-Above, ``MyColumn`` extends :class:`~sqlalchemy.sql.expression.ColumnClause`,
-the base expression element for named column objects. The ``compiles``
-decorator registers itself with the ``MyColumn`` class so that it is invoked
-when the object is compiled to a string::
-
- from sqlalchemy import select
-
- s = select([MyColumn('x'), MyColumn('y')])
- print str(s)
-
-Produces::
-
- SELECT [x], [y]
-
-Dialect-specific compilation rules
-==================================
-
-Compilers can also be made dialect-specific. The appropriate compiler will be
-invoked for the dialect in use::
-
- from sqlalchemy.schema import DDLElement
-
- class AlterColumn(DDLElement):
-
- def __init__(self, column, cmd):
- self.column = column
- self.cmd = cmd
-
- @compiles(AlterColumn)
- def visit_alter_column(element, compiler, **kw):
- return "ALTER COLUMN %s ..." % element.column.name
-
- @compiles(AlterColumn, 'postgresql')
- def visit_alter_column(element, compiler, **kw):
- return "ALTER TABLE %s ALTER COLUMN %s ..." % (element.table.name, element.column.name)
-
-The second ``visit_alter_column`` will be invoked when any ``postgresql`` dialect is used.
-
-Compiling sub-elements of a custom expression construct
-=======================================================
-
-The ``compiler`` argument is the :class:`~sqlalchemy.engine.base.Compiled`
-object in use. This object can be inspected for any information about the
-in-progress compilation, including ``compiler.dialect``,
-``compiler.statement`` etc. The :class:`~sqlalchemy.sql.compiler.SQLCompiler`
-and :class:`~sqlalchemy.sql.compiler.DDLCompiler` both include a ``process()``
-method which can be used for compilation of embedded attributes::
-
- from sqlalchemy.sql.expression import Executable, ClauseElement
-
- class InsertFromSelect(Executable, ClauseElement):
- def __init__(self, table, select):
- self.table = table
- self.select = select
-
- @compiles(InsertFromSelect)
- def visit_insert_from_select(element, compiler, **kw):
- return "INSERT INTO %s (%s)" % (
- compiler.process(element.table, asfrom=True),
- compiler.process(element.select)
- )
-
- insert = InsertFromSelect(t1, select([t1]).where(t1.c.x>5))
- print insert
-
-Produces::
-
- "INSERT INTO mytable (SELECT mytable.x, mytable.y, mytable.z FROM mytable WHERE mytable.x > :x_1)"
-
-Cross Compiling between SQL and DDL compilers
----------------------------------------------
-
-SQL and DDL constructs are each compiled using different base compilers - ``SQLCompiler``
-and ``DDLCompiler``. A common need is to access the compilation rules of SQL expressions
-from within a DDL expression. The ``DDLCompiler`` includes an accessor
-``sql_compiler`` for this reason, such as below where we generate a CHECK
-constraint that embeds a SQL expression::
-
- @compiles(MyConstraint)
- def compile_my_constraint(constraint, ddlcompiler, **kw):
- return "CONSTRAINT %s CHECK (%s)" % (
- constraint.name,
- ddlcompiler.sql_compiler.process(constraint.expression)
- )
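-
-The ``MyConstraint`` construct above is user-defined and not part of the
-library; a minimal sketch, assuming only that it carries the ``name`` and
-``expression`` attributes consumed by the compiler above, might be::
-
-    from sqlalchemy.schema import Constraint
-
-    class MyConstraint(Constraint):
-        def __init__(self, name, expression):
-            super(MyConstraint, self).__init__(name=name)
-            self.expression = expression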
-
-Changing the default compilation of existing constructs
-=======================================================
-
-The compiler extension applies just as well to existing constructs. When overriding
-the compilation of a built-in SQL construct, the @compiles decorator is invoked upon
-the appropriate class (be sure to use the class, i.e. ``Insert`` or ``Select``,
-instead of the creation function such as ``insert()`` or ``select()``).
-
-Within the new compilation function, to get at the "original" compilation routine,
-use the appropriate visit_XXX method directly - calling compiler.process() would
-invoke the overriding routine again and cause an endless loop. For example, to add
-a "prefix" to all insert statements::
-
- from sqlalchemy.sql.expression import Insert
-
- @compiles(Insert)
- def prefix_inserts(insert, compiler, **kw):
- return compiler.visit_insert(insert.prefix_with("some prefix"), **kw)
-
-The above compiler will prefix all INSERT statements with "some prefix" when compiled.
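-
-For example, with a hypothetical table ``t1`` having a single column ``x``::
-
-    from sqlalchemy.sql import table, column
-
-    t1 = table('t1', column('x'))
-    print t1.insert()
-
-The statement renders along the lines of::
-
-    INSERT some prefix INTO t1 (x) VALUES (:x)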
-
-.. _type_compilation_extension:
-
-Changing Compilation of Types
-=============================
-
-The ``compiler`` extension works for types, too, such as below where we implement the MS-SQL specific 'max' keyword for ``String``/``VARCHAR``::
-
- @compiles(String, 'mssql')
- @compiles(VARCHAR, 'mssql')
- def compile_varchar(element, compiler, **kw):
- if element.length == 'max':
- return "VARCHAR('max')"
- else:
- return compiler.visit_VARCHAR(element, **kw)
-
- foo = Table('foo', metadata,
- Column('data', VARCHAR('max'))
- )
-
-Subclassing Guidelines
-======================
-
-A big part of using the compiler extension is subclassing SQLAlchemy
-expression constructs. To make this easier, the expression and
-schema packages feature a set of "bases" intended for common tasks.
-A synopsis is as follows:
-
-* :class:`~sqlalchemy.sql.expression.ClauseElement` - This is the root
- expression class. Any SQL expression can be derived from this base, and is
- probably the best choice for longer constructs such as specialized INSERT
- statements.
-
-* :class:`~sqlalchemy.sql.expression.ColumnElement` - The root of all
- "column-like" elements. Anything that you'd place in the "columns" clause of
- a SELECT statement (as well as order by and group by) can derive from this -
- the object will automatically have Python "comparison" behavior.
-
-  :class:`~sqlalchemy.sql.expression.ColumnElement` classes want to have a
-  ``type`` member which is the expression's return type. This can be established
-  at the instance level in the constructor, or at the class level if it's
-  generally constant::
-
- class timestamp(ColumnElement):
- type = TIMESTAMP()
-
-* :class:`~sqlalchemy.sql.expression.FunctionElement` - This is a hybrid of a
- ``ColumnElement`` and a "from clause" like object, and represents a SQL
- function or stored procedure type of call. Since most databases support
-  statements along the lines of "SELECT FROM <some function>",
- ``FunctionElement`` adds in the ability to be used in the FROM clause of a
- ``select()`` construct::
-
- from sqlalchemy.sql.expression import FunctionElement
-
- class coalesce(FunctionElement):
- name = 'coalesce'
-
- @compiles(coalesce)
- def compile(element, compiler, **kw):
- return "coalesce(%s)" % compiler.process(element.clauses)
-
- @compiles(coalesce, 'oracle')
- def compile(element, compiler, **kw):
- if len(element.clauses) > 2:
- raise TypeError("coalesce only supports two arguments on Oracle")
- return "nvl(%s)" % compiler.process(element.clauses)
-
-* :class:`~sqlalchemy.schema.DDLElement` - The root of all DDL expressions,
- like CREATE TABLE, ALTER TABLE, etc. Compilation of ``DDLElement``
- subclasses is issued by a ``DDLCompiler`` instead of a ``SQLCompiler``.
- ``DDLElement`` also features ``Table`` and ``MetaData`` event hooks via the
- ``execute_at()`` method, allowing the construct to be invoked during CREATE
- TABLE and DROP TABLE sequences.
-
-* :class:`~sqlalchemy.sql.expression.Executable` - This is a mixin which should be
- used with any expression class that represents a "standalone" SQL statement that
- can be passed directly to an ``execute()`` method. It is already implicit
- within ``DDLElement`` and ``FunctionElement``.
-
-Further Examples
-================
-
-"UTC timestamp" function
--------------------------
-
-A function that works like "CURRENT_TIMESTAMP" except it applies the appropriate
-conversions so that the time is in UTC time. Timestamps are best stored in relational
-databases as UTC, without time zones: UTC so that your database doesn't think time has
-gone backwards in the hour when daylight saving time ends, and without time zones
-because time zones are like character encodings - they're best applied only at the
-endpoints of an application (i.e. convert to UTC upon user input, re-apply the
-desired time zone upon display).
-
-For Postgresql and Microsoft SQL Server::
-
- from sqlalchemy.sql import expression
- from sqlalchemy.ext.compiler import compiles
- from sqlalchemy.types import DateTime
-
- class utcnow(expression.FunctionElement):
- type = DateTime()
-
- @compiles(utcnow, 'postgresql')
- def pg_utcnow(element, compiler, **kw):
- return "TIMEZONE('utc', CURRENT_TIMESTAMP)"
-
- @compiles(utcnow, 'mssql')
- def ms_utcnow(element, compiler, **kw):
- return "GETUTCDATE()"
-
-Example usage::
-
- from sqlalchemy import (
- Table, Column, Integer, String, DateTime, MetaData
- )
- metadata = MetaData()
- event = Table("event", metadata,
- Column("id", Integer, primary_key=True),
- Column("description", String(50), nullable=False),
- Column("timestamp", DateTime, server_default=utcnow())
- )
-
-"GREATEST" function
--------------------
-
-The "GREATEST" function is given any number of arguments and returns the one that is
-of the highest value - it's equivalent to Python's ``max`` function. A SQL
-standard version versus a CASE based version which only accommodates two
-arguments::
-
- from sqlalchemy.sql import expression
- from sqlalchemy.ext.compiler import compiles
- from sqlalchemy.types import Numeric
-
- class greatest(expression.FunctionElement):
- type = Numeric()
- name = 'greatest'
-
- @compiles(greatest)
- def default_greatest(element, compiler, **kw):
- return compiler.visit_function(element)
-
- @compiles(greatest, 'sqlite')
- @compiles(greatest, 'mssql')
- @compiles(greatest, 'oracle')
- def case_greatest(element, compiler, **kw):
- arg1, arg2 = list(element.clauses)
- return "CASE WHEN %s > %s THEN %s ELSE %s END" % (
- compiler.process(arg1),
- compiler.process(arg2),
- compiler.process(arg1),
- compiler.process(arg2),
- )
-
-Example usage::
-
- Session.query(Account).\\
- filter(
- greatest(
- Account.checking_balance,
- Account.savings_balance) > 10000
- )
-
-"false" expression
-------------------
-
-Render a "false" constant expression, rendering as "0" on platforms that don't have a "false" constant::
-
- from sqlalchemy.sql import expression
- from sqlalchemy.ext.compiler import compiles
-
- class sql_false(expression.ColumnElement):
- pass
-
- @compiles(sql_false)
- def default_false(element, compiler, **kw):
- return "false"
-
- @compiles(sql_false, 'mssql')
- @compiles(sql_false, 'mysql')
- @compiles(sql_false, 'oracle')
- def int_false(element, compiler, **kw):
- return "0"
-
-Example usage::
-
- from sqlalchemy import select, union_all
-
- exp = union_all(
- select([users.c.name, sql_false().label("enrolled")]),
- select([customers.c.name, customers.c.enrolled])
- )
-
-"""
-
-def compiles(class_, *specs):
- def decorate(fn):
- existing = class_.__dict__.get('_compiler_dispatcher', None)
- existing_dispatch = class_.__dict__.get('_compiler_dispatch')
- if not existing:
- existing = _dispatcher()
-
- if existing_dispatch:
- existing.specs['default'] = existing_dispatch
-
- # TODO: why is the lambda needed ?
- setattr(class_, '_compiler_dispatch', lambda *arg, **kw: existing(*arg, **kw))
- setattr(class_, '_compiler_dispatcher', existing)
-
- if specs:
- for s in specs:
- existing.specs[s] = fn
-
- else:
- existing.specs['default'] = fn
- return fn
- return decorate
-
-class _dispatcher(object):
- def __init__(self):
- self.specs = {}
-
- def __call__(self, element, compiler, **kw):
- # TODO: yes, this could also switch off of DBAPI in use.
- fn = self.specs.get(compiler.dialect.name, None)
- if not fn:
- fn = self.specs['default']
- return fn(element, compiler, **kw)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/declarative.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/declarative.py
deleted file mode 100755
index 62a11705..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/declarative.py
+++ /dev/null
@@ -1,1425 +0,0 @@
-# ext/declarative.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Synopsis
-========
-
-SQLAlchemy object-relational configuration involves the
-combination of :class:`.Table`, :func:`.mapper`, and class
-objects to define a mapped class.
-:mod:`~sqlalchemy.ext.declarative` allows all three to be
-expressed at once within the class declaration. As much as
-possible, regular SQLAlchemy schema and ORM constructs are
-used directly, so that configuration between "classical" ORM
-usage and declarative remains highly similar.
-
-As a simple example::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
-
- class SomeClass(Base):
- __tablename__ = 'some_table'
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
-
-Above, the :func:`declarative_base` callable returns a new base class from
-which all mapped classes should inherit. When the class definition is
-completed, a new :class:`.Table` and
-:func:`.mapper` will have been generated.
-
-The resulting table and mapper are accessible via
-``__table__`` and ``__mapper__`` attributes on the
-``SomeClass`` class::
-
- # access the mapped Table
- SomeClass.__table__
-
- # access the Mapper
- SomeClass.__mapper__
-
-Defining Attributes
-===================
-
-In the previous example, the :class:`.Column` objects are
-automatically named with the name of the attribute to which they are
-assigned.
-
-To name columns explicitly with a name distinct from their mapped attribute,
-just give the column a name. Below, column "some_table_id" is mapped to the
-"id" attribute of `SomeClass`, but in SQL will be represented as "some_table_id"::
-
- class SomeClass(Base):
- __tablename__ = 'some_table'
- id = Column("some_table_id", Integer, primary_key=True)
-
-Attributes may be added to the class after its construction, and they will be
-added to the underlying :class:`.Table` and
-:func:`.mapper()` definitions as appropriate::
-
- SomeClass.data = Column('data', Unicode)
- SomeClass.related = relationship(RelatedInfo)
-
-Classes which are constructed using declarative can interact freely
-with classes that are mapped explicitly with :func:`mapper`.
-
-It is recommended, though not required, that all tables
-share the same underlying :class:`~sqlalchemy.schema.MetaData` object,
-so that string-configured :class:`~sqlalchemy.schema.ForeignKey`
-references can be resolved without issue.
-
-Accessing the MetaData
-=======================
-
-The :func:`declarative_base` base class contains a
-:class:`.MetaData` object where newly defined
-:class:`.Table` objects are collected. This object is
-intended to be accessed directly for
-:class:`.MetaData`-specific operations. For example, to issue
-CREATE statements for all tables::
-
- engine = create_engine('sqlite://')
- Base.metadata.create_all(engine)
-
-The usual techniques of associating :class:`.MetaData` with :class:`.Engine`
-apply, such as assigning to the ``bind`` attribute::
-
- Base.metadata.bind = create_engine('sqlite://')
-
-To associate the engine with the :func:`declarative_base` at time
-of construction, the ``bind`` argument is accepted::
-
- Base = declarative_base(bind=create_engine('sqlite://'))
-
-:func:`declarative_base` can also receive a pre-existing
-:class:`.MetaData` object, which allows a
-declarative setup to be associated with an already
-existing traditional collection of :class:`~sqlalchemy.schema.Table`
-objects::
-
- mymetadata = MetaData()
- Base = declarative_base(metadata=mymetadata)
-
-Configuring Relationships
-=========================
-
-Relationships to other classes are done in the usual way, with the added
-feature that the class specified to :func:`~sqlalchemy.orm.relationship`
-may be a string name. The "class registry" associated with ``Base``
-is used at mapper compilation time to resolve the name into the actual
-class object, which is expected to have been defined once the mapper
-configuration is used::
-
- class User(Base):
- __tablename__ = 'users'
-
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
- addresses = relationship("Address", backref="user")
-
- class Address(Base):
- __tablename__ = 'addresses'
-
- id = Column(Integer, primary_key=True)
- email = Column(String(50))
- user_id = Column(Integer, ForeignKey('users.id'))
-
-Column constructs, since they are just that, are immediately usable,
-as below where we define a primary join condition on the ``Address``
-class using them::
-
- class Address(Base):
- __tablename__ = 'addresses'
-
- id = Column(Integer, primary_key=True)
- email = Column(String(50))
- user_id = Column(Integer, ForeignKey('users.id'))
- user = relationship(User, primaryjoin=user_id == User.id)
-
-In addition to the main argument for :func:`~sqlalchemy.orm.relationship`,
-other arguments which depend upon the columns present on an as-yet
-undefined class may also be specified as strings. These strings are
-evaluated as Python expressions. The full namespace available within
-this evaluation includes all classes mapped for this declarative base,
-as well as the contents of the ``sqlalchemy`` package, including
-expression functions like :func:`~sqlalchemy.sql.expression.desc` and
-:attr:`~sqlalchemy.sql.expression.func`::
-
- class User(Base):
- # ....
- addresses = relationship("Address",
- order_by="desc(Address.email)",
- primaryjoin="Address.user_id==User.id")
-
-As an alternative to string-based attributes, attributes may also be
-defined after all classes have been created. Just add them to the target
-class after the fact::
-
- User.addresses = relationship(Address,
- primaryjoin=Address.user_id==User.id)
-
-Configuring Many-to-Many Relationships
-======================================
-
-Many-to-many relationships are also declared in the same way
-with declarative as with traditional mappings. The
-``secondary`` argument to
-:func:`.relationship` is as usual passed a
-:class:`.Table` object, which is typically declared in the
-traditional way. The :class:`.Table` usually shares
-the :class:`.MetaData` object used by the declarative base::
-
- keywords = Table(
- 'keywords', Base.metadata,
- Column('author_id', Integer, ForeignKey('authors.id')),
- Column('keyword_id', Integer, ForeignKey('keywords.id'))
- )
-
- class Author(Base):
- __tablename__ = 'authors'
- id = Column(Integer, primary_key=True)
- keywords = relationship("Keyword", secondary=keywords)
-
-As with traditional mapping, it's generally not a good idea to use
-a :class:`.Table` as the "secondary" argument which is also mapped to
-a class, unless the :func:`.relationship` is declared with ``viewonly=True``.
-Otherwise, the unit-of-work system may attempt duplicate INSERT and
-DELETE statements against the underlying table.
-
-.. _declarative_sql_expressions:
-
-Defining SQL Expressions
-========================
-
-The usage of :func:`.column_property` with Declarative to define
-load-time, mapped SQL expressions is
-pretty much the same as that described in
-:ref:`mapper_sql_expressions`. Local columns within the same
-class declaration can be referenced directly::
-
- class User(Base):
- __tablename__ = 'user'
- id = Column(Integer, primary_key=True)
- firstname = Column(String)
- lastname = Column(String)
- fullname = column_property(
- firstname + " " + lastname
- )
-
-Correlated subqueries reference the :class:`.Column` objects they
-need either from the local class definition or from remote
-classes::
-
- from sqlalchemy.sql import func
-
- class Address(Base):
- __tablename__ = 'address'
-
- id = Column('id', Integer, primary_key=True)
- user_id = Column(Integer, ForeignKey('user.id'))
-
- class User(Base):
- __tablename__ = 'user'
-
- id = Column(Integer, primary_key=True)
- name = Column(String)
-
- address_count = column_property(
- select([func.count(Address.id)]).\\
- where(Address.user_id==id)
- )
-
-In the case that the ``address_count`` attribute above doesn't have access to
-``Address`` when ``User`` is defined, the ``address_count`` attribute should
-be added to ``User`` when both ``User`` and ``Address`` are available (i.e.
-there is no string based "late compilation" feature like there is with
-:func:`.relationship` at this time). Note we reference the ``id`` column
-attribute of ``User`` with its class when we are no longer in the declaration
-of the ``User`` class::
-
- User.address_count = column_property(
- select([func.count(Address.id)]).\\
- where(Address.user_id==User.id)
- )
-
-Table Configuration
-===================
-
-Table arguments other than the name, metadata, and mapped Column
-arguments are specified using the ``__table_args__`` class attribute.
-This attribute accommodates both positional as well as keyword
-arguments that are normally sent to the
-:class:`~sqlalchemy.schema.Table` constructor.
-The attribute can be specified in one of two forms. One is as a
-dictionary::
-
- class MyClass(Base):
- __tablename__ = 'sometable'
- __table_args__ = {'mysql_engine':'InnoDB'}
-
-The other, a tuple, where each argument is positional
-(usually constraints)::
-
- class MyClass(Base):
- __tablename__ = 'sometable'
- __table_args__ = (
- ForeignKeyConstraint(['id'], ['remote_table.id']),
- UniqueConstraint('foo'),
- )
-
-Keyword arguments can be specified with the above form by
-specifying the last argument as a dictionary::
-
- class MyClass(Base):
- __tablename__ = 'sometable'
- __table_args__ = (
- ForeignKeyConstraint(['id'], ['remote_table.id']),
- UniqueConstraint('foo'),
- {'autoload':True}
- )
-
-Using a Hybrid Approach with __table__
-=======================================
-
-As an alternative to ``__tablename__``, a direct
-:class:`~sqlalchemy.schema.Table` construct may be used. The
-:class:`~sqlalchemy.schema.Column` objects, which in this case require
-their names, will be added to the mapping just like a regular mapping
-to a table::
-
- class MyClass(Base):
- __table__ = Table('my_table', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50))
- )
-
-``__table__`` provides a more focused point of control for establishing
-table metadata, while still getting most of the benefits of using declarative.
-An application that uses reflection might want to load table metadata elsewhere
-and simply pass it to declarative classes::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
- Base.metadata.reflect(some_engine)
-
-    class User(Base):
-        __table__ = Base.metadata.tables['user']
-
-    class Address(Base):
-        __table__ = Base.metadata.tables['address']
-
-Some configuration schemes may find it more appropriate to use ``__table__``,
-such as those which already take advantage of the data-driven nature of
-:class:`.Table` to customize and/or automate schema definition. See
-the wiki example `NamingConventions <http://www.sqlalchemy.org/trac/wiki/UsageRecipes/NamingConventions>`_
-for one such example.
-
-Mapper Configuration
-====================
-
-Declarative makes use of the :func:`~.orm.mapper` function internally
-when it creates the mapping to the declared table. The options
-for :func:`~.orm.mapper` are passed directly through via the ``__mapper_args__``
-class attribute. As always, arguments which reference locally
-mapped columns can reference them directly from within the
-class declaration::
-
- from datetime import datetime
-
- class Widget(Base):
- __tablename__ = 'widgets'
-
- id = Column(Integer, primary_key=True)
- timestamp = Column(DateTime, nullable=False)
-
- __mapper_args__ = {
- 'version_id_col': timestamp,
- 'version_id_generator': lambda v:datetime.now()
- }
-
-.. _declarative_inheritance:
-
-Inheritance Configuration
-=========================
-
-Declarative supports all three forms of inheritance as intuitively
-as possible. The ``inherits`` mapper keyword argument is not needed
-as declarative will determine this from the class itself. The various
-"polymorphic" keyword arguments are specified using ``__mapper_args__``.
-
-Joined Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Joined table inheritance is defined as a subclass that defines its own
-table::
-
- class Person(Base):
- __tablename__ = 'people'
- id = Column(Integer, primary_key=True)
- discriminator = Column('type', String(50))
- __mapper_args__ = {'polymorphic_on': discriminator}
-
- class Engineer(Person):
- __tablename__ = 'engineers'
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
- id = Column(Integer, ForeignKey('people.id'), primary_key=True)
- primary_language = Column(String(50))
-
-Note that above, the ``Engineer.id`` attribute, since it shares the
-same attribute name as the ``Person.id`` attribute, will in fact
-represent the ``people.id`` and ``engineers.id`` columns together, and
-will render inside a query as ``"people.id"``.
-To provide the ``Engineer`` class with an attribute that represents
-only the ``engineers.id`` column, give it a different attribute name::
-
- class Engineer(Person):
- __tablename__ = 'engineers'
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
- engineer_id = Column('id', Integer, ForeignKey('people.id'),
- primary_key=True)
- primary_language = Column(String(50))
-
-Single Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-Single table inheritance is defined as a subclass that does not have
-its own table; you just leave out the ``__table__`` and ``__tablename__``
-attributes::
-
- class Person(Base):
- __tablename__ = 'people'
- id = Column(Integer, primary_key=True)
- discriminator = Column('type', String(50))
- __mapper_args__ = {'polymorphic_on': discriminator}
-
- class Engineer(Person):
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
- primary_language = Column(String(50))
-
-When the above mappers are configured, the ``Person`` class is mapped
-to the ``people`` table *before* the ``primary_language`` column is
-defined, and this column will not be included in its own mapping.
-When ``Engineer`` then defines the ``primary_language`` column, the
-column is added to the ``people`` table so that it is included in the
-mapping for ``Engineer`` and is also part of the table's full set of
-columns. Columns which are not mapped to ``Person`` are also excluded
-from any other single or joined inheriting classes using the
-``exclude_properties`` mapper argument. Below, ``Manager`` will have
-all the attributes of ``Person`` and ``Manager`` but *not* the
-``primary_language`` attribute of ``Engineer``::
-
- class Manager(Person):
- __mapper_args__ = {'polymorphic_identity': 'manager'}
- golf_swing = Column(String(50))
-
-The attribute exclusion logic is provided by the
-``exclude_properties`` mapper argument, and declarative's default
-behavior can be disabled by passing an explicit ``exclude_properties``
-collection (empty or otherwise) to the ``__mapper_args__``.
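-
-For example, to disable the exclusion so that ``Manager`` maps all columns
-of the ``people`` table, ``primary_language`` included, pass an empty
-collection (a sketch based on the classes above)::
-
-    class Manager(Person):
-        __mapper_args__ = {
-            'polymorphic_identity': 'manager',
-            'exclude_properties': ()
-        }
-        golf_swing = Column(String(50))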
-
-Concrete Table Inheritance
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Concrete is defined as a subclass which has its own table and sets the
-``concrete`` keyword argument to ``True``::
-
- class Person(Base):
- __tablename__ = 'people'
- id = Column(Integer, primary_key=True)
- name = Column(String(50))
-
- class Engineer(Person):
- __tablename__ = 'engineers'
- __mapper_args__ = {'concrete':True}
- id = Column(Integer, primary_key=True)
- primary_language = Column(String(50))
- name = Column(String(50))
-
-Usage of an abstract base class is a little less straightforward as it
-requires usage of :func:`~sqlalchemy.orm.util.polymorphic_union`::
-
- engineers = Table('engineers', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50)),
- Column('primary_language', String(50))
- )
- managers = Table('managers', Base.metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String(50)),
- Column('golf_swing', String(50))
- )
-
- punion = polymorphic_union({
- 'engineer':engineers,
- 'manager':managers
- }, 'type', 'punion')
-
- class Person(Base):
- __table__ = punion
- __mapper_args__ = {'polymorphic_on':punion.c.type}
-
- class Engineer(Person):
- __table__ = engineers
- __mapper_args__ = {'polymorphic_identity':'engineer', 'concrete':True}
-
- class Manager(Person):
- __table__ = managers
- __mapper_args__ = {'polymorphic_identity':'manager', 'concrete':True}
-
-
-Mixin Classes
-==============
-
-A common need when using :mod:`~sqlalchemy.ext.declarative` is to
-share some functionality, often a set of columns, across many
-classes. The normal Python idiom would be to put this common code into
-a base class and have all the other classes subclass this class.
-
-When using :mod:`~sqlalchemy.ext.declarative`, this need is met by
-using a "mixin class". A mixin class is one that isn't mapped to a
-table and doesn't subclass the declarative :class:`.Base`. For example::
-
- class MyMixin(object):
-
- __table_args__ = {'mysql_engine': 'InnoDB'}
- __mapper_args__= {'always_refresh': True}
-
- id = Column(Integer, primary_key=True)
-
-
- class MyModel(Base,MyMixin):
- __tablename__ = 'test'
-
- name = Column(String(1000))
-
-Where above, the class ``MyModel`` will contain an "id" column
-as well as ``__table_args__`` and ``__mapper_args__`` defined
-by the ``MyMixin`` mixin class.
-
-Mixing in Columns
-~~~~~~~~~~~~~~~~~
-
-The most basic way to specify a column on a mixin is by simple
-declaration::
-
- class TimestampMixin(object):
- created_at = Column(DateTime, default=func.now())
-
- class MyModel(Base, TimestampMixin):
- __tablename__ = 'test'
-
- id = Column(Integer, primary_key=True)
- name = Column(String(1000))
-
-Where above, all declarative classes that include ``TimestampMixin``
-will also have a column ``created_at`` that applies a timestamp to
-all row insertions.
-
-Those familiar with the SQLAlchemy expression language know that
-the object identity of clause elements defines their role in a schema.
-Two ``Table`` objects ``a`` and ``b`` may both have a column called
-``id``, but the way these are differentiated is that ``a.c.id``
-and ``b.c.id`` are two distinct Python objects, referencing their
-parent tables ``a`` and ``b`` respectively.
-
-In the case of the mixin column, it seems that only one
-:class:`.Column` object is explicitly created, yet the ultimate
-``created_at`` column above must exist as a distinct Python object
-for each separate destination class. To accomplish this, the declarative
-extension creates a **copy** of each :class:`.Column` object encountered on
-a class that is detected as a mixin.
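-
-For example, given a second class ``MyOtherModel`` (a hypothetical name,
-defined the same way as ``MyModel`` above), each class receives its own
-distinct copy::
-
-    assert MyModel.__table__.c.created_at is not \\
-        MyOtherModel.__table__.c.created_at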
-
-This copy mechanism is limited to simple columns that have no foreign
-keys, as a :class:`.ForeignKey` itself contains references to columns
-which can't be properly recreated at this level. For columns that
-have foreign keys, as well as for the variety of mapper-level constructs
-that require destination-explicit context, the
-:func:`~.declared_attr` decorator (renamed from ``sqlalchemy.util.classproperty`` in 0.6.5)
-is provided so that
-patterns common to many classes can be defined as callables::
-
- from sqlalchemy.ext.declarative import declared_attr
-
- class ReferenceAddressMixin(object):
- @declared_attr
- def address_id(cls):
- return Column(Integer, ForeignKey('address.id'))
-
- class User(Base, ReferenceAddressMixin):
- __tablename__ = 'user'
- id = Column(Integer, primary_key=True)
-
-Where above, the ``address_id`` class-level callable is executed at the
-point at which the ``User`` class is constructed, and the declarative
-extension can use the resulting :class:`.Column` object as returned by
-the method without the need to copy it.
-
-Columns generated by :func:`~.declared_attr` can also be
-referenced by ``__mapper_args__`` to a limited degree, currently
-by ``polymorphic_on`` and ``version_id_col``, by specifying the
-:func:`.declared_attr` callable itself in the dictionary - the
-declarative extension will resolve them at class construction time::
-
- class MyMixin:
- @declared_attr
- def type_(cls):
- return Column(String(50))
-
- __mapper_args__= {'polymorphic_on':type_}
-
- class MyModel(Base,MyMixin):
- __tablename__='test'
- id = Column(Integer, primary_key=True)
-
-Mixing in Relationships
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Relationships created by :func:`~sqlalchemy.orm.relationship` are provided
-with declarative mixin classes exclusively using the
-:func:`.declared_attr` approach, eliminating any ambiguity
-which could arise when copying a relationship and its possibly column-bound
-contents. Below is an example which combines a foreign key column and a
-relationship so that two classes ``Foo`` and ``Bar`` can both be configured to
-reference a common target class via many-to-one::
-
- class RefTargetMixin(object):
- @declared_attr
- def target_id(cls):
- return Column('target_id', ForeignKey('target.id'))
-
- @declared_attr
- def target(cls):
- return relationship("Target")
-
- class Foo(Base, RefTargetMixin):
- __tablename__ = 'foo'
- id = Column(Integer, primary_key=True)
-
- class Bar(Base, RefTargetMixin):
- __tablename__ = 'bar'
- id = Column(Integer, primary_key=True)
-
- class Target(Base):
- __tablename__ = 'target'
- id = Column(Integer, primary_key=True)
-
-:func:`~sqlalchemy.orm.relationship` definitions which require explicit
-primaryjoin, order_by etc. expressions should use the string forms
-for these arguments, so that they are evaluated as late as possible.
-To reference the mixin class in these expressions, use the given ``cls``
-to get its name::
-
- class RefTargetMixin(object):
- @declared_attr
- def target_id(cls):
- return Column('target_id', ForeignKey('target.id'))
-
- @declared_attr
- def target(cls):
- return relationship("Target",
- primaryjoin="Target.id==%s.target_id" % cls.__name__
- )
-
-Mixing in deferred(), column_property(), etc.
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Like :func:`~sqlalchemy.orm.relationship`, all
-:class:`~sqlalchemy.orm.interfaces.MapperProperty` subclasses such as
-:func:`~sqlalchemy.orm.deferred`, :func:`~sqlalchemy.orm.column_property`,
-etc. ultimately involve references to columns, and therefore, when
-used with declarative mixins, have the :func:`.declared_attr`
-requirement so that no reliance on copying is needed::
-
- class SomethingMixin(object):
-
- @declared_attr
- def dprop(cls):
- return deferred(Column(Integer))
-
- class Something(Base, SomethingMixin):
- __tablename__ = "something"
-
-
-Controlling table inheritance with mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The ``__tablename__`` attribute in conjunction with the hierarchy of
-classes involved in a declarative mixin scenario controls what type of
-table inheritance, if any,
-is configured by the declarative extension.
-
-If the ``__tablename__`` is computed by a mixin, you may need to
-control which classes get the computed attribute in order to get the
-type of table inheritance you require.
-
-For example, if you had a mixin that computes ``__tablename__`` but
-where you wanted to use that mixin in a single table inheritance
-hierarchy, you can explicitly specify ``__tablename__`` as ``None`` to
-indicate that the class should not have a table mapped::
-
- from sqlalchemy.ext.declarative import declared_attr
-
- class Tablename:
- @declared_attr
- def __tablename__(cls):
- return cls.__name__.lower()
-
- class Person(Base,Tablename):
- id = Column(Integer, primary_key=True)
- discriminator = Column('type', String(50))
- __mapper_args__ = {'polymorphic_on': discriminator}
-
- class Engineer(Person):
- __tablename__ = None
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
- primary_language = Column(String(50))
-
-Alternatively, you can make the mixin intelligent enough to only
-return a ``__tablename__`` in the event that no table is already
-mapped in the inheritance hierarchy. To help with this, a
-:func:`~sqlalchemy.ext.declarative.has_inherited_table` helper
-function is provided that returns ``True`` if a parent class already
-has a mapped table.
-
-As an example, here's a mixin that will only allow single table
-inheritance::
-
- from sqlalchemy.ext.declarative import declared_attr
- from sqlalchemy.ext.declarative import has_inherited_table
-
- class Tablename:
- @declared_attr
- def __tablename__(cls):
- if has_inherited_table(cls):
- return None
- return cls.__name__.lower()
-
- class Person(Base,Tablename):
- id = Column(Integer, primary_key=True)
- discriminator = Column('type', String(50))
- __mapper_args__ = {'polymorphic_on': discriminator}
-
- class Engineer(Person):
- primary_language = Column(String(50))
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
-If you want to use a similar pattern with a mix of single and joined
-table inheritance, you would need a slightly different mixin and use
-it on any joined table child classes in addition to their parent
-classes::
-
- from sqlalchemy.ext.declarative import declared_attr
- from sqlalchemy.ext.declarative import has_inherited_table
-
- class Tablename:
- @declared_attr
- def __tablename__(cls):
- if (has_inherited_table(cls) and
- Tablename not in cls.__bases__):
- return None
- return cls.__name__.lower()
-
- class Person(Base,Tablename):
- id = Column(Integer, primary_key=True)
- discriminator = Column('type', String(50))
- __mapper_args__ = {'polymorphic_on': discriminator}
-
- # This is single table inheritance
- class Engineer(Person):
- primary_language = Column(String(50))
- __mapper_args__ = {'polymorphic_identity': 'engineer'}
-
- # This is joined table inheritance
- class Manager(Person,Tablename):
- id = Column(Integer, ForeignKey('person.id'), primary_key=True)
- preferred_recreation = Column(String(50))
-        __mapper_args__ = {'polymorphic_identity': 'manager'}
-
-Combining Table/Mapper Arguments from Multiple Mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In the case of ``__table_args__`` or ``__mapper_args__``
-specified with declarative mixins, you may want to combine
-some parameters from several mixins with those you wish to
-define on the class itself. The
-:func:`.declared_attr` decorator can be used
-here to create user-defined routines that combine arguments
-from multiple collections::
-
- from sqlalchemy.ext.declarative import declared_attr
-
- class MySQLSettings:
- __table_args__ = {'mysql_engine':'InnoDB'}
-
- class MyOtherMixin:
- __table_args__ = {'info':'foo'}
-
- class MyModel(Base,MySQLSettings,MyOtherMixin):
- __tablename__='my_model'
-
- @declared_attr
- def __table_args__(cls):
- args = dict()
- args.update(MySQLSettings.__table_args__)
- args.update(MyOtherMixin.__table_args__)
- return args
-
- id = Column(Integer, primary_key=True)
-
-Creating Indexes with Mixins
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-To define a named, potentially multicolumn :class:`.Index` that applies to all
-tables derived from a mixin, use the "inline" form of :class:`.Index` and establish
-it as part of ``__table_args__``::
-
- class MyMixin(object):
- a = Column(Integer)
- b = Column(Integer)
-
- @declared_attr
- def __table_args__(cls):
- return (Index('test_idx_%s' % cls.__tablename__, 'a', 'b'),)
-
- class MyModel(Base,MyMixin):
- __tablename__ = 'atable'
- c = Column(Integer,primary_key=True)
-
-
-Class Constructor
-=================
-
-As a convenience feature, the :func:`declarative_base` sets a default
-constructor on classes which takes keyword arguments, and assigns them
-to the named attributes::
-
- e = Engineer(primary_language='python')
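-
-The default constructor behaves roughly like the following sketch (the
-actual implementation additionally verifies that each keyword names an
-existing attribute)::
-
-    def __init__(self, **kwargs):
-        for key, value in kwargs.items():
-            setattr(self, key, value)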
-
-Sessions
-========
-
-Note that ``declarative`` does nothing special with sessions, and is
-only intended as an easier way to configure mappers and
-:class:`~sqlalchemy.schema.Table` objects. A typical application
-setup using :func:`~sqlalchemy.orm.scoped_session` might look like::
-
- engine = create_engine('postgresql://scott:tiger@localhost/test')
- Session = scoped_session(sessionmaker(autocommit=False,
- autoflush=False,
- bind=engine))
- Base = declarative_base()
-
-Mapped instances then make usage of
-:class:`~sqlalchemy.orm.session.Session` in the usual way.
-
-"""
-
-from sqlalchemy.schema import Table, Column, MetaData, _get_table_key
-from sqlalchemy.orm import synonym as _orm_synonym, mapper,\
- comparable_property, class_mapper
-from sqlalchemy.orm.interfaces import MapperProperty
-from sqlalchemy.orm.properties import RelationshipProperty, ColumnProperty, CompositeProperty
-from sqlalchemy.orm.util import _is_mapped_class
-from sqlalchemy import util, exc
-from sqlalchemy.sql import util as sql_util, expression
-
-
-__all__ = 'declarative_base', 'synonym_for', \
- 'comparable_using', 'instrument_declarative'
-
-def instrument_declarative(cls, registry, metadata):
- """Given a class, configure the class declaratively,
- using the given registry, which can be any dictionary, and
- MetaData object.
-
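-    A sketch of standalone usage, assuming a plain class ``MyClass``
-    that defines ``__tablename__`` and mapped columns::
-
-        registry = {}
-        instrument_declarative(MyClass, registry, MetaData())
-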
- """
- if '_decl_class_registry' in cls.__dict__:
- raise exc.InvalidRequestError(
- "Class %r already has been "
- "instrumented declaratively" % cls)
- cls._decl_class_registry = registry
- cls.metadata = metadata
- _as_declarative(cls, cls.__name__, cls.__dict__)
-
-def has_inherited_table(cls):
- """Given a class, return True if any of the classes it inherits from has a
- mapped table, otherwise return False.
- """
- for class_ in cls.__mro__:
- if getattr(class_,'__table__',None) is not None:
- return True
- return False
-
-def _as_declarative(cls, classname, dict_):
-
- # dict_ will be a dictproxy, which we can't write to, and we need to!
- dict_ = dict(dict_)
-
- column_copies = {}
- potential_columns = {}
-
- mapper_args = {}
- table_args = inherited_table_args = None
- tablename = None
- parent_columns = ()
-
- declarative_props = (declared_attr, util.classproperty)
-
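-    # scan the class and its superclasses (typically mixins) for
-    # declarative directives and for columns/properties to be copied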
- for base in cls.__mro__:
- class_mapped = _is_mapped_class(base)
- if class_mapped:
- parent_columns = base.__table__.c.keys()
-
- for name,obj in vars(base).items():
- if name == '__mapper_args__':
- if not mapper_args and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- mapper_args = cls.__mapper_args__
- elif name == '__tablename__':
- if not tablename and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- tablename = cls.__tablename__
- elif name == '__table_args__':
- if not table_args and (
- not class_mapped or
- isinstance(obj, declarative_props)
- ):
- table_args = cls.__table_args__
- if not isinstance(table_args, (tuple, dict, type(None))):
- raise exc.ArgumentError(
- "__table_args__ value must be a tuple, "
- "dict, or None")
- if base is not cls:
- inherited_table_args = True
- elif class_mapped:
- continue
- elif base is not cls:
- # we're a mixin.
-
- if isinstance(obj, Column):
- if obj.foreign_keys:
- raise exc.InvalidRequestError(
- "Columns with foreign keys to other columns "
- "must be declared as @declared_attr callables "
- "on declarative mixin classes. ")
- if name not in dict_ and not (
- '__table__' in dict_ and
- (obj.name or name) in dict_['__table__'].c
- ) and name not in potential_columns:
- potential_columns[name] = \
- column_copies[obj] = \
- obj.copy()
- column_copies[obj]._creation_order = \
- obj._creation_order
- elif isinstance(obj, MapperProperty):
- raise exc.InvalidRequestError(
- "Mapper properties (i.e. deferred,"
- "column_property(), relationship(), etc.) must "
- "be declared as @declared_attr callables "
- "on declarative mixin classes.")
- elif isinstance(obj, declarative_props):
- dict_[name] = ret = \
- column_copies[obj] = getattr(cls, name)
- if isinstance(ret, (Column, MapperProperty)) and \
- ret.doc is None:
- ret.doc = obj.__doc__
-
- # apply inherited columns as we should
- for k, v in potential_columns.items():
- if tablename or (v.name or k) not in parent_columns:
- dict_[k] = v
-
- if inherited_table_args and not tablename:
- table_args = None
-
- # make sure that column copies are used rather
- # than the original columns from any mixins
- for k in ('version_id_col', 'polymorphic_on',):
- if k in mapper_args:
- v = mapper_args[k]
- mapper_args[k] = column_copies.get(v,v)
-
- if classname in cls._decl_class_registry:
- util.warn("The classname %r is already in the registry of this"
- " declarative base, mapped to %r" % (
- classname,
- cls._decl_class_registry[classname]
- ))
- cls._decl_class_registry[classname] = cls
- our_stuff = util.OrderedDict()
-
- for k in dict_:
- value = dict_[k]
- if isinstance(value, declarative_props):
- value = getattr(cls, k)
-
- if (isinstance(value, tuple) and len(value) == 1 and
- isinstance(value[0], (Column, MapperProperty))):
- util.warn("Ignoring declarative-like tuple value of attribute "
- "%s: possibly a copy-and-paste error with a comma "
- "left at the end of the line?" % k)
- continue
- if not isinstance(value, (Column, MapperProperty)):
- continue
- if k == 'metadata':
- raise exc.InvalidRequestError(
- "Attribute name 'metadata' is reserved "
- "for the MetaData instance when using a "
- "declarative base class."
- )
- prop = _deferred_relationship(cls, value)
- our_stuff[k] = prop
-
- # set up attributes in the order they were created
- our_stuff.sort(key=lambda key: our_stuff[key]._creation_order)
-
- # extract columns from the class dict
- cols = set()
- for key, c in our_stuff.iteritems():
- if isinstance(c, (ColumnProperty, CompositeProperty)):
- for col in c.columns:
- if isinstance(col, Column) and \
- col.table is None:
- _undefer_column_name(key, col)
- cols.add(col)
- elif isinstance(c, Column):
- _undefer_column_name(key, c)
- cols.add(c)
- # if the column is the same name as the key,
- # remove it from the explicit properties dict.
- # the normal rules for assigning column-based properties
- # will take over, including precedence of columns
- # in multi-column ColumnProperties.
- if key == c.key:
- del our_stuff[key]
- cols = sorted(cols, key=lambda c:c._creation_order)
-
- table = None
- if '__table__' not in dict_:
- if tablename is not None:
-
- if isinstance(table_args, dict):
- args, table_kw = (), table_args
- elif isinstance(table_args, tuple):
- if isinstance(table_args[-1], dict):
- args, table_kw = table_args[0:-1], table_args[-1]
- else:
- args, table_kw = table_args, {}
- else:
- args, table_kw = (), {}
-
- autoload = dict_.get('__autoload__')
- if autoload:
- table_kw['autoload'] = True
-
- cls.__table__ = table = Table(tablename, cls.metadata,
- *(tuple(cols) + tuple(args)),
- **table_kw)
- else:
- table = cls.__table__
- if cols:
- for c in cols:
- if not table.c.contains_column(c):
- raise exc.ArgumentError(
- "Can't add additional column %r when "
- "specifying __table__" % c.key
- )
-
- if 'inherits' not in mapper_args:
- for c in cls.__bases__:
- if _is_mapped_class(c):
- mapper_args['inherits'] = cls._decl_class_registry.get(
- c.__name__, None)
- break
-
- if hasattr(cls, '__mapper_cls__'):
- mapper_cls = util.unbound_method_to_callable(cls.__mapper_cls__)
- else:
- mapper_cls = mapper
-
- if table is None and 'inherits' not in mapper_args:
- raise exc.InvalidRequestError(
- "Class %r does not have a __table__ or __tablename__ "
- "specified and does not inherit from an existing "
- "table-mapped class." % cls
- )
-
- elif 'inherits' in mapper_args and not mapper_args.get('concrete', False):
- inherited_mapper = class_mapper(mapper_args['inherits'],
- compile=False)
- inherited_table = inherited_mapper.local_table
-
- if table is None:
- # single table inheritance.
- # ensure no table args
- if table_args:
- raise exc.ArgumentError(
- "Can't place __table_args__ on an inherited class "
- "with no table."
- )
-
- # add any columns declared here to the inherited table.
- for c in cols:
- if c.primary_key:
- raise exc.ArgumentError(
- "Can't place primary key columns on an inherited "
- "class with no table."
- )
- if c.name in inherited_table.c:
- raise exc.ArgumentError(
- "Column '%s' on class %s conflicts with "
- "existing column '%s'" %
- (c, cls, inherited_table.c[c.name])
- )
- inherited_table.append_column(c)
-
- # single or joined inheritance
- # exclude any cols on the inherited table which are not mapped on the
- # parent class, to avoid
- # mapping columns specific to sibling/nephew classes
- inherited_mapper = class_mapper(mapper_args['inherits'],
- compile=False)
- inherited_table = inherited_mapper.local_table
-
- if 'exclude_properties' not in mapper_args:
- mapper_args['exclude_properties'] = exclude_properties = \
- set([c.key for c in inherited_table.c
- if c not in inherited_mapper._columntoproperty])
- exclude_properties.difference_update([c.key for c in cols])
-
- # look through columns in the current mapper that
- # are keyed to a propname different than the colname
- # (if names were the same, we'd have popped it out above,
- # in which case the mapper makes this combination).
- # See if the superclass has a similar column property.
- # If so, join them together.
- for k, col in our_stuff.items():
- if not isinstance(col, expression.ColumnElement):
- continue
- if k in inherited_mapper._props:
- p = inherited_mapper._props[k]
- if isinstance(p, ColumnProperty):
- # note here we place the superclass column
- # first. this corresponds to the
- # append() in mapper._configure_property().
- # change this ordering when we do [ticket:1892]
- our_stuff[k] = p.columns + [col]
-
-
- cls.__mapper__ = mapper_cls(cls,
- table,
- properties=our_stuff,
- **mapper_args)
-
-class DeclarativeMeta(type):
- def __init__(cls, classname, bases, dict_):
- if '_decl_class_registry' in cls.__dict__:
- return type.__init__(cls, classname, bases, dict_)
-
- _as_declarative(cls, classname, cls.__dict__)
- return type.__init__(cls, classname, bases, dict_)
-
- def __setattr__(cls, key, value):
- if '__mapper__' in cls.__dict__:
- if isinstance(value, Column):
- _undefer_column_name(key, value)
- cls.__table__.append_column(value)
- cls.__mapper__.add_property(key, value)
- elif isinstance(value, ColumnProperty):
- for col in value.columns:
- if isinstance(col, Column) and col.table is None:
- _undefer_column_name(key, col)
- cls.__table__.append_column(col)
- cls.__mapper__.add_property(key, value)
- elif isinstance(value, MapperProperty):
- cls.__mapper__.add_property(
- key,
- _deferred_relationship(cls, value)
- )
- else:
- type.__setattr__(cls, key, value)
- else:
- type.__setattr__(cls, key, value)
-
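-# Editor's sketch (hypothetical names, not part of the original module):
-# because DeclarativeMeta intercepts __setattr__, attributes assigned to an
-# already-mapped class are routed into its Table and Mapper rather than
-# stored as plain class attributes:
-#
-#     class User(Base):
-#         __tablename__ = 'users'
-#         id = Column(Integer, primary_key=True)
-#
-#     User.email = Column(String(100))  # appended to the Table and mapped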
-
-class _GetColumns(object):
- def __init__(self, cls):
- self.cls = cls
-
- def __getattr__(self, key):
- mapper = class_mapper(self.cls, compile=False)
- if mapper:
- if not mapper.has_property(key):
- raise exc.InvalidRequestError(
- "Class %r does not have a mapped column named %r"
- % (self.cls, key))
-
- prop = mapper.get_property(key)
- if not isinstance(prop, ColumnProperty):
- raise exc.InvalidRequestError(
- "Property %r is not an instance of"
- " ColumnProperty (i.e. does not correspond"
- " directly to a Column)." % key)
- return getattr(self.cls, key)
-
-class _GetTable(object):
- def __init__(self, key, metadata):
- self.key = key
- self.metadata = metadata
-
- def __getattr__(self, key):
- return self.metadata.tables[
- _get_table_key(key, self.key)
- ]
-
-def _deferred_relationship(cls, prop):
- def resolve_arg(arg):
- import sqlalchemy
-
- def access_cls(key):
- if key in cls._decl_class_registry:
- return _GetColumns(cls._decl_class_registry[key])
- elif key in cls.metadata.tables:
- return cls.metadata.tables[key]
- elif key in cls.metadata._schemas:
- return _GetTable(key, cls.metadata)
- else:
- return sqlalchemy.__dict__[key]
-
- d = util.PopulateDict(access_cls)
- def return_cls():
- try:
- x = eval(arg, globals(), d)
-
- if isinstance(x, _GetColumns):
- return x.cls
- else:
- return x
- except NameError, n:
- raise exc.InvalidRequestError(
- "When initializing mapper %s, expression %r failed to "
- "locate a name (%r). If this is a class name, consider "
- "adding this relationship() to the %r class after "
- "both dependent classes have been defined." %
- (prop.parent, arg, n.args[0], cls)
- )
- return return_cls
-
- if isinstance(prop, RelationshipProperty):
- for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
- 'secondary', '_user_defined_foreign_keys', 'remote_side'):
- v = getattr(prop, attr)
- if isinstance(v, basestring):
- setattr(prop, attr, resolve_arg(v))
-
- if prop.backref and isinstance(prop.backref, tuple):
- key, kwargs = prop.backref
- for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
- 'foreign_keys', 'remote_side', 'order_by'):
- if attr in kwargs and isinstance(kwargs[attr], basestring):
- kwargs[attr] = resolve_arg(kwargs[attr])
-
-
- return prop
-
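-# Editor's sketch (hypothetical classes): the deferral above is what lets
-# declarative accept strings for relationship() arguments; each string is
-# eval()'ed against the class registry only when the mapper first needs it,
-# so "User" may be defined after Address:
-#
-#     class Address(Base):
-#         __tablename__ = 'addresses'
-#         id = Column(Integer, primary_key=True)
-#         user_id = Column(Integer, ForeignKey('users.id'))
-#         user = relationship("User",
-#                             primaryjoin="User.id == Address.user_id")
-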
-def synonym_for(name, map_column=False):
- """Decorator, make a Python @property a query synonym for a column.
-
- A decorator version of :func:`~sqlalchemy.orm.synonym`. The function being
- decorated is the 'descriptor', otherwise passes its arguments through to
- synonym()::
-
- @synonym_for('col')
- @property
- def prop(self):
- return 'special sauce'
-
- The regular ``synonym()`` is also usable directly in a declarative setting
- and may be convenient for read/write properties::
-
- prop = synonym('col', descriptor=property(_read_prop, _write_prop))
-
- """
- def decorate(fn):
- return _orm_synonym(name, map_column=map_column, descriptor=fn)
- return decorate
-
-def comparable_using(comparator_factory):
- """Decorator, allow a Python @property to be used in query criteria.
-
- This is a decorator front end to
- :func:`~sqlalchemy.orm.comparable_property` that passes
- through the comparator_factory and the function being decorated::
-
- @comparable_using(MyComparatorType)
- @property
- def prop(self):
- return 'special sauce'
-
- The regular ``comparable_property()`` is also usable directly in a
- declarative setting and may be convenient for read/write properties::
-
- prop = comparable_property(MyComparatorType)
-
- """
- def decorate(fn):
- return comparable_property(comparator_factory, fn)
- return decorate
-
-class declared_attr(property):
- """Mark a class-level method as representing the definition of
- a mapped property or special declarative member name.
-
- .. note:: @declared_attr is available as
- ``sqlalchemy.util.classproperty`` for SQLAlchemy versions
- 0.6.2, 0.6.3, 0.6.4.
-
- @declared_attr turns the attribute into a scalar-like
- property that can be invoked from the uninstantiated class.
- Declarative treats attributes specifically marked with
- @declared_attr as returning a construct that is specific
-    to mapping or declarative table configuration. The
-    attribute keeps the same name that the non-dynamic
-    version of the attribute would have.
-
- @declared_attr is more often than not applicable to mixins,
- to define relationships that are to be applied to different
- implementors of the class::
-
- class ProvidesUser(object):
- "A mixin that adds a 'user' relationship to classes."
-
- @declared_attr
- def user(self):
- return relationship("User")
-
- It also can be applied to mapped classes, such as to provide
- a "polymorphic" scheme for inheritance::
-
- class Employee(Base):
- id = Column(Integer, primary_key=True)
- type = Column(String(50), nullable=False)
-
- @declared_attr
- def __tablename__(cls):
- return cls.__name__.lower()
-
- @declared_attr
- def __mapper_args__(cls):
- if cls.__name__ == 'Employee':
- return {
- "polymorphic_on":cls.type,
- "polymorphic_identity":"Employee"
- }
- else:
- return {"polymorphic_identity":cls.__name__}
-
- """
-
- def __init__(self, fget, *arg, **kw):
- super(declared_attr, self).__init__(fget, *arg, **kw)
- self.__doc__ = fget.__doc__
-
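-    # editor's note: 'desc' here is the descriptor itself (the position
-    # normally named 'self'); fget is invoked with the owning class, so a
-    # @declared_attr function receives the class even when accessed on the
-    # uninstantiated class.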
- def __get__(desc, self, cls):
- return desc.fget(cls)
-
-def _declarative_constructor(self, **kwargs):
- """A simple constructor that allows initialization from kwargs.
-
- Sets attributes on the constructed instance using the names and
- values in ``kwargs``.
-
- Only keys that are present as
- attributes of the instance's class are allowed. These could be,
- for example, any mapped columns or relationships.
- """
- cls_ = type(self)
- for k in kwargs:
- if not hasattr(cls_, k):
- raise TypeError(
- "%r is an invalid keyword argument for %s" %
- (k, cls_.__name__))
- setattr(self, k, kwargs[k])
-_declarative_constructor.__name__ = '__init__'
-
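-# Editor's illustration (hypothetical mapped class User): with this default
-# constructor, only names already present on the class are accepted:
-#
-#     u = User(name='ed')   # ok: setattr(u, 'name', 'ed')
-#     u = User(nme='ed')    # TypeError: invalid keyword argument
-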
-def declarative_base(bind=None, metadata=None, mapper=None, cls=object,
- name='Base', constructor=_declarative_constructor,
- metaclass=DeclarativeMeta):
- """Construct a base class for declarative class definitions.
-
- The new base class will be given a metaclass that produces
- appropriate :class:`~sqlalchemy.schema.Table` objects and makes
- the appropriate :func:`~sqlalchemy.orm.mapper` calls based on the
- information provided declaratively in the class and any subclasses
- of the class.
-
- :param bind: An optional
- :class:`~sqlalchemy.engine.base.Connectable`, will be assigned
- the ``bind`` attribute on the :class:`~sqlalchemy.MetaData`
- instance.
-
- :param metadata:
- An optional :class:`~sqlalchemy.MetaData` instance. All
- :class:`~sqlalchemy.schema.Table` objects implicitly declared by
- subclasses of the base will share this MetaData. A MetaData instance
- will be created if none is provided. The
- :class:`~sqlalchemy.MetaData` instance will be available via the
- `metadata` attribute of the generated declarative base class.
-
- :param mapper:
- An optional callable, defaults to :func:`~sqlalchemy.orm.mapper`. Will
- be used to map subclasses to their Tables.
-
- :param cls:
- Defaults to :class:`object`. A type to use as the base for the generated
- declarative base class. May be a class or tuple of classes.
-
- :param name:
- Defaults to ``Base``. The display name for the generated
- class. Customizing this is not required, but can improve clarity in
- tracebacks and debugging.
-
- :param constructor:
- Defaults to
- :func:`~sqlalchemy.ext.declarative._declarative_constructor`, an
- __init__ implementation that assigns \**kwargs for declared
- fields and relationships to an instance. If ``None`` is supplied,
- no __init__ will be provided and construction will fall back to
- cls.__init__ by way of the normal Python semantics.
-
- :param metaclass:
- Defaults to :class:`.DeclarativeMeta`. A metaclass or __metaclass__
- compatible callable to use as the meta type of the generated
- declarative base class.
-
- """
- lcl_metadata = metadata or MetaData()
- if bind:
- lcl_metadata.bind = bind
-
- bases = not isinstance(cls, tuple) and (cls,) or cls
- class_dict = dict(_decl_class_registry=dict(),
- metadata=lcl_metadata)
-
- if constructor:
- class_dict['__init__'] = constructor
- if mapper:
- class_dict['__mapper_cls__'] = mapper
-
- return metaclass(name, bases, class_dict)
-
-def _undefer_column_name(key, column):
- if column.key is None:
- column.key = key
- if column.name is None:
- column.name = key
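-
-# Editor's sketch of the whole flow above (names hypothetical, not part of
-# the original module): declarative_base() builds the Base through the
-# metaclass, and each subclass definition triggers _as_declarative(),
-# producing a Table and a mapper() call:
-#
-#     from sqlalchemy import Column, Integer, String, create_engine
-#
-#     Base = declarative_base()
-#
-#     class User(Base):
-#         __tablename__ = 'users'
-#         id = Column(Integer, primary_key=True)
-#         name = Column(String(50))
-#
-#     engine = create_engine('sqlite://')
-#     Base.metadata.create_all(engine)
-#     u = User(name='ed')   # via _declarative_constructor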
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/horizontal_shard.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/horizontal_shard.py
deleted file mode 100755
index 6aafb227..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/horizontal_shard.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# ext/horizontal_shard.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Horizontal sharding support.
-
-Defines a rudimentary 'horizontal sharding' system which allows a Session to
-distribute queries and persistence operations across multiple databases.
-
-For a usage example, see the :ref:`examples_sharding` example included in
-the source distribution.
-
-"""
-
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import util
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.query import Query
-
-__all__ = ['ShardedSession', 'ShardedQuery']
-
-class ShardedQuery(Query):
- def __init__(self, *args, **kwargs):
- super(ShardedQuery, self).__init__(*args, **kwargs)
- self.id_chooser = self.session.id_chooser
- self.query_chooser = self.session.query_chooser
- self._shard_id = None
-
- def set_shard(self, shard_id):
- """return a new query, limited to a single shard ID.
-
- all subsequent operations with the returned query will
- be against the single shard regardless of other state.
- """
-
- q = self._clone()
- q._shard_id = shard_id
- return q
-
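-    # Editor's note ('north' and User are hypothetical): without set_shard(),
-    # a query fans out across every shard returned by query_chooser; with it,
-    # only the named shard is consulted:
-    #
-    #     q = session.query(User).set_shard('north')
-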
- def _execute_and_instances(self, context):
- def iter_for_shard(shard_id):
- context.attributes['shard_id'] = shard_id
- result = self._connection_from_session(
- mapper=self._mapper_zero(),
- shard_id=shard_id).execute(
- context.statement,
- self._params)
- return self.instances(result, context)
-
- if self._shard_id is not None:
- return iter_for_shard(self._shard_id)
- else:
- partial = []
- for shard_id in self.query_chooser(self):
- partial.extend(iter_for_shard(shard_id))
-
- # if some kind of in memory 'sorting'
- # were done, this is where it would happen
- return iter(partial)
-
- def get(self, ident, **kwargs):
- if self._shard_id is not None:
- return super(ShardedQuery, self).get(ident)
- else:
- ident = util.to_list(ident)
- for shard_id in self.id_chooser(self, ident):
- o = self.set_shard(shard_id).get(ident, **kwargs)
- if o is not None:
- return o
- else:
- return None
-
-class ShardedSession(Session):
- def __init__(self, shard_chooser, id_chooser, query_chooser, shards=None,
- query_cls=ShardedQuery, **kwargs):
- """Construct a ShardedSession.
-
- :param shard_chooser: A callable which, passed a Mapper, a mapped instance, and possibly a
-          SQL clause, returns a shard ID. This ID may be based on the
-          attributes present within the object, or on some round-robin
-          scheme. If the scheme is based on a selection, it should set
-          whatever state is needed on the instance to mark it in the
-          future as participating in that shard.
-
- :param id_chooser: A callable, passed a query and a tuple of identity values, which
- should return a list of shard ids where the ID might reside. The
- databases will be queried in the order of this listing.
-
- :param query_chooser: For a given Query, returns the list of shard_ids where the query
- should be issued. Results from all shards returned will be combined
- together into a single listing.
-
- :param shards: A dictionary of string shard names to :class:`~sqlalchemy.engine.base.Engine`
- objects.
-
- """
- super(ShardedSession, self).__init__(query_cls=query_cls, **kwargs)
- self.shard_chooser = shard_chooser
- self.id_chooser = id_chooser
- self.query_chooser = query_chooser
- self.__binds = {}
- self.connection_callable = self.connection
- if shards is not None:
- for k in shards:
- self.bind_shard(k, shards[k])
-
- def connection(self, mapper=None, instance=None, shard_id=None, **kwargs):
- if shard_id is None:
- shard_id = self.shard_chooser(mapper, instance)
-
- if self.transaction is not None:
- return self.transaction.connection(mapper, shard_id=shard_id)
- else:
- return self.get_bind(mapper,
- shard_id=shard_id,
- instance=instance).contextual_connect(**kwargs)
-
- def get_bind(self, mapper, shard_id=None, instance=None, clause=None, **kw):
- if shard_id is None:
- shard_id = self.shard_chooser(mapper, instance, clause=clause)
- return self.__binds[shard_id]
-
- def bind_shard(self, shard_id, bind):
- self.__binds[shard_id] = bind
-
-
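-# Editor's sketch of wiring a two-shard session (the shard names, engines
-# and even/odd rule are hypothetical, not part of the original module):
-#
-#     from sqlalchemy import create_engine
-#
-#     shards = {
-#         'even': create_engine('sqlite://'),
-#         'odd': create_engine('sqlite://'),
-#     }
-#
-#     def shard_chooser(mapper, instance, clause=None):
-#         # choose from instance state at flush time
-#         return 'even' if instance.id % 2 == 0 else 'odd'
-#
-#     def id_chooser(query, ident):
-#         # the primary key alone identifies the shard here
-#         return ['even'] if ident[0] % 2 == 0 else ['odd']
-#
-#     def query_chooser(query):
-#         # no criteria inspection; consult every shard
-#         return ['even', 'odd']
-#
-#     session = ShardedSession(shard_chooser, id_chooser, query_chooser,
-#                              shards=shards)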
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/hybrid.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/hybrid.py
deleted file mode 100755
index c16c38b2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/hybrid.py
+++ /dev/null
@@ -1,425 +0,0 @@
-# ext/hybrid.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Define attributes on ORM-mapped classes that have "hybrid" behavior.
-
-"hybrid" means the attribute has distinct behaviors defined at the
-class level and at the instance level.
-
-The :mod:`~sqlalchemy.ext.hybrid` extension provides a special form of method
-decorator; it is around 50 lines of code and has almost no dependencies on the
-rest of SQLAlchemy. It can, in theory, work with any class-level expression
-generator.
-
-Consider a table ``interval`` as below::
-
- from sqlalchemy import MetaData, Table, Column, Integer
-
- metadata = MetaData()
-
- interval_table = Table('interval', metadata,
- Column('id', Integer, primary_key=True),
- Column('start', Integer, nullable=False),
- Column('end', Integer, nullable=False)
- )
-
-We can define higher level functions on mapped classes that produce SQL
-expressions at the class level, and Python expression evaluation at the
-instance level. Below, each function decorated with :func:`.hybrid_method`
-or :func:`.hybrid_property` may receive ``self`` as an instance of the class,
-or as the class itself::
-
- from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
- from sqlalchemy.orm import mapper, Session, aliased
-
- class Interval(object):
- def __init__(self, start, end):
- self.start = start
- self.end = end
-
- @hybrid_property
- def length(self):
- return self.end - self.start
-
- @hybrid_method
- def contains(self,point):
- return (self.start <= point) & (point < self.end)
-
- @hybrid_method
- def intersects(self, other):
- return self.contains(other.start) | self.contains(other.end)
-
- mapper(Interval, interval_table)
-
-Above, the ``length`` property returns the difference between the ``end`` and
-``start`` attributes. With an instance of ``Interval``, this subtraction occurs
-in Python, using normal Python descriptor mechanics::
-
- >>> i1 = Interval(5, 10)
- >>> i1.length
- 5
-
-At the class level, the usual descriptor behavior of returning the descriptor
-itself is modified by :class:`.hybrid_property`, to instead evaluate the function
-body given the ``Interval`` class as the argument::
-
- >>> print Interval.length
- interval."end" - interval.start
-
- >>> print Session().query(Interval).filter(Interval.length > 10)
- SELECT interval.id AS interval_id, interval.start AS interval_start,
- interval."end" AS interval_end
- FROM interval
- WHERE interval."end" - interval.start > :param_1
-
-ORM methods such as :meth:`~.Query.filter_by` generally use ``getattr()`` to
-locate attributes, so can also be used with hybrid attributes::
-
- >>> print Session().query(Interval).filter_by(length=5)
- SELECT interval.id AS interval_id, interval.start AS interval_start,
- interval."end" AS interval_end
- FROM interval
- WHERE interval."end" - interval.start = :param_1
-
-The ``contains()`` and ``intersects()`` methods are decorated with :class:`.hybrid_method`.
-This decorator applies the same idea to methods which accept
-zero or more arguments. The above methods return boolean values, and take advantage
-of the Python ``|`` and ``&`` bitwise operators to produce equivalent instance-level and
-SQL expression-level boolean behavior::
-
- >>> i1.contains(6)
- True
- >>> i1.contains(15)
- False
- >>> i1.intersects(Interval(7, 18))
- True
- >>> i1.intersects(Interval(25, 29))
- False
-
- >>> print Session().query(Interval).filter(Interval.contains(15))
- SELECT interval.id AS interval_id, interval.start AS interval_start,
- interval."end" AS interval_end
- FROM interval
- WHERE interval.start <= :start_1 AND interval."end" > :end_1
-
- >>> ia = aliased(Interval)
- >>> print Session().query(Interval, ia).filter(Interval.intersects(ia))
- SELECT interval.id AS interval_id, interval.start AS interval_start,
- interval."end" AS interval_end, interval_1.id AS interval_1_id,
- interval_1.start AS interval_1_start, interval_1."end" AS interval_1_end
- FROM interval, interval AS interval_1
- WHERE interval.start <= interval_1.start
- AND interval."end" > interval_1.start
- OR interval.start <= interval_1."end"
- AND interval."end" > interval_1."end"
-
-Defining Expression Behavior Distinct from Attribute Behavior
---------------------------------------------------------------
-
-Our usage of the ``&`` and ``|`` bitwise operators above was fortunate, considering
-our functions operated on two boolean values to return a new one. In many cases, an
-in-Python function and a SQLAlchemy SQL expression differ enough that two
-separate Python expressions should be defined. The :mod:`~sqlalchemy.ext.hybrid` decorators
-define the :meth:`.hybrid_property.expression` modifier for this purpose. As an example we'll
-define the radius of the interval, which requires the usage of the absolute value function::
-
- from sqlalchemy import func
-
- class Interval(object):
- # ...
-
- @hybrid_property
- def radius(self):
- return abs(self.length) / 2
-
- @radius.expression
- def radius(cls):
- return func.abs(cls.length) / 2
-
-Above, the Python function ``abs()`` is used for instance-level operations, while the SQL function
-``ABS()`` is used via the :attr:`.func` object for class-level expressions::
-
- >>> i1.radius
- 2
-
- >>> print Session().query(Interval).filter(Interval.radius > 5)
- SELECT interval.id AS interval_id, interval.start AS interval_start,
- interval."end" AS interval_end
- FROM interval
- WHERE abs(interval."end" - interval.start) / :abs_1 > :param_1
-
-Defining Setters
-----------------
-
-Hybrid properties can also define setter methods. If we wanted ``length`` above
-to modify the endpoint value when set::
-
- class Interval(object):
- # ...
-
- @hybrid_property
- def length(self):
- return self.end - self.start
-
- @length.setter
- def length(self, value):
- self.end = self.start + value
-
-The ``length(self, value)`` method is now called upon set::
-
- >>> i1 = Interval(5, 10)
- >>> i1.length
- 5
- >>> i1.length = 12
- >>> i1.end
- 17
-
-Working with Relationships
---------------------------
-
-There's no essential difference when creating hybrids that work with related objects as
-opposed to column-based data. The need for distinct expressions tends to be greater.
-Consider the following declarative mapping which relates a ``User`` to a ``SavingsAccount``::
-
- from sqlalchemy import Column, Integer, ForeignKey, Numeric, String
- from sqlalchemy.orm import relationship
- from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy.ext.hybrid import hybrid_property
-
- Base = declarative_base()
-
- class SavingsAccount(Base):
- __tablename__ = 'account'
- id = Column(Integer, primary_key=True)
- user_id = Column(Integer, ForeignKey('user.id'), nullable=False)
- balance = Column(Numeric(15, 5))
-
- class User(Base):
- __tablename__ = 'user'
- id = Column(Integer, primary_key=True)
- name = Column(String(100), nullable=False)
-
- accounts = relationship("SavingsAccount", backref="owner")
-
- @hybrid_property
- def balance(self):
- if self.accounts:
- return self.accounts[0].balance
- else:
- return None
-
-        @balance.setter
-        def balance(self, value):
-            if not self.accounts:
-                account = SavingsAccount(owner=self)
-            else:
-                account = self.accounts[0]
-            account.balance = value
-
- @balance.expression
- def balance(cls):
- return SavingsAccount.balance
-
-The above hybrid property ``balance`` works with the first ``SavingsAccount`` entry in the list of
-accounts for this user. The in-Python getter/setter methods can treat ``accounts`` as a Python
-list available on ``self``.
-
-However, at the expression level, we can't travel along relationships to column attributes
-directly, since SQLAlchemy is explicit about joins. So here, it's expected that the ``User`` class will be
-used in a context where a suitable join to ``SavingsAccount`` is present::
-
- >>> print Session().query(User, User.balance).join(User.accounts).filter(User.balance > 5000)
- SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance
- FROM "user" JOIN account ON "user".id = account.user_id
- WHERE account.balance > :balance_1
-
-Note, however, that while the instance-level accessors need to worry about whether ``self.accounts``
-is even present, this issue expresses itself differently at the SQL expression level, where we basically
-would use an outer join::
-
- >>> from sqlalchemy import or_
- >>> print (Session().query(User, User.balance).outerjoin(User.accounts).
- ... filter(or_(User.balance < 5000, User.balance == None)))
- SELECT "user".id AS user_id, "user".name AS user_name, account.balance AS account_balance
- FROM "user" LEFT OUTER JOIN account ON "user".id = account.user_id
- WHERE account.balance < :balance_1 OR account.balance IS NULL
-
-.. _hybrid_custom_comparators:
-
-Building Custom Comparators
----------------------------
-
-The hybrid property also includes a helper that allows construction of custom comparators.
-A comparator object allows one to customize the behavior of each SQLAlchemy expression
-operator individually. They are useful when creating custom types that have
-some highly idiosyncratic behavior on the SQL side.
-
-The example class below allows case-insensitive comparisons on the attribute
-named ``word_insensitive``::
-
- from sqlalchemy.ext.hybrid import Comparator
-
- class CaseInsensitiveComparator(Comparator):
- def __eq__(self, other):
- return func.lower(self.__clause_element__()) == func.lower(other)
-
- class SearchWord(Base):
- __tablename__ = 'searchword'
- id = Column(Integer, primary_key=True)
- word = Column(String(255), nullable=False)
-
- @hybrid_property
- def word_insensitive(self):
- return self.word.lower()
-
- @word_insensitive.comparator
- def word_insensitive(cls):
- return CaseInsensitiveComparator(cls.word)
-
-Above, SQL expressions against ``word_insensitive`` will apply the ``LOWER()``
-SQL function to both sides::
-
- >>> print Session().query(SearchWord).filter_by(word_insensitive="Trucks")
- SELECT searchword.id AS searchword_id, searchword.word AS searchword_word
- FROM searchword
- WHERE lower(searchword.word) = lower(:lower_1)
-
-"""
-from sqlalchemy import util
-from sqlalchemy.orm import attributes, interfaces
-
-class hybrid_method(object):
- """A decorator which allows definition of a Python object method with both
- instance-level and class-level behavior.
-
- """
-
-
- def __init__(self, func, expr=None):
- """Create a new :class:`.hybrid_method`.
-
- Usage is typically via decorator::
-
- from sqlalchemy.ext.hybrid import hybrid_method
-
- class SomeClass(object):
- @hybrid_method
- def value(self, x, y):
- return self._value + x + y
-
-                @value.expression
-                def value(cls, x, y):
-                    # at the class level, the first argument is the class
-                    return func.some_function(cls._value, x, y)
-
- """
- self.func = func
- self.expr = expr or func
-
- def __get__(self, instance, owner):
- if instance is None:
- return self.expr.__get__(owner, owner.__class__)
- else:
- return self.func.__get__(instance, owner)
-
- def expression(self, expr):
- """Provide a modifying decorator that defines a SQL-expression producing method."""
-
- self.expr = expr
- return self
-
-class hybrid_property(object):
- """A decorator which allows definition of a Python descriptor with both
- instance-level and class-level behavior.
-
- """
-
- def __init__(self, fget, fset=None, fdel=None, expr=None):
- """Create a new :class:`.hybrid_property`.
-
- Usage is typically via decorator::
-
- from sqlalchemy.ext.hybrid import hybrid_property
-
- class SomeClass(object):
- @hybrid_property
- def value(self):
- return self._value
-
- @value.setter
- def value(self, value):
- self._value = value
-
- """
- self.fget = fget
- self.fset = fset
- self.fdel = fdel
- self.expr = expr or fget
- util.update_wrapper(self, fget)
-
- def __get__(self, instance, owner):
- if instance is None:
- return self.expr(owner)
- else:
- return self.fget(instance)
-
- def __set__(self, instance, value):
- self.fset(instance, value)
-
- def __delete__(self, instance):
- self.fdel(instance)
-
- def setter(self, fset):
- """Provide a modifying decorator that defines a value-setter method."""
-
- self.fset = fset
- return self
-
- def deleter(self, fdel):
- """Provide a modifying decorator that defines a value-deletion method."""
-
- self.fdel = fdel
- return self
-
- def expression(self, expr):
- """Provide a modifying decorator that defines a SQL-expression producing method."""
-
- self.expr = expr
- return self
-
- def comparator(self, comparator):
- """Provide a modifying decorator that defines a custom comparator producing method.
-
- The return value of the decorated method should be an instance of
- :class:`~.hybrid.Comparator`.
-
- """
-
- proxy_attr = attributes.\
- create_proxied_attribute(self)
- def expr(owner):
- return proxy_attr(owner, self.__name__, self, comparator(owner))
- self.expr = expr
- return self
-
-
-class Comparator(interfaces.PropComparator):
- """A helper class that allows easy construction of custom :class:`~.orm.interfaces.PropComparator`
- classes for usage with hybrids."""
-
-
- def __init__(self, expression):
- self.expression = expression
-
- def __clause_element__(self):
- expr = self.expression
- while hasattr(expr, '__clause_element__'):
- expr = expr.__clause_element__()
- return expr
-
- def adapted(self, adapter):
- # interesting....
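-        # (editor's note: returning self unchanged appears to mean hybrid
-        # comparators do not adapt themselves to aliased entities)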
- return self
-
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/mutable.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/mutable.py
deleted file mode 100755
index 078f9f3a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/mutable.py
+++ /dev/null
@@ -1,554 +0,0 @@
-# ext/mutable.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provide support for tracking of in-place changes to scalar values,
-which are propagated into ORM change events on owning parent objects.
-
-The :mod:`sqlalchemy.ext.mutable` extension replaces SQLAlchemy's legacy approach to in-place
-mutations of scalar values, established by the :class:`.types.MutableType`
-class as well as the ``mutable=True`` type flag, with a system that allows
-change events to be propagated from the value to the owning parent, thereby
-removing the need for the ORM to maintain copies of values as well as the very
-expensive requirement of scanning through all "mutable" values on each flush
-call, looking for changes.
-
-.. _mutable_scalars:
-
-Establishing Mutability on Scalar Column Values
-===============================================
-
-A typical example of a "mutable" structure is a Python dictionary.
-Following the example introduced in :ref:`types_toplevel`, we
-begin with a custom type that marshals Python dictionaries into
-JSON strings before being persisted::
-
- from sqlalchemy.types import TypeDecorator, VARCHAR
- import json
-
- class JSONEncodedDict(TypeDecorator):
- "Represents an immutable structure as a json-encoded string."
-
- impl = VARCHAR
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- value = json.dumps(value)
- return value
-
- def process_result_value(self, value, dialect):
- if value is not None:
- value = json.loads(value)
- return value
-
-The usage of ``json`` is only for the purposes of example. The :mod:`sqlalchemy.ext.mutable`
-extension can be used
-with any type whose target Python type may be mutable, including
-:class:`.PickleType`, :class:`.postgresql.ARRAY`, etc.
-
-When using the :mod:`sqlalchemy.ext.mutable` extension, the value itself
-tracks all parents which reference it. Here we will replace the usage
-of plain Python dictionaries with a dict subclass that implements
-the :class:`.Mutable` mixin::
-
- import collections
- from sqlalchemy.ext.mutable import Mutable
-
- class MutationDict(Mutable, dict):
- @classmethod
- def coerce(cls, key, value):
- "Convert plain dictionaries to MutationDict."
-
- if not isinstance(value, MutationDict):
- if isinstance(value, dict):
- return MutationDict(value)
-
- # this call will raise ValueError
- return Mutable.coerce(key, value)
- else:
- return value
-
- def __setitem__(self, key, value):
- "Detect dictionary set events and emit change events."
-
- dict.__setitem__(self, key, value)
- self.changed()
-
- def __delitem__(self, key):
- "Detect dictionary del events and emit change events."
-
- dict.__delitem__(self, key)
- self.changed()
-
-The above dictionary class takes the approach of subclassing the Python
-built-in ``dict`` to produce a dict
-subclass which routes all mutation events through ``__setitem__``. There are
-many variants on this approach, such as subclassing ``UserDict.UserDict``,
-the newer ``collections.MutableMapping``, etc. The part that's important to this
-example is that the :meth:`.Mutable.changed` method is called whenever an in-place change to the
-data structure takes place.
-
-We also redefine the :meth:`.Mutable.coerce` method which will be used to
-convert any values that are not instances of ``MutationDict``, such
-as the plain dictionaries returned by the ``json`` module, into the
-appropriate type. Defining this method is optional; we could just as well have created our
-``JSONEncodedDict`` such that it always returns an instance of ``MutationDict``,
-and additionally ensured that all calling code uses ``MutationDict``
-explicitly. When :meth:`.Mutable.coerce` is not overridden, any values
-applied to a parent object which are not instances of the mutable type
-will raise a ``ValueError``.
-
-Our new ``MutationDict`` type offers a class method
-:meth:`~.Mutable.as_mutable` which we can use within column metadata
-to associate with types. This method grabs the given type object or
-class and associates a listener that will detect all future mappings
-of this type, applying event listening instrumentation to the mapped
-attribute. For example, with classical table metadata::
-
- from sqlalchemy import Table, Column, Integer
-
- my_data = Table('my_data', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', MutationDict.as_mutable(JSONEncodedDict))
- )
-
-Above, :meth:`~.Mutable.as_mutable` returns an instance of ``JSONEncodedDict``
-(if the type object was not an instance already), which will intercept any
-attributes which are mapped against this type. Below we establish a simple
-mapping against the ``my_data`` table::
-
-    from sqlalchemy.orm import mapper
-
- class MyDataClass(object):
- pass
-
- # associates mutation listeners with MyDataClass.data
- mapper(MyDataClass, my_data)
-
-The ``MyDataClass.data`` member will now be notified of in place changes
-to its value.
-
-There's no difference in usage when using declarative::
-
- from sqlalchemy.ext.declarative import declarative_base
-
- Base = declarative_base()
-
- class MyDataClass(Base):
- __tablename__ = 'my_data'
- id = Column(Integer, primary_key=True)
- data = Column(MutationDict.as_mutable(JSONEncodedDict))
-
-Any in-place changes to the ``MyDataClass.data`` member
-will flag the attribute as "dirty" on the parent object::
-
- >>> from sqlalchemy.orm import Session
-
- >>> sess = Session()
- >>> m1 = MyDataClass(data={'value1':'foo'})
- >>> sess.add(m1)
- >>> sess.commit()
-
- >>> m1.data['value1'] = 'bar'
-    >>> m1 in sess.dirty
- True
-
-The ``MutationDict`` can be associated with all future instances
-of ``JSONEncodedDict`` in one step, using :meth:`~.Mutable.associate_with`. This
-is similar to :meth:`~.Mutable.as_mutable` except it will intercept
-all occurrences of ``MutationDict`` in all mappings unconditionally, without
-the need to declare it individually::
-
- MutationDict.associate_with(JSONEncodedDict)
-
- class MyDataClass(Base):
- __tablename__ = 'my_data'
- id = Column(Integer, primary_key=True)
- data = Column(JSONEncodedDict)
-
-
-Supporting Pickling
---------------------
-
-The :mod:`sqlalchemy.ext.mutable` extension relies upon the placement
-of a ``weakref.WeakKeyDictionary`` upon the value object, which
-stores a mapping of parent mapped objects keyed to the attribute name under
-which they are associated with this value. ``WeakKeyDictionary`` objects are
-not picklable, due to the fact that they contain weakrefs and function
-callbacks. In our case, this is a good thing, since if this dictionary were
-picklable, it could lead to an excessively large pickle size for our value
-objects that are pickled by themselves outside of the context of the parent.
-The developer's responsibility here is only to provide a ``__getstate__`` method
-that excludes the :meth:`~.MutableBase._parents` collection from the pickle
-stream::
-
- class MyMutableType(Mutable):
- def __getstate__(self):
- d = self.__dict__.copy()
- d.pop('_parents', None)
- return d
-
-With our dictionary example, we need to return the contents of the dict itself
-(and also restore them on __setstate__)::
-
- class MutationDict(Mutable, dict):
- # ....
-
- def __getstate__(self):
- return dict(self)
-
- def __setstate__(self, state):
- self.update(state)
-
-In the case that our mutable value object is pickled as it is attached to one
-or more parent objects that are also part of the pickle, the :class:`.Mutable`
-mixin will re-establish the :attr:`.Mutable._parents` collection on each value
-object as the owning parents themselves are unpickled.
-
-.. _mutable_composites:
-
-Establishing Mutability on Composites
-=====================================
-
-Composites are a special ORM feature which allow a single scalar attribute to
-be assigned an object value which represents information "composed" from one
-or more columns from the underlying mapped table. The usual example is that of
-a geometric "point", and is introduced in :ref:`mapper_composite`.
-
-As of SQLAlchemy 0.7, the internals of :func:`.orm.composite` have been
-greatly simplified and in-place mutation detection is no longer enabled by
-default; instead, the user-defined value must detect changes on its own and
-propagate them to all owning parents. The :mod:`sqlalchemy.ext.mutable`
-extension provides the helper class :class:`.MutableComposite`, which is a
-slight variant on the :class:`.Mutable` class.
-
-As is the case with :class:`.Mutable`, the user-defined composite class
-subclasses :class:`.MutableComposite` as a mixin, and detects and delivers
-change events to its parents via the :meth:`.MutableComposite.changed` method.
-In the case of a composite class, the detection is usually via the usage of
-Python descriptors (i.e. ``@property``), or alternatively via the special
-Python method ``__setattr__()``. Below we expand upon the ``Point`` class
-introduced in :ref:`mapper_composite` to subclass :class:`.MutableComposite`
-and to also route attribute set events via ``__setattr__`` to the
-:meth:`.MutableComposite.changed` method::
-
- from sqlalchemy.ext.mutable import MutableComposite
-
- class Point(MutableComposite):
- def __init__(self, x, y):
- self.x = x
- self.y = y
-
- def __setattr__(self, key, value):
- "Intercept set events"
-
- # set the attribute
- object.__setattr__(self, key, value)
-
- # alert all parents to the change
- self.changed()
-
- def __composite_values__(self):
- return self.x, self.y
-
- def __eq__(self, other):
- return isinstance(other, Point) and \\
- other.x == self.x and \\
- other.y == self.y
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
-The :class:`.MutableComposite` class uses a Python metaclass to automatically
-establish listeners for any usage of :func:`.orm.composite` that specifies our
-``Point`` type. Below, when ``Point`` is mapped to the ``Vertex`` class,
-listeners are established which will route change events from ``Point``
-objects to each of the ``Vertex.start`` and ``Vertex.end`` attributes::
-
- from sqlalchemy.orm import composite, mapper
-    from sqlalchemy import Table, Column, Integer
-
- vertices = Table('vertices', metadata,
- Column('id', Integer, primary_key=True),
- Column('x1', Integer),
- Column('y1', Integer),
- Column('x2', Integer),
- Column('y2', Integer),
- )
-
- class Vertex(object):
- pass
-
- mapper(Vertex, vertices, properties={
- 'start': composite(Point, vertices.c.x1, vertices.c.y1),
- 'end': composite(Point, vertices.c.x2, vertices.c.y2)
- })
-
-Any in-place changes to the ``Vertex.start`` or ``Vertex.end`` members
-will flag the attribute as "dirty" on the parent object::
-
- >>> from sqlalchemy.orm import Session
-
- >>> sess = Session()
- >>> v1 = Vertex(start=Point(3, 4), end=Point(12, 15))
- >>> sess.add(v1)
- >>> sess.commit()
-
- >>> v1.end.x = 8
-    >>> v1 in sess.dirty
- True
-
-Supporting Pickling
---------------------
-
-As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
-class uses a ``weakref.WeakKeyDictionary`` available via the
-:meth:`.MutableBase._parents` attribute which isn't picklable. If we need to
-pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
-to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
-Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
-the minimal form of our ``Point`` class::
-
- class Point(MutableComposite):
- # ...
-
- def __getstate__(self):
- return self.x, self.y
-
- def __setstate__(self, state):
- self.x, self.y = state
-
-As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
-pickling process of the parent's object-relational state so that the
-:meth:`.MutableBase._parents` collection is restored to all ``Point`` objects.
-
-"""
-from sqlalchemy.orm.attributes import flag_modified
-from sqlalchemy import event, types
-from sqlalchemy.orm import mapper, object_mapper
-from sqlalchemy.util import memoized_property
-import weakref
-
-class MutableBase(object):
- """Common base class to :class:`.Mutable` and :class:`.MutableComposite`."""
-
- @memoized_property
- def _parents(self):
- """Dictionary of parent object->attribute name on the parent.
-
- This attribute is a so-called "memoized" property. It initializes
- itself with a new ``weakref.WeakKeyDictionary`` the first time
- it is accessed, returning the same object upon subsequent access.
-
- """
-
- return weakref.WeakKeyDictionary()
-
- @classmethod
- def _listen_on_attribute(cls, attribute, coerce):
- """Establish this type as a mutation listener for the given
- mapped descriptor.
-
- """
- key = attribute.key
- parent_cls = attribute.class_
-
- def load(state, *args):
- """Listen for objects loaded or refreshed.
-
- Wrap the target data member's value with
- ``Mutable``.
-
- """
- val = state.dict.get(key, None)
- if val is not None:
- if coerce:
- val = cls.coerce(key, val)
- state.dict[key] = val
- val._parents[state.obj()] = key
-
- def set(target, value, oldvalue, initiator):
- """Listen for set/replace events on the target
- data member.
-
- Establish a weak reference to the parent object
-            on the incoming value, and remove it from the
-            outgoing one.
-
- """
- if not isinstance(value, cls):
- value = cls.coerce(key, value)
- if value is not None:
- value._parents[target.obj()] = key
- if isinstance(oldvalue, cls):
- oldvalue._parents.pop(target.obj(), None)
- return value
-
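-        # editor's note: the pickle/unpickle listeners below stash the
-        # mutable values in the pickled state so that each value's _parents
-        # collection can be re-established when the owning parent is
-        # unpickled (see "Supporting Pickling" in the module docstring)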
- def pickle(state, state_dict):
- val = state.dict.get(key, None)
- if val is not None:
- if 'ext.mutable.values' not in state_dict:
- state_dict['ext.mutable.values'] = []
- state_dict['ext.mutable.values'].append(val)
-
- def unpickle(state, state_dict):
- if 'ext.mutable.values' in state_dict:
- for val in state_dict['ext.mutable.values']:
- val._parents[state.obj()] = key
-
- event.listen(parent_cls, 'load', load, raw=True)
- event.listen(parent_cls, 'refresh', load, raw=True)
- event.listen(attribute, 'set', set, raw=True, retval=True)
- event.listen(parent_cls, 'pickle', pickle, raw=True)
- event.listen(parent_cls, 'unpickle', unpickle, raw=True)
-
-class Mutable(MutableBase):
- """Mixin that defines transparent propagation of change
- events to a parent object.
-
- See the example in :ref:`mutable_scalars` for usage information.
-
- """
-
- def changed(self):
- """Subclasses should call this method whenever change events occur."""
-
- for parent, key in self._parents.items():
- flag_modified(parent, key)
-
- @classmethod
- def coerce(cls, key, value):
- """Given a value, coerce it into this type.
-
- By default raises ValueError.
- """
- if value is None:
- return None
- raise ValueError("Attribute '%s' does not accept objects of type %s" % (key, type(value)))
-
- @classmethod
- def associate_with_attribute(cls, attribute):
- """Establish this type as a mutation listener for the given
- mapped descriptor.
-
- """
- cls._listen_on_attribute(attribute, True)
-
- @classmethod
- def associate_with(cls, sqltype):
- """Associate this wrapper with all future mapped columns
- of the given type.
-
- This is a convenience method that calls ``associate_with_attribute`` automatically.
-
- .. warning:: The listeners established by this method are *global*
- to all mappers, and are *not* garbage collected. Only use
- :meth:`.associate_with` for types that are permanent to an application,
-           not with ad-hoc types, otherwise this will cause unbounded growth
- in memory usage.
-
- """
-
- def listen_for_type(mapper, class_):
- for prop in mapper.iterate_properties:
- if hasattr(prop, 'columns'):
- if isinstance(prop.columns[0].type, sqltype):
- cls.associate_with_attribute(getattr(class_, prop.key))
- break
-
- event.listen(mapper, 'mapper_configured', listen_for_type)
-
- @classmethod
- def as_mutable(cls, sqltype):
- """Associate a SQL type with this mutable Python type.
-
- This establishes listeners that will detect ORM mappings against
- the given type, adding mutation event trackers to those mappings.
-
- The type is returned, unconditionally as an instance, so that
- :meth:`.as_mutable` can be used inline::
-
- Table('mytable', metadata,
- Column('id', Integer, primary_key=True),
- Column('data', MyMutableType.as_mutable(PickleType))
- )
-
- Note that the returned type is always an instance, even if a class
- is given, and that only columns which are declared specifically with that
- type instance receive additional instrumentation.
-
- To associate a particular mutable type with all occurrences of a
- particular type, use the :meth:`.Mutable.associate_with` classmethod
- of the particular :meth:`.Mutable` subclass to establish a global
- association.
-
- .. warning:: The listeners established by this method are *global*
- to all mappers, and are *not* garbage collected. Only use
- :meth:`.as_mutable` for types that are permanent to an application,
-          not with ad-hoc types, otherwise this will cause unbounded growth
- in memory usage.
-
- """
- sqltype = types.to_instance(sqltype)
-
- def listen_for_type(mapper, class_):
- for prop in mapper.iterate_properties:
- if hasattr(prop, 'columns'):
- if prop.columns[0].type is sqltype:
- cls.associate_with_attribute(getattr(class_, prop.key))
- break
-
- event.listen(mapper, 'mapper_configured', listen_for_type)
-
- return sqltype
-
-class _MutableCompositeMeta(type):
- def __init__(cls, classname, bases, dict_):
- cls._setup_listeners()
- return type.__init__(cls, classname, bases, dict_)
-
-class MutableComposite(MutableBase):
- """Mixin that defines transparent propagation of change
- events on a SQLAlchemy "composite" object to its
- owning parent or parents.
-
- See the example in :ref:`mutable_composites` for usage information.
-
- .. warning:: The listeners established by the :class:`.MutableComposite`
- class are *global* to all mappers, and are *not* garbage collected. Only use
- :class:`.MutableComposite` for types that are permanent to an application,
-       not with ad-hoc types, otherwise this will cause unbounded growth
- in memory usage.
-
- """
- __metaclass__ = _MutableCompositeMeta
-
- def changed(self):
- """Subclasses should call this method whenever change events occur."""
-
- for parent, key in self._parents.items():
-
- prop = object_mapper(parent).get_property(key)
- for value, attr_name in zip(
- self.__composite_values__(),
- prop._attribute_keys):
- setattr(parent, attr_name, value)
-
- @classmethod
- def _setup_listeners(cls):
- """Associate this wrapper with all future mapped composites
- of the given type.
-
-        This listens for mapper configuration events and applies
-        ``_listen_on_attribute`` to matching composite attributes
-        automatically.
-
- """
-
- def listen_for_type(mapper, class_):
- for prop in mapper.iterate_properties:
- if hasattr(prop, 'composite_class') and issubclass(prop.composite_class, cls):
- cls._listen_on_attribute(getattr(class_, prop.key), False)
-
- event.listen(mapper, 'mapper_configured', listen_for_type)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/orderinglist.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/orderinglist.py
deleted file mode 100755
index ce63b88e..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/orderinglist.py
+++ /dev/null
@@ -1,321 +0,0 @@
-# ext/orderinglist.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""A custom list that manages index/position information for its children.
-
-:author: Jason Kirtland
-
-``orderinglist`` is a helper for mutable ordered relationships. It will intercept
-list operations performed on a relationship collection and automatically
-synchronize changes in list position with an attribute on the related objects.
-(See :ref:`advdatamapping_entitycollections` for more information on the general pattern.)
-
-Example: Two tables that store slides in a presentation. Each slide
-has a number of bullet points, displayed in order by the 'position'
-column on the bullets table. These bullets can be inserted and re-ordered
-by your end users, and you need to update the 'position' column of all
-affected rows when changes are made.
-
-.. sourcecode:: python+sql
-
- slides_table = Table('Slides', metadata,
- Column('id', Integer, primary_key=True),
- Column('name', String))
-
- bullets_table = Table('Bullets', metadata,
- Column('id', Integer, primary_key=True),
- Column('slide_id', Integer, ForeignKey('Slides.id')),
- Column('position', Integer),
- Column('text', String))
-
- class Slide(object):
- pass
- class Bullet(object):
- pass
-
- mapper(Slide, slides_table, properties={
- 'bullets': relationship(Bullet, order_by=[bullets_table.c.position])
- })
- mapper(Bullet, bullets_table)
-
-The standard relationship mapping will produce a list-like attribute on each Slide
-containing all related Bullets, but coping with changes in ordering is totally
-your responsibility. If you insert a Bullet into that list, there is no
-magic: it won't have a position attribute unless you assign it one, and
-you'll need to manually renumber all the subsequent Bullets in the list to
-accommodate the insert.
-
-An ``orderinglist`` can automate this and manage the 'position' attribute on all
-related bullets for you.
-
-.. sourcecode:: python+sql
-
- mapper(Slide, slides_table, properties={
- 'bullets': relationship(Bullet,
- collection_class=ordering_list('position'),
- order_by=[bullets_table.c.position])
- })
- mapper(Bullet, bullets_table)
-
-    >>> s = Slide()
-    >>> s.bullets.append(Bullet())
-    >>> s.bullets.append(Bullet())
-    >>> s.bullets[1].position
-    1
-    >>> s.bullets.insert(1, Bullet())
-    >>> s.bullets[2].position
-    2
-
-Use the ``ordering_list`` function to set up the ``collection_class`` on relationships
-(as in the mapper example above). This implementation depends on the list
-starting in the proper order, so be SURE to put an order_by on your relationship.
-
-.. warning:: ``ordering_list`` only provides limited functionality when a primary
- key column or unique column is the target of the sort. Since changing the order of
- entries often means that two rows must trade values, this is not possible when
- the value is constrained by a primary key or unique constraint, since one of the rows
- would temporarily have to point to a third available value so that the other row
- could take its old value. ``ordering_list`` doesn't do any of this for you,
- nor does SQLAlchemy itself.
-
-``ordering_list`` takes the name of the related object's ordering attribute as
-an argument. By default, the zero-based integer index of the object's
-position in the ``ordering_list`` is synchronized with the ordering attribute:
-index 0 will get position 0, index 1 position 1, etc. To start numbering at 1
-or some other integer, provide ``count_from=1``.
-
-Ordering values are not limited to incrementing integers. Almost any scheme
-can be implemented by supplying a custom ``ordering_func`` that maps a Python list
-index to any value you require.
-
-
-
-
-"""
-from sqlalchemy.orm.collections import collection
-from sqlalchemy import util
-
-__all__ = [ 'ordering_list' ]
-
-
-def ordering_list(attr, count_from=None, **kw):
- """Prepares an OrderingList factory for use in mapper definitions.
-
- Returns an object suitable for use as an argument to a Mapper relationship's
- ``collection_class`` option. Arguments are:
-
- attr
- Name of the mapped attribute to use for storage and retrieval of
- ordering information
-
- count_from (optional)
- Set up an integer-based ordering, starting at ``count_from``. For
- example, ``ordering_list('pos', count_from=1)`` would create a 1-based
- list in SQL, storing the value in the 'pos' column. Ignored if
- ``ordering_func`` is supplied.
-
- Passes along any keyword arguments to ``OrderingList`` constructor.
- """
-
- kw = _unsugar_count_from(count_from=count_from, **kw)
- return lambda: OrderingList(attr, **kw)
-
-# Ordering utility functions
-def count_from_0(index, collection):
- """Numbering function: consecutive integers starting at 0."""
-
- return index
-
-def count_from_1(index, collection):
- """Numbering function: consecutive integers starting at 1."""
-
- return index + 1
-
-def count_from_n_factory(start):
- """Numbering function: consecutive integers starting at arbitrary start."""
-
- def f(index, collection):
- return index + start
- try:
- f.__name__ = 'count_from_%i' % start
- except TypeError:
- pass
- return f
-
-def _unsugar_count_from(**kw):
- """Builds counting functions from keywrod arguments.
-
- Keyword argument filter, prepares a simple ``ordering_func`` from a
- ``count_from`` argument, otherwise passes ``ordering_func`` on unchanged.
- """
-
- count_from = kw.pop('count_from', None)
- if kw.get('ordering_func', None) is None and count_from is not None:
- if count_from == 0:
- kw['ordering_func'] = count_from_0
- elif count_from == 1:
- kw['ordering_func'] = count_from_1
- else:
- kw['ordering_func'] = count_from_n_factory(count_from)
- return kw
-
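-# Editor's sketch of a custom ordering_func (purely illustrative): store the
-# letters 'a', 'b', 'c', ... instead of integers:
-#
-#     def alpha_ordering(index, collection):
-#         return chr(ord('a') + index)
-#
-#     ordering_list('position', ordering_func=alpha_ordering)
-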
-class OrderingList(list):
- """A custom list that manages position information for its children.
-
- See the module and __init__ documentation for more details. The
- ``ordering_list`` factory function is used to configure ``OrderingList``
- collections in ``mapper`` relationship definitions.
-
- """
-
- def __init__(self, ordering_attr=None, ordering_func=None,
- reorder_on_append=False):
- """A custom list that manages position information for its children.
-
- ``OrderingList`` is a ``collection_class`` list implementation that
- syncs position in a Python list with a position attribute on the
- mapped objects.
-
- This implementation relies on the list starting in the proper order,
- so be **sure** to put an ``order_by`` on your relationship.
-
- ordering_attr
- Name of the attribute that stores the object's order in the
- relationship.
-
- ordering_func
- Optional. A function that maps the position in the Python list to a
- value to store in the ``ordering_attr``. Values returned are
- usually (but need not be!) integers.
-
- An ``ordering_func`` is called with two positional parameters: the
- index of the element in the list, and the list itself.
-
- If omitted, Python list indexes are used for the attribute values.
- Two basic pre-built numbering functions are provided in this module:
- ``count_from_0`` and ``count_from_1``. For more exotic examples
- like stepped numbering, alphabetical and Fibonacci numbering, see
- the unit tests.
-
- reorder_on_append
- Default False. When appending an object with an existing (non-None)
- ordering value, that value will be left untouched unless
- ``reorder_on_append`` is true. This is an optimization to avoid a
- variety of dangerous unexpected database writes.
-
- SQLAlchemy will add instances to the list via append() when your
- object loads. If for some reason the result set from the database
- skips a step in the ordering (say, row '1' is missing but you get
- '2', '3', and '4'), reorder_on_append=True would immediately
- renumber the items to '1', '2', '3'. If you have multiple sessions
- making changes, any of whom happen to load this collection even in
- passing, all of the sessions would try to "clean up" the numbering
- in their commits, possibly causing all but one to fail with a
- concurrent modification error. Spooky action at a distance.
-
-          We recommend leaving this set to the default of False, and calling
-          ``reorder()`` manually if you're doing ``append()`` operations with
-          previously ordered instances, or when doing some housekeeping after
-          manual SQL operations.
-
- """
- self.ordering_attr = ordering_attr
- if ordering_func is None:
- ordering_func = count_from_0
- self.ordering_func = ordering_func
- self.reorder_on_append = reorder_on_append
-
- # More complex serialization schemes (multi column, e.g.) are possible by
- # subclassing and reimplementing these two methods.
- def _get_order_value(self, entity):
- return getattr(entity, self.ordering_attr)
-
- def _set_order_value(self, entity, value):
- setattr(entity, self.ordering_attr, value)
-
- def reorder(self):
- """Synchronize ordering for the entire collection.
-
- Sweeps through the list and ensures that each object has accurate
- ordering information set.
-
- """
- for index, entity in enumerate(self):
- self._order_entity(index, entity, True)
-
- # As of 0.5, _reorder is no longer semi-private
- _reorder = reorder
-
- def _order_entity(self, index, entity, reorder=True):
- have = self._get_order_value(entity)
-
- # Don't disturb existing ordering if reorder is False
- if have is not None and not reorder:
- return
-
- should_be = self.ordering_func(index, self)
- if have != should_be:
- self._set_order_value(entity, should_be)
-
- def append(self, entity):
- super(OrderingList, self).append(entity)
- self._order_entity(len(self) - 1, entity, self.reorder_on_append)
-
- def _raw_append(self, entity):
- """Append without any ordering behavior."""
-
- super(OrderingList, self).append(entity)
- _raw_append = collection.adds(1)(_raw_append)
-
- def insert(self, index, entity):
- super(OrderingList, self).insert(index, entity)
- self._reorder()
-
- def remove(self, entity):
- super(OrderingList, self).remove(entity)
- self._reorder()
-
- def pop(self, index=-1):
- entity = super(OrderingList, self).pop(index)
- self._reorder()
- return entity
-
- def __setitem__(self, index, entity):
- if isinstance(index, slice):
- step = index.step or 1
- start = index.start or 0
- if start < 0:
- start += len(self)
- stop = index.stop or len(self)
- if stop < 0:
- stop += len(self)
-
-            # pair each target index with its corresponding new value
-            for i, e in zip(xrange(start, stop, step), entity):
-                self.__setitem__(i, e)
- else:
- self._order_entity(index, entity, True)
- super(OrderingList, self).__setitem__(index, entity)
-
- def __delitem__(self, index):
- super(OrderingList, self).__delitem__(index)
- self._reorder()
-
- # Py2K
- def __setslice__(self, start, end, values):
- super(OrderingList, self).__setslice__(start, end, values)
- self._reorder()
-
- def __delslice__(self, start, end):
- super(OrderingList, self).__delslice__(start, end)
- self._reorder()
- # end Py2K
-
- for func_name, func in locals().items():
- if (util.callable(func) and func.func_name == func_name and
- not func.__doc__ and hasattr(list, func_name)):
- func.__doc__ = getattr(list, func_name).__doc__
- del func_name, func
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/serializer.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/serializer.py
deleted file mode 100755
index 077a0fd9..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/serializer.py
+++ /dev/null
@@ -1,161 +0,0 @@
-# ext/serializer.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Serializer/Deserializer objects for usage with SQLAlchemy query structures,
-allowing "contextual" deserialization.
-
-Any SQLAlchemy query structure, either based on sqlalchemy.sql.*
-or sqlalchemy.orm.*, can be used. The mappers, Tables, Columns, Session
-etc. which are referenced by the structure are not persisted in serialized
-form, but are instead re-associated with the query structure
-when it is deserialized.
-
-Usage is nearly the same as that of the standard Python pickle module::
-
- from sqlalchemy.ext.serializer import loads, dumps
- metadata = MetaData(bind=some_engine)
- Session = scoped_session(sessionmaker())
-
- # ... define mappers
-
- query = Session.query(MyClass).filter(MyClass.somedata=='foo').order_by(MyClass.sortkey)
-
- # pickle the query
- serialized = dumps(query)
-
- # unpickle. Pass in metadata + scoped_session
- query2 = loads(serialized, metadata, Session)
-
- print query2.all()
-
-The same restrictions as when using raw pickle apply; mapped classes must
-themselves be pickleable, meaning they are importable from a module-level
-namespace.
-
-The serializer module is only appropriate for query structures. It is not
-needed for:
-
-* instances of user-defined classes. These contain no references to engines,
- sessions or expression constructs in the typical case and can be serialized directly.
-
-* Table metadata that is to be loaded entirely from the serialized structure (i.e. is
- not already declared in the application). Regular pickle.loads()/dumps() can
- be used to fully dump any ``MetaData`` object, typically one which was reflected
- from an existing database at some previous point in time. The serializer module
- is specifically for the opposite case, where the Table metadata is already present
- in memory.
-
-"""
-
-from sqlalchemy.orm import class_mapper, Query
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.mapper import Mapper
-from sqlalchemy.orm.attributes import QueryableAttribute
-from sqlalchemy import Table, Column
-from sqlalchemy.engine import Engine
-from sqlalchemy.util import pickle
-import re
-import base64
-# Py3K
-#from io import BytesIO as byte_buffer
-# Py2K
-from cStringIO import StringIO as byte_buffer
-# end Py2K
-
-# Py3K
-#def b64encode(x):
-# return base64.b64encode(x).decode('ascii')
-#def b64decode(x):
-# return base64.b64decode(x.encode('ascii'))
-# Py2K
-b64encode = base64.b64encode
-b64decode = base64.b64decode
-# end Py2K
-
-__all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
-
-
-def Serializer(*args, **kw):
- pickler = pickle.Pickler(*args, **kw)
-
- def persistent_id(obj):
- #print "serializing:", repr(obj)
- if isinstance(obj, QueryableAttribute):
- cls = obj.impl.class_
- key = obj.impl.key
- id = "attribute:" + key + ":" + b64encode(pickle.dumps(cls))
- elif isinstance(obj, Mapper) and not obj.non_primary:
- id = "mapper:" + b64encode(pickle.dumps(obj.class_))
- elif isinstance(obj, Table):
- id = "table:" + str(obj)
- elif isinstance(obj, Column) and isinstance(obj.table, Table):
- id = "column:" + str(obj.table) + ":" + obj.key
- elif isinstance(obj, Session):
- id = "session:"
- elif isinstance(obj, Engine):
- id = "engine:"
- else:
- return None
- return id
-
- pickler.persistent_id = persistent_id
- return pickler
-
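-# The persistent ids produced above are plain strings; a hypothetical
-# ``users.name`` column, for example, is tokenized as "column:users:name".
-# The ``our_ids`` pattern below matches these tokens so the Deserializer
-# can resolve each one against the current metadata and session.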
-our_ids = re.compile(r'(mapper|table|column|session|attribute|engine):(.*)')
-
-def Deserializer(file, metadata=None, scoped_session=None, engine=None):
- unpickler = pickle.Unpickler(file)
-
- def get_engine():
- if engine:
- return engine
- elif scoped_session and scoped_session().bind:
- return scoped_session().bind
- elif metadata and metadata.bind:
- return metadata.bind
- else:
- return None
-
- def persistent_load(id):
- m = our_ids.match(id)
- if not m:
- return None
- else:
- type_, args = m.group(1, 2)
- if type_ == 'attribute':
- key, clsarg = args.split(":")
- cls = pickle.loads(b64decode(clsarg))
- return getattr(cls, key)
- elif type_ == "mapper":
- cls = pickle.loads(b64decode(args))
- return class_mapper(cls)
- elif type_ == "table":
- return metadata.tables[args]
- elif type_ == "column":
- table, colname = args.split(':')
- return metadata.tables[table].c[colname]
- elif type_ == "session":
- return scoped_session()
- elif type_ == "engine":
- return get_engine()
- else:
- raise Exception("Unknown token: %s" % type_)
- unpickler.persistent_load = persistent_load
- return unpickler
-
-def dumps(obj, protocol=0):
- buf = byte_buffer()
- pickler = Serializer(buf, protocol)
- pickler.dump(obj)
- return buf.getvalue()
-
-def loads(data, metadata=None, scoped_session=None, engine=None):
- buf = byte_buffer(data)
- unpickler = Deserializer(buf, metadata, scoped_session, engine)
- return unpickler.load()
-
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/sqlsoup.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/sqlsoup.py
deleted file mode 100755
index f76a175a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/ext/sqlsoup.py
+++ /dev/null
@@ -1,797 +0,0 @@
-# ext/sqlsoup.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Introduction
-============
-
-SqlSoup provides a convenient way to access existing database
-tables without having to declare table or mapper classes ahead
-of time. It is built on top of the SQLAlchemy ORM and provides a
-super-minimalistic interface to an existing database.
-
-SqlSoup effectively provides a coarse-grained alternative
-interface to working with the SQLAlchemy ORM, offering a "self
-configuring" style for extremely rudimentary operations. It's
-somewhat akin to a "super novice mode" version of the ORM. While
-SqlSoup can be very handy, users are strongly encouraged to use
-the full ORM for non-trivial applications.
-
-Suppose we have a database with users, books, and loans tables
-(corresponding to the PyWebOff dataset, if you're curious).
-
-Creating a SqlSoup gateway is just like creating an SQLAlchemy
-engine::
-
- >>> from sqlalchemy.ext.sqlsoup import SqlSoup
- >>> db = SqlSoup('sqlite:///:memory:')
-
-or, you can re-use an existing engine::
-
- >>> db = SqlSoup(engine)
-
-You can optionally specify a schema within the database for your
-SqlSoup::
-
- >>> db.schema = myschemaname
-
-Loading objects
-===============
-
-Loading objects is as easy as this::
-
- >>> users = db.users.all()
- >>> users.sort()
- >>> users
- [
- MappedUsers(name=u'Joe Student',email=u'student@example.edu',
- password=u'student',classname=None,admin=0),
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1)
- ]
-
-Of course, letting the database do the sort is better::
-
- >>> db.users.order_by(db.users.name).all()
- [
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1),
- MappedUsers(name=u'Joe Student',email=u'student@example.edu',
- password=u'student',classname=None,admin=0)
- ]
-
-Field access is intuitive::
-
- >>> users[0].email
- u'student@example.edu'
-
-Of course, you don't want to load all users very often. Let's
-add a WHERE clause. Let's also switch the order_by to DESC while
-we're at it::
-
- >>> from sqlalchemy import or_, and_, desc
- >>> where = or_(db.users.name=='Bhargan Basepair', db.users.email=='student@example.edu')
- >>> db.users.filter(where).order_by(desc(db.users.name)).all()
- [
- MappedUsers(name=u'Joe Student',email=u'student@example.edu',
- password=u'student',classname=None,admin=0),
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1)
- ]
-
-You can also use .first() (to retrieve only the first object
-from a query) or .one() (like .first() when you expect exactly one
-result -- it will raise an exception if zero or more than one row
-is returned)::
-
- >>> db.users.filter(db.users.name=='Bhargan Basepair').one()
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1)
-
-Since name is the primary key, this is equivalent to
-
- >>> db.users.get('Bhargan Basepair')
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1)
-
-This is also equivalent to
-
- >>> db.users.filter_by(name='Bhargan Basepair').one()
- MappedUsers(name=u'Bhargan Basepair',email=u'basepair@example.edu',
- password=u'basepair',classname=None,admin=1)
-
-filter_by is like filter, but takes kwargs instead of full
-clause expressions. This makes it more concise for simple
-queries like this, but you can't do complex queries like the
-or\_ above or non-equality based comparisons this way.
-
-Full query documentation
-------------------------
-
-Get, filter, filter_by, order_by, limit, and the rest of the
-query methods are explained in detail in
-:ref:`ormtutorial_querying`.
-
-Modifying objects
-=================
-
-Modifying objects is intuitive::
-
- >>> user = _
- >>> user.email = 'basepair+nospam@example.edu'
- >>> db.commit()
-
-(SqlSoup leverages the sophisticated SQLAlchemy unit-of-work
-code, so multiple updates to a single object will be turned into
-a single ``UPDATE`` statement when you commit.)
-
-To finish covering the basics, let's insert a new loan, then
-delete it::
-
- >>> book_id = db.books.filter_by(title='Regional Variation in Moss').first().id
- >>> db.loans.insert(book_id=book_id, user_name=user.name)
- MappedLoans(book_id=2,user_name=u'Bhargan Basepair',loan_date=None)
-
- >>> loan = db.loans.filter_by(book_id=2, user_name='Bhargan Basepair').one()
- >>> db.delete(loan)
- >>> db.commit()
-
-You can also delete rows that have not been loaded as objects.
-Let's do our insert/delete cycle once more, this time using the
-loans table's delete method. (For SQLAlchemy experts: note that
-no flush() call is required since this delete acts at the SQL
-level, not at the Mapper level.) The same where-clause
-construction rules apply here as to the select methods::
-
- >>> db.loans.insert(book_id=book_id, user_name=user.name)
- MappedLoans(book_id=2,user_name=u'Bhargan Basepair',loan_date=None)
- >>> db.loans.delete(db.loans.book_id==2)
-
-You can similarly update multiple rows at once. This will change the
-book_id to 1 in all loans whose book_id is 2::
-
- >>> db.loans.update(db.loans.book_id==2, book_id=1)
- >>> db.loans.filter_by(book_id=1).all()
- [MappedLoans(book_id=1,user_name=u'Joe Student',
- loan_date=datetime.datetime(2006, 7, 12, 0, 0))]
-
-
-Joins
-=====
-
-Occasionally, you will want to pull out a lot of data from related
-tables all at once. In this situation, it is far more efficient to
-have the database perform the necessary join. (Here we do not have *a
-lot of data* but hopefully the concept is still clear.) SQLAlchemy is
-smart enough to recognize that loans has a foreign key to users, and
-uses that as the join condition automatically::
-
- >>> join1 = db.join(db.users, db.loans, isouter=True)
- >>> join1.filter_by(name='Joe Student').all()
- [
- MappedJoin(name=u'Joe Student',email=u'student@example.edu',
- password=u'student',classname=None,admin=0,book_id=1,
- user_name=u'Joe Student',loan_date=datetime.datetime(2006, 7, 12, 0, 0))
- ]
-
-If you're unfortunate enough to be using MySQL with the default MyISAM
-storage engine, you'll have to specify the join condition manually,
-since MyISAM does not store foreign keys. Here's the same join again,
-with the join condition explicitly specified::
-
- >>> db.join(db.users, db.loans, db.users.name==db.loans.user_name, isouter=True)
- <class 'sqlalchemy.ext.sqlsoup.MappedJoin'>
-
-You can compose arbitrarily complex joins by combining Join objects
-with tables or other joins. Here we combine our first join with the
-books table::
-
- >>> join2 = db.join(join1, db.books)
- >>> join2.all()
- [
- MappedJoin(name=u'Joe Student',email=u'student@example.edu',
- password=u'student',classname=None,admin=0,book_id=1,
- user_name=u'Joe Student',loan_date=datetime.datetime(2006, 7, 12, 0, 0),
- id=1,title=u'Mustards I Have Known',published_year=u'1989',
- authors=u'Jones')
- ]
-
-If you join tables that have an identical column name, wrap your join
-with `with_labels`, to disambiguate columns with their table name
-(.c is short for .columns)::
-
- >>> db.with_labels(join1).c.keys()
- [u'users_name', u'users_email', u'users_password',
- u'users_classname', u'users_admin', u'loans_book_id',
- u'loans_user_name', u'loans_loan_date']
-
-You can also join directly to a labeled object::
-
- >>> labeled_loans = db.with_labels(db.loans)
- >>> db.join(db.users, labeled_loans, isouter=True).c.keys()
- [u'name', u'email', u'password', u'classname',
- u'admin', u'loans_book_id', u'loans_user_name', u'loans_loan_date']
-
-
-Relationships
-=============
-
-You can define relationships on SqlSoup classes:
-
- >>> db.users.relate('loans', db.loans)
-
-These can then be used like a normal SA property:
-
- >>> db.users.get('Joe Student').loans
- [MappedLoans(book_id=1,user_name=u'Joe Student',
- loan_date=datetime.datetime(2006, 7, 12, 0, 0))]
-
- >>> db.users.filter(~db.users.loans.any()).all()
- [MappedUsers(name=u'Bhargan Basepair',
- email='basepair+nospam@example.edu',
- password=u'basepair',classname=None,admin=1)]
-
-relate can take any options that the relationship function
-accepts in normal mapper definition:
-
- >>> del db._cache['users']
- >>> db.users.relate('loans', db.loans, order_by=db.loans.loan_date, cascade='all, delete-orphan')
-
-Advanced Use
-============
-
-Sessions, Transactions and Application Integration
---------------------------------------------------
-
-**Note:** please read and understand this section thoroughly
-before using SqlSoup in any web application.
-
-SqlSoup uses a ScopedSession to provide thread-local sessions.
-You can get a reference to the current one like this::
-
- >>> session = db.session
-
-The default session is available at the module level in SQLSoup,
-via::
-
- >>> from sqlalchemy.ext.sqlsoup import Session
-
-The configuration of this session is ``autoflush=True``,
-``autocommit=False``. This means when you work with the SqlSoup
-object, you need to call ``db.commit()`` in order to have
-changes persisted. You may also call ``db.rollback()`` to roll
-things back.
-
-Since the SqlSoup object's Session automatically enters into a
-transaction as soon as it's used, it is *essential* that you
-call ``commit()`` or ``rollback()`` on it when the work within a
-thread completes. This means all the guidelines for web
-application integration at :ref:`session_lifespan` must be
-followed.
-
-The SqlSoup object can have any session or scoped session
-configured onto it. This is of key importance when integrating
-with existing code or frameworks such as Pylons. If your
-application already has a ``Session`` configured, pass it to
-your SqlSoup object::
-
- >>> from myapplication import Session
- >>> db = SqlSoup(session=Session)
-
-If the ``Session`` is configured with ``autocommit=True``, use
-``flush()`` instead of ``commit()`` to persist changes - in this
-case, the ``Session`` closes out its transaction immediately and
-no external management is needed. ``rollback()`` is also not
-available. Configuring a new SQLSoup object in "autocommit" mode
-looks like::
-
- >>> from sqlalchemy.orm import scoped_session, sessionmaker
- >>> db = SqlSoup('sqlite://', session=scoped_session(sessionmaker(autoflush=False, expire_on_commit=False, autocommit=True)))
-
-
-Mapping arbitrary Selectables
------------------------------
-
-SqlSoup can map any SQLAlchemy :class:`.Selectable` with the map
-method. Let's map an :func:`.expression.select` object that uses an aggregate
-function; we'll use the SQLAlchemy :class:`.Table` that SqlSoup
-introspected as the basis. (Since we're not mapping to a simple
-table or join, we need to tell SQLAlchemy how to find the
-*primary key* which just needs to be unique within the select,
-and not necessarily correspond to a *real* PK in the database.)::
-
- >>> from sqlalchemy import select, func
- >>> b = db.books._table
- >>> s = select([b.c.published_year, func.count('*').label('n')], from_obj=[b], group_by=[b.c.published_year])
- >>> s = s.alias('years_with_count')
- >>> years_with_count = db.map(s, primary_key=[s.c.published_year])
- >>> years_with_count.filter_by(published_year='1989').all()
- [MappedBooks(published_year=u'1989',n=1)]
-
-Obviously if we just wanted to get a list of counts associated with
-book years once, raw SQL is going to be less work. The advantage of
-mapping a Select is reusability, both standalone and in Joins. (And if
-you go to full SQLAlchemy, you can perform mappings like this directly
-to your object models.)
-
-An easy way to save mapped selectables like this is to just hang them on
-your db object::
-
- >>> db.years_with_count = years_with_count
-
-Python is flexible like that!
-
-Raw SQL
--------
-
-SqlSoup works fine with SQLAlchemy's text construct, described
-in :ref:`sqlexpression_text`. You can also execute textual SQL
-directly using the `execute()` method, which corresponds to the
-`execute()` method on the underlying `Session`. Expressions here
-are expressed like ``text()`` constructs, using named parameters
-with colons::
-
- >>> rp = db.execute('select name, email from users where name like :name order by name', name='%Bhargan%')
- >>> for name, email in rp.fetchall(): print name, email
- Bhargan Basepair basepair+nospam@example.edu
-
-Or you can get at the current transaction's connection using
-`connection()`. This is the raw connection object which can
-accept any sort of SQL expression or raw SQL string passed to
-the database::
-
- >>> conn = db.connection()
-    >>> conn.execute("select name, email from users where name like ? order by name", '%Bhargan%')
-
-Dynamic table names
--------------------
-
-You can load a table whose name is specified at runtime with the
-entity() method:
-
- >>> tablename = 'loans'
- >>> db.entity(tablename) == db.loans
- True
-
-entity() also takes an optional schema argument. If none is
-specified, the default schema is used.
-
-"""
-
-from sqlalchemy import Table, MetaData, join
-from sqlalchemy import schema, sql, util
-from sqlalchemy.engine.base import Engine
-from sqlalchemy.orm import scoped_session, sessionmaker, mapper, \
- class_mapper, relationship, session,\
- object_session, attributes
-from sqlalchemy.orm.interfaces import MapperExtension, EXT_CONTINUE
-from sqlalchemy.exc import SQLAlchemyError, InvalidRequestError, ArgumentError
-from sqlalchemy.sql import expression
-
-
-__all__ = ['PKNotFoundError', 'SqlSoup']
-
-Session = scoped_session(sessionmaker(autoflush=True, autocommit=False))
-
-class AutoAdd(MapperExtension):
- def __init__(self, scoped_session):
- self.scoped_session = scoped_session
-
- def instrument_class(self, mapper, class_):
- class_.__init__ = self._default__init__(mapper)
-
- def _default__init__(ext, mapper):
- def __init__(self, **kwargs):
- for key, value in kwargs.iteritems():
- setattr(self, key, value)
- return __init__
-
- def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
- session = self.scoped_session()
- state = attributes.instance_state(instance)
- session._save_impl(state)
- return EXT_CONTINUE
-
- def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
- sess = object_session(instance)
- if sess:
- sess.expunge(instance)
- return EXT_CONTINUE
-
-class PKNotFoundError(SQLAlchemyError):
- pass
-
-def _ddl_error(cls):
- msg = 'SQLSoup can only modify mapped Tables (found: %s)' \
- % cls._table.__class__.__name__
- raise InvalidRequestError(msg)
-
-# metaclass is necessary to expose class methods with getattr, e.g.
-# we want to pass db.users.select through to users._mapper.select
-class SelectableClassType(type):
- def insert(cls, **kwargs):
- _ddl_error(cls)
-
- def __clause_element__(cls):
- return cls._table
-
- def __getattr__(cls, attr):
- if attr == '_query':
- # called during mapper init
- raise AttributeError()
- return getattr(cls._query, attr)
-
-class TableClassType(SelectableClassType):
- def insert(cls, **kwargs):
- o = cls()
- o.__dict__.update(kwargs)
- return o
-
- def relate(cls, propname, *args, **kwargs):
- class_mapper(cls)._configure_property(propname, relationship(*args, **kwargs))
-
-def _is_outer_join(selectable):
- if not isinstance(selectable, sql.Join):
- return False
- if selectable.isouter:
- return True
- return _is_outer_join(selectable.left) or _is_outer_join(selectable.right)
-
-def _selectable_name(selectable):
- if isinstance(selectable, sql.Alias):
- return _selectable_name(selectable.element)
- elif isinstance(selectable, sql.Select):
- return ''.join(_selectable_name(s) for s in selectable.froms)
- elif isinstance(selectable, schema.Table):
- return selectable.name.capitalize()
- else:
- x = selectable.__class__.__name__
- if x[0] == '_':
- x = x[1:]
- return x
-
-def _class_for_table(session, engine, selectable, base_cls, mapper_kwargs):
- selectable = expression._clause_element_as_expr(selectable)
- mapname = 'Mapped' + _selectable_name(selectable)
- # Py2K
- if isinstance(mapname, unicode):
- engine_encoding = engine.dialect.encoding
- mapname = mapname.encode(engine_encoding)
- # end Py2K
-
- if isinstance(selectable, Table):
- klass = TableClassType(mapname, (base_cls,), {})
- else:
- klass = SelectableClassType(mapname, (base_cls,), {})
-
- def _compare(self, o):
- L = list(self.__class__.c.keys())
- L.sort()
- t1 = [getattr(self, k) for k in L]
- try:
- t2 = [getattr(o, k) for k in L]
- except AttributeError:
- raise TypeError('unable to compare with %s' % o.__class__)
- return t1, t2
-
- # python2/python3 compatible system of
- # __cmp__ - __lt__ + __eq__
-
- def __lt__(self, o):
- t1, t2 = _compare(self, o)
- return t1 < t2
-
- def __eq__(self, o):
- t1, t2 = _compare(self, o)
- return t1 == t2
-
- def __repr__(self):
- L = ["%s=%r" % (key, getattr(self, key, ''))
- for key in self.__class__.c.keys()]
- return '%s(%s)' % (self.__class__.__name__, ','.join(L))
-
- for m in ['__eq__', '__repr__', '__lt__']:
- setattr(klass, m, eval(m))
- klass._table = selectable
- klass.c = expression.ColumnCollection()
- mappr = mapper(klass,
- selectable,
- extension=AutoAdd(session),
- **mapper_kwargs)
-
- for k in mappr.iterate_properties:
- klass.c[k.key] = k.columns[0]
-
- klass._query = session.query_property()
- return klass
-
-class SqlSoup(object):
- """Represent an ORM-wrapped database resource."""
-
- def __init__(self, engine_or_metadata, base=object, session=None):
- """Initialize a new :class:`.SqlSoup`.
-
- :param engine_or_metadata: a string database URL, :class:`.Engine`
- or :class:`.MetaData` object to associate with. If the
- argument is a :class:`.MetaData`, it should be *bound*
- to an :class:`.Engine`.
- :param base: a class which will serve as the default class for
- returned mapped classes. Defaults to ``object``.
- :param session: a :class:`.ScopedSession` or :class:`.Session` with
- which to associate ORM operations for this :class:`.SqlSoup` instance.
- If ``None``, a :class:`.ScopedSession` that's local to this
- module is used.
-
- """
-
- self.session = session or Session
-        self.base = base
-
- if isinstance(engine_or_metadata, MetaData):
- self._metadata = engine_or_metadata
- elif isinstance(engine_or_metadata, (basestring, Engine)):
- self._metadata = MetaData(engine_or_metadata)
- else:
- raise ArgumentError("invalid engine or metadata argument %r" %
- engine_or_metadata)
-
- self._cache = {}
- self.schema = None
-
- @property
- def bind(self):
- """The :class:`.Engine` associated with this :class:`.SqlSoup`."""
- return self._metadata.bind
-
- engine = bind
-
- def delete(self, instance):
- """Mark an instance as deleted."""
-
- self.session.delete(instance)
-
- def execute(self, stmt, **params):
- """Execute a SQL statement.
-
- The statement may be a string SQL string,
- an :func:`.expression.select` construct, or an :func:`.expression.text`
- construct.
-
- """
- return self.session.execute(sql.text(stmt, bind=self.bind), **params)
-
- @property
- def _underlying_session(self):
- if isinstance(self.session, session.Session):
- return self.session
- else:
- return self.session()
-
- def connection(self):
- """Return the current :class:`.Connection` in use by the current transaction."""
-
- return self._underlying_session._connection_for_bind(self.bind)
-
- def flush(self):
- """Flush pending changes to the database.
-
- See :meth:`.Session.flush`.
-
- """
- self.session.flush()
-
- def rollback(self):
-        """Roll back the current transaction.
-
- See :meth:`.Session.rollback`.
-
- """
- self.session.rollback()
-
- def commit(self):
- """Commit the current transaction.
-
- See :meth:`.Session.commit`.
-
- """
- self.session.commit()
-
- def clear(self):
- """Synonym for :meth:`.SqlSoup.expunge_all`."""
-
- self.session.expunge_all()
-
- def expunge(self, instance):
- """Remove an instance from the :class:`.Session`.
-
- See :meth:`.Session.expunge`.
-
- """
- self.session.expunge(instance)
-
- def expunge_all(self):
- """Clear all objects from the current :class:`.Session`.
-
- See :meth:`.Session.expunge_all`.
-
- """
- self.session.expunge_all()
-
- def map_to(self, attrname, tablename=None, selectable=None,
- schema=None, base=None, mapper_args=util.immutabledict()):
- """Configure a mapping to the given attrname.
-
- This is the "master" method that can be used to create any
- configuration.
-
- (new in 0.6.6)
-
- :param attrname: String attribute name which will be
-          established as an attribute on this :class:`.SqlSoup`
- instance.
- :param base: a Python class which will be used as the
- base for the mapped class. If ``None``, the "base"
- argument specified by this :class:`.SqlSoup`
- instance's constructor will be used, which defaults to
- ``object``.
- :param mapper_args: Dictionary of arguments which will
- be passed directly to :func:`.orm.mapper`.
- :param tablename: String name of a :class:`.Table` to be
- reflected. If a :class:`.Table` is already available,
- use the ``selectable`` argument. This argument is
- mutually exclusive versus the ``selectable`` argument.
- :param selectable: a :class:`.Table`, :class:`.Join`, or
- :class:`.Select` object which will be mapped. This
- argument is mutually exclusive versus the ``tablename``
- argument.
- :param schema: String schema name to use if the
- ``tablename`` argument is present.
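-
-        For example, mapping a hypothetical ``users`` table to the
-        attribute name ``people`` (a sketch)::
-
-            people = db.map_to('people', tablename='users')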
-
-
- """
- if attrname in self._cache:
- raise InvalidRequestError(
- "Attribute '%s' is already mapped to '%s'" % (
- attrname,
- class_mapper(self._cache[attrname]).mapped_table
- ))
-
- if tablename is not None:
- if not isinstance(tablename, basestring):
- raise ArgumentError("'tablename' argument must be a string."
- )
- if selectable is not None:
- raise ArgumentError("'tablename' and 'selectable' "
- "arguments are mutually exclusive")
-
- selectable = Table(tablename,
- self._metadata,
- autoload=True,
- autoload_with=self.bind,
- schema=schema or self.schema)
- elif schema:
- raise ArgumentError("'tablename' argument is required when "
- "using 'schema'.")
- elif selectable is not None:
- if not isinstance(selectable, expression.FromClause):
- raise ArgumentError("'selectable' argument must be a "
- "table, select, join, or other "
- "selectable construct.")
- else:
- raise ArgumentError("'tablename' or 'selectable' argument is "
- "required.")
-
- if not selectable.primary_key.columns:
- if tablename:
- raise PKNotFoundError(
- "table '%s' does not have a primary "
- "key defined" % tablename)
- else:
- raise PKNotFoundError(
- "selectable '%s' does not have a primary "
- "key defined" % selectable)
-
- mapped_cls = _class_for_table(
- self.session,
- self.engine,
- selectable,
- base or self.base,
- mapper_args
- )
- self._cache[attrname] = mapped_cls
- return mapped_cls
-
-
- def map(self, selectable, base=None, **mapper_args):
- """Map a selectable directly.
-
- The class and its mapping are not cached and will
- be discarded once dereferenced (as of 0.6.6).
-
- :param selectable: an :func:`.expression.select` construct.
- :param base: a Python class which will be used as the
- base for the mapped class. If ``None``, the "base"
- argument specified by this :class:`.SqlSoup`
- instance's constructor will be used, which defaults to
- ``object``.
- :param mapper_args: Dictionary of arguments which will
- be passed directly to :func:`.orm.mapper`.
-
- """
-
- return _class_for_table(
- self.session,
- self.engine,
- selectable,
- base or self.base,
- mapper_args
- )
-
- def with_labels(self, selectable, base=None, **mapper_args):
- """Map a selectable directly, wrapping the
- selectable in a subquery with labels.
-
- The class and its mapping are not cached and will
- be discarded once dereferenced (as of 0.6.6).
-
- :param selectable: an :func:`.expression.select` construct.
- :param base: a Python class which will be used as the
- base for the mapped class. If ``None``, the "base"
- argument specified by this :class:`.SqlSoup`
- instance's constructor will be used, which defaults to
- ``object``.
- :param mapper_args: Dictionary of arguments which will
- be passed directly to :func:`.orm.mapper`.
-
- """
-
- # TODO give meaningful aliases
- return self.map(
- expression._clause_element_as_expr(selectable).
- select(use_labels=True).
- alias('foo'), base=base, **mapper_args)
-
- def join(self, left, right, onclause=None, isouter=False,
- base=None, **mapper_args):
- """Create an :func:`.expression.join` and map to it.
-
- The class and its mapping are not cached and will
- be discarded once dereferenced (as of 0.6.6).
-
- :param left: a mapped class or table object.
- :param right: a mapped class or table object.
-        :param onclause: optional "ON" clause construct.
- :param isouter: if True, the join will be an OUTER join.
- :param base: a Python class which will be used as the
- base for the mapped class. If ``None``, the "base"
- argument specified by this :class:`.SqlSoup`
- instance's constructor will be used, which defaults to
- ``object``.
- :param mapper_args: Dictionary of arguments which will
- be passed directly to :func:`.orm.mapper`.
-
- """
-
- j = join(left, right, onclause=onclause, isouter=isouter)
- return self.map(j, base=base, **mapper_args)
-
- def entity(self, attr, schema=None):
- """Return the named entity from this :class:`.SqlSoup`, or
- create if not present.
-
- For more generalized mapping, see :meth:`.map_to`.
-
- """
- try:
- return self._cache[attr]
- except KeyError, ke:
- return self.map_to(attr, tablename=attr, schema=schema)
-
- def __getattr__(self, attr):
- return self.entity(attr)
-
- def __repr__(self):
- return 'SqlSoup(%r)' % self._metadata
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/interfaces.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/interfaces.py
deleted file mode 100755
index d1e3fa6b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/interfaces.py
+++ /dev/null
@@ -1,305 +0,0 @@
-# sqlalchemy/interfaces.py
-# Copyright (C) 2007-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# Copyright (C) 2007 Jason Kirtland jek@discorporate.us
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Interfaces and abstract types.
-
-This module is **deprecated** and is superseded by the
-event system.
-
-"""
-
-from sqlalchemy import event, util
-
-class PoolListener(object):
- """Hooks into the lifecycle of connections in a :class:`.Pool`.
-
- .. note:: :class:`.PoolListener` is deprecated. Please
- refer to :class:`.PoolEvents`.
-
- Usage::
-
- class MyListener(PoolListener):
- def connect(self, dbapi_con, con_record):
- '''perform connect operations'''
- # etc.
-
- # create a new pool with a listener
- p = QueuePool(..., listeners=[MyListener()])
-
- # add a listener after the fact
- p.add_listener(MyListener())
-
- # usage with create_engine()
- e = create_engine("url://", listeners=[MyListener()])
-
- All of the standard connection :class:`~sqlalchemy.pool.Pool` types can
- accept event listeners for key connection lifecycle events:
- creation, pool check-out and check-in. There are no events fired
- when a connection closes.
-
- For any given DB-API connection, there will be one ``connect``
- event, `n` number of ``checkout`` events, and either `n` or `n - 1`
- ``checkin`` events. (If a ``Connection`` is detached from its
- pool via the ``detach()`` method, it won't be checked back in.)
-
- These are low-level events for low-level objects: raw Python
- DB-API connections, without the conveniences of the SQLAlchemy
- ``Connection`` wrapper, ``Dialect`` services or ``ClauseElement``
- execution. If you execute SQL through the connection, explicitly
- closing all cursors and other resources is recommended.
-
- Events also receive a ``_ConnectionRecord``, a long-lived internal
- ``Pool`` object that basically represents a "slot" in the
- connection pool. ``_ConnectionRecord`` objects have one public
- attribute of note: ``info``, a dictionary whose contents are
- scoped to the lifetime of the DB-API connection managed by the
- record. You can use this shared storage area however you like.
-
- There is no need to subclass ``PoolListener`` to handle events.
- Any class that implements one or more of these methods can be used
- as a pool listener. The ``Pool`` will inspect the methods
- provided by a listener object and add the listener to one or more
- internal event queues based on its capabilities. In terms of
- efficiency and function call overhead, you're much better off only
- providing implementations for the hooks you'll be using.
-
- """
-
- @classmethod
- def _adapt_listener(cls, self, listener):
- """Adapt a :class:`.PoolListener` to individual
- :class:`event.Dispatch` events.
-
- """
-
- listener = util.as_interface(listener, methods=('connect',
- 'first_connect', 'checkout', 'checkin'))
- if hasattr(listener, 'connect'):
- event.listen(self, 'connect', listener.connect)
- if hasattr(listener, 'first_connect'):
- event.listen(self, 'first_connect', listener.first_connect)
- if hasattr(listener, 'checkout'):
- event.listen(self, 'checkout', listener.checkout)
- if hasattr(listener, 'checkin'):
- event.listen(self, 'checkin', listener.checkin)
-
-
- def connect(self, dbapi_con, con_record):
- """Called once for each new DB-API connection or Pool's ``creator()``.
-
- dbapi_con
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- con_record
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def first_connect(self, dbapi_con, con_record):
- """Called exactly once for the first DB-API connection.
-
- dbapi_con
- A newly connected raw DB-API connection (not a SQLAlchemy
- ``Connection`` wrapper).
-
- con_record
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
- def checkout(self, dbapi_con, con_record, con_proxy):
- """Called when a connection is retrieved from the Pool.
-
- dbapi_con
- A raw DB-API connection
-
- con_record
- The ``_ConnectionRecord`` that persistently manages the connection
-
- con_proxy
- The ``_ConnectionFairy`` which manages the connection for the span of
- the current checkout.
-
- If you raise an ``exc.DisconnectionError``, the current
- connection will be disposed and a fresh connection retrieved.
- Processing of all checkout listeners will abort and restart
- using the new connection.
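-
-        For example, a hypothetical "pessimistic ping" listener (a sketch
-        assuming a backend that accepts ``SELECT 1``)::
-
-            from sqlalchemy import exc
-
-            class PingListener(PoolListener):
-                def checkout(self, dbapi_con, con_record, con_proxy):
-                    try:
-                        dbapi_con.cursor().execute("SELECT 1")
-                    except Exception:
-                        raise exc.DisconnectionError()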
- """
-
- def checkin(self, dbapi_con, con_record):
- """Called when a connection returns to the pool.
-
- Note that the connection may be closed, and may be None if the
- connection has been invalidated. ``checkin`` will not be called
- for detached connections. (They do not return to the pool.)
-
- dbapi_con
- A raw DB-API connection
-
- con_record
- The ``_ConnectionRecord`` that persistently manages the connection
-
- """
-
-class ConnectionProxy(object):
- """Allows interception of statement execution by Connections.
-
- .. note:: :class:`.ConnectionProxy` is deprecated. Please
- refer to :class:`.ConnectionEvents`.
-
- Either or both of the ``execute()`` and ``cursor_execute()``
- may be implemented to intercept compiled statement and
- cursor level executions, e.g.::
-
- class MyProxy(ConnectionProxy):
- def execute(self, conn, execute, clauseelement, *multiparams, **params):
- print "compiled statement:", clauseelement
- return execute(clauseelement, *multiparams, **params)
-
- def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
- print "raw statement:", statement
- return execute(cursor, statement, parameters, context)
-
- The ``execute`` argument is a function that will fulfill the default
- execution behavior for the operation. The signature illustrated
- in the example should be used.
-
- The proxy is installed into an :class:`~sqlalchemy.engine.Engine` via
- the ``proxy`` argument::
-
- e = create_engine('someurl://', proxy=MyProxy())
-
- """
-
- @classmethod
- def _adapt_listener(cls, self, listener):
-
- def adapt_execute(conn, clauseelement, multiparams, params):
-
- def execute_wrapper(clauseelement, *multiparams, **params):
- return clauseelement, multiparams, params
-
- return listener.execute(conn, execute_wrapper,
- clauseelement, *multiparams,
- **params)
-
- event.listen(self, 'before_execute', adapt_execute)
-
-        def adapt_cursor_execute(conn, cursor, statement,
-                        parameters, context, executemany):
-
- def execute_wrapper(
- cursor,
- statement,
- parameters,
- context,
- ):
- return statement, parameters
-
- return listener.cursor_execute(
- execute_wrapper,
- cursor,
- statement,
- parameters,
- context,
- executemany,
- )
-
- event.listen(self, 'before_cursor_execute', adapt_cursor_execute)
-
- def do_nothing_callback(*arg, **kw):
- pass
-
- def adapt_listener(fn):
-
- def go(conn, *arg, **kw):
- fn(conn, do_nothing_callback, *arg, **kw)
-
- return util.update_wrapper(go, fn)
-
- event.listen(self, 'begin', adapt_listener(listener.begin))
- event.listen(self, 'rollback',
- adapt_listener(listener.rollback))
- event.listen(self, 'commit', adapt_listener(listener.commit))
- event.listen(self, 'savepoint',
- adapt_listener(listener.savepoint))
- event.listen(self, 'rollback_savepoint',
- adapt_listener(listener.rollback_savepoint))
- event.listen(self, 'release_savepoint',
- adapt_listener(listener.release_savepoint))
- event.listen(self, 'begin_twophase',
- adapt_listener(listener.begin_twophase))
- event.listen(self, 'prepare_twophase',
- adapt_listener(listener.prepare_twophase))
- event.listen(self, 'rollback_twophase',
- adapt_listener(listener.rollback_twophase))
- event.listen(self, 'commit_twophase',
- adapt_listener(listener.commit_twophase))
-
-
- def execute(self, conn, execute, clauseelement, *multiparams, **params):
- """Intercept high level execute() events."""
-
- return execute(clauseelement, *multiparams, **params)
-
- def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
- """Intercept low-level cursor execute() events."""
-
- return execute(cursor, statement, parameters, context)
-
- def begin(self, conn, begin):
- """Intercept begin() events."""
-
- return begin()
-
- def rollback(self, conn, rollback):
- """Intercept rollback() events."""
-
- return rollback()
-
- def commit(self, conn, commit):
- """Intercept commit() events."""
-
- return commit()
-
- def savepoint(self, conn, savepoint, name=None):
- """Intercept savepoint() events."""
-
- return savepoint(name=name)
-
- def rollback_savepoint(self, conn, rollback_savepoint, name, context):
- """Intercept rollback_savepoint() events."""
-
- return rollback_savepoint(name, context)
-
- def release_savepoint(self, conn, release_savepoint, name, context):
- """Intercept release_savepoint() events."""
-
- return release_savepoint(name, context)
-
- def begin_twophase(self, conn, begin_twophase, xid):
- """Intercept begin_twophase() events."""
-
- return begin_twophase(xid)
-
- def prepare_twophase(self, conn, prepare_twophase, xid):
- """Intercept prepare_twophase() events."""
-
- return prepare_twophase(xid)
-
- def rollback_twophase(self, conn, rollback_twophase, xid, is_prepared):
- """Intercept rollback_twophase() events."""
-
- return rollback_twophase(xid, is_prepared)
-
- def commit_twophase(self, conn, commit_twophase, xid, is_prepared):
- """Intercept commit_twophase() events."""
-
- return commit_twophase(xid, is_prepared)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/log.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/log.py
deleted file mode 100755
index e77730a9..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/log.py
+++ /dev/null
@@ -1,212 +0,0 @@
-# sqlalchemy/log.py
-# Copyright (C) 2006-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# Includes alterations by Vinay Sajip vinay_sajip@yahoo.co.uk
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Logging control and utilities.
-
-Control of logging for SA can be performed from the regular python logging
-module. The regular dotted module namespace is used, starting at
-'sqlalchemy'. For class-level logging, the class name is appended.
-
-The "echo" keyword parameter, available on SQLA :class:`.Engine`
-and :class:`.Pool` objects, corresponds to a logger specific to that
-instance only.
-
-"""
-
-import logging
-import sys
-from sqlalchemy import util
-
-# set initial level to WARN. This is so that
-# log statements don't occur in the absence of explicit
-# logging being enabled for 'sqlalchemy'.
-rootlogger = logging.getLogger('sqlalchemy')
-if rootlogger.level == logging.NOTSET:
- rootlogger.setLevel(logging.WARN)
-
-def _add_default_handler(logger):
- handler = logging.StreamHandler(sys.stdout)
- handler.setFormatter(logging.Formatter(
- '%(asctime)s %(levelname)s %(name)s %(message)s'))
- logger.addHandler(handler)
-
-_logged_classes = set()
-def class_logger(cls, enable=False):
- logger = logging.getLogger(cls.__module__ + "." + cls.__name__)
- if enable == 'debug':
- logger.setLevel(logging.DEBUG)
- elif enable == 'info':
- logger.setLevel(logging.INFO)
- cls._should_log_debug = lambda self: logger.isEnabledFor(logging.DEBUG)
- cls._should_log_info = lambda self: logger.isEnabledFor(logging.INFO)
- cls.logger = logger
- _logged_classes.add(cls)
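-
-# For illustration, hypothetical usage (sketch only; output appears only
-# if a logging handler is configured):
-#
-#     class MyPoolLike(object):
-#         pass
-#
-#     class_logger(MyPoolLike, enable='debug')
-#     MyPoolLike().logger.debug("now enabled at DEBUG level")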
-
-
-class Identified(object):
- logging_name = None
-
- def _should_log_debug(self):
- return self.logger.isEnabledFor(logging.DEBUG)
-
- def _should_log_info(self):
- return self.logger.isEnabledFor(logging.INFO)
-
-class InstanceLogger(object):
- """A logger adapter (wrapper) for :class:`.Identified` subclasses.
-
- This allows multiple instances (e.g. Engine or Pool instances)
- to share a logger, but have its verbosity controlled on a
- per-instance basis.
-
- The basic functionality is to return a logging level
- which is based on an instance's echo setting.
-
- Default implementation is:
-
- 'debug' -> logging.DEBUG
- True -> logging.INFO
- False -> Effective level of underlying logger
- (logging.WARNING by default)
- None -> same as False
- """
-
- # Map echo settings to logger levels
- _echo_map = {
- None: logging.NOTSET,
- False: logging.NOTSET,
- True: logging.INFO,
- 'debug': logging.DEBUG,
- }
-
- def __init__(self, echo, name):
- self.echo = echo
- self.logger = logging.getLogger(name)
-
- # if echo flag is enabled and no handlers,
- # add a handler to the list
- if self._echo_map[echo] <= logging.INFO \
- and not self.logger.handlers:
- _add_default_handler(self.logger)
-
- #
- # Boilerplate convenience methods
- #
- def debug(self, msg, *args, **kwargs):
- """Delegate a debug call to the underlying logger."""
-
- self.log(logging.DEBUG, msg, *args, **kwargs)
-
- def info(self, msg, *args, **kwargs):
- """Delegate an info call to the underlying logger."""
-
- self.log(logging.INFO, msg, *args, **kwargs)
-
- def warning(self, msg, *args, **kwargs):
- """Delegate a warning call to the underlying logger."""
-
- self.log(logging.WARNING, msg, *args, **kwargs)
-
- warn = warning
-
- def error(self, msg, *args, **kwargs):
-        """Delegate an error call to the underlying logger."""
-
-        self.log(logging.ERROR, msg, *args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- """Delegate an exception call to the underlying logger."""
-
- kwargs["exc_info"] = 1
- self.log(logging.ERROR, msg, *args, **kwargs)
-
- def critical(self, msg, *args, **kwargs):
- """Delegate a critical call to the underlying logger."""
-
- self.log(logging.CRITICAL, msg, *args, **kwargs)
-
- def log(self, level, msg, *args, **kwargs):
- """Delegate a log call to the underlying logger.
-
- The level here is determined by the echo
- flag as well as that of the underlying logger, and
- logger._log() is called directly.
-
- """
-
- # inline the logic from isEnabledFor(),
- # getEffectiveLevel(), to avoid overhead.
-
- if self.logger.manager.disable >= level:
- return
-
- selected_level = self._echo_map[self.echo]
- if selected_level == logging.NOTSET:
- selected_level = self.logger.getEffectiveLevel()
-
- if level >= selected_level:
- self.logger._log(level, msg, args, **kwargs)
-
- def isEnabledFor(self, level):
- """Is this logger enabled for level 'level'?"""
-
- if self.logger.manager.disable >= level:
- return False
- return level >= self.getEffectiveLevel()
-
- def getEffectiveLevel(self):
- """What's the effective level for this logger?"""
-
- level = self._echo_map[self.echo]
- if level == logging.NOTSET:
- level = self.logger.getEffectiveLevel()
- return level
-
-def instance_logger(instance, echoflag=None):
-    """Create a logger for an instance that implements :class:`.Identified`."""
-
- if instance.logging_name:
- name = "%s.%s.%s" % (instance.__class__.__module__,
- instance.__class__.__name__, instance.logging_name)
- else:
- name = "%s.%s" % (instance.__class__.__module__,
- instance.__class__.__name__)
-
- instance._echo = echoflag
-
- if echoflag in (False, None):
- # if no echo setting or False, return a Logger directly,
- # avoiding overhead of filtering
- logger = logging.getLogger(name)
- else:
-        # if an echo flag was specified, return an InstanceLogger,
-        # which checks the flag and overrides normal log
-        # levels by calling logger._log()
- logger = InstanceLogger(echoflag, name)
-
- instance.logger = logger
-
-class echo_property(object):
- __doc__ = """\
- When ``True``, enable log output for this element.
-
- This has the effect of setting the Python logging level for the namespace
- of this element's class and object reference. A value of boolean ``True``
- indicates that the loglevel ``logging.INFO`` will be set for the logger,
- whereas the string value ``debug`` will set the loglevel to
- ``logging.DEBUG``.
- """
-
- def __get__(self, instance, owner):
- if instance is None:
- return self
- else:
- return instance._echo
-
- def __set__(self, instance, value):
- instance_logger(instance, echoflag=value)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/__init__.py
deleted file mode 100755
index 8a0312d5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/__init__.py
+++ /dev/null
@@ -1,1278 +0,0 @@
-# orm/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-Functional constructs for ORM configuration.
-
-See the SQLAlchemy object relational tutorial and mapper configuration
-documentation for an overview of how this module is used.
-
-"""
-
-from sqlalchemy.orm import exc
-from sqlalchemy.orm.mapper import (
- Mapper,
- _mapper_registry,
- class_mapper,
- configure_mappers
- )
-from sqlalchemy.orm.interfaces import (
- EXT_CONTINUE,
- EXT_STOP,
- InstrumentationManager,
- MapperExtension,
- PropComparator,
- SessionExtension,
- AttributeExtension,
- )
-from sqlalchemy.orm.util import (
- aliased,
- join,
- object_mapper,
- outerjoin,
- polymorphic_union,
- with_parent,
- )
-from sqlalchemy.orm.properties import (
- ColumnProperty,
- ComparableProperty,
- CompositeProperty,
- RelationshipProperty,
- PropertyLoader,
- SynonymProperty,
- )
-from sqlalchemy.orm import mapper as mapperlib
-from sqlalchemy.orm.mapper import reconstructor, validates
-from sqlalchemy.orm import strategies
-from sqlalchemy.orm.query import AliasOption, Query
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.orm.session import Session
-from sqlalchemy.orm.session import object_session, sessionmaker, \
- make_transient
-from sqlalchemy.orm.scoping import ScopedSession
-from sqlalchemy import util as sa_util
-
-__all__ = (
- 'EXT_CONTINUE',
- 'EXT_STOP',
- 'InstrumentationManager',
- 'MapperExtension',
- 'AttributeExtension',
- 'PropComparator',
- 'Query',
- 'Session',
- 'aliased',
- 'backref',
- 'class_mapper',
- 'clear_mappers',
- 'column_property',
- 'comparable_property',
- 'compile_mappers',
- 'configure_mappers',
- 'composite',
- 'contains_alias',
- 'contains_eager',
- 'create_session',
- 'defer',
- 'deferred',
- 'dynamic_loader',
- 'eagerload',
- 'eagerload_all',
- 'immediateload',
- 'join',
- 'joinedload',
- 'joinedload_all',
- 'lazyload',
- 'mapper',
- 'make_transient',
- 'noload',
- 'object_mapper',
- 'object_session',
- 'outerjoin',
- 'polymorphic_union',
- 'reconstructor',
- 'relationship',
- 'relation',
- 'scoped_session',
- 'sessionmaker',
- 'subqueryload',
- 'subqueryload_all',
- 'synonym',
- 'undefer',
- 'undefer_group',
- 'validates'
- )
-
-
-def scoped_session(session_factory, scopefunc=None):
- """Provides thread-local or scoped management of :class:`.Session` objects.
-
- This is a front-end function to
- :class:`.ScopedSession`.
-
- :param session_factory: a callable function that produces
- :class:`.Session` instances, such as :func:`sessionmaker`.
-
- :param scopefunc: Optional "scope" function which would be
- passed to the :class:`.ScopedRegistry`. If None, the
- :class:`.ThreadLocalRegistry` is used by default.
-
-    :returns: a :class:`.ScopedSession` instance
-
- Usage::
-
- Session = scoped_session(sessionmaker(autoflush=True))
-
- To instantiate a Session object which is part of the scoped context,
- instantiate normally::
-
- session = Session()
-
- Most session methods are available as classmethods from the scoped
- session::
-
- Session.commit()
- Session.close()
-
- """
- return ScopedSession(session_factory, scopefunc=scopefunc)
-
-def create_session(bind=None, **kwargs):
- """Create a new :class:`.Session`
- with no automation enabled by default.
-
- This function is used primarily for testing. The usual
- route to :class:`.Session` creation is via its constructor
- or the :func:`.sessionmaker` function.
-
- :param bind: optional, a single Connectable to use for all
- database access in the created
- :class:`~sqlalchemy.orm.session.Session`.
-
- :param \*\*kwargs: optional, passed through to the
- :class:`.Session` constructor.
-
- :returns: an :class:`~sqlalchemy.orm.session.Session` instance
-
- The defaults of create_session() are the opposite of that of
- :func:`sessionmaker`; ``autoflush`` and ``expire_on_commit`` are
- False, ``autocommit`` is True. In this sense the session acts
- more like the "classic" SQLAlchemy 0.3 session with these.
-
- Usage::
-
- >>> from sqlalchemy.orm import create_session
- >>> session = create_session()
-
- It is recommended to use :func:`sessionmaker` instead of
- create_session().
-
- """
- kwargs.setdefault('autoflush', False)
- kwargs.setdefault('autocommit', True)
- kwargs.setdefault('expire_on_commit', False)
- return Session(bind=bind, **kwargs)
-
-def relationship(argument, secondary=None, **kwargs):
- """Provide a relationship of a primary Mapper to a secondary Mapper.
-
- .. note:: :func:`relationship` is historically known as
- :func:`relation` prior to version 0.6.
-
- This corresponds to a parent-child or associative table relationship. The
- constructed class is an instance of :class:`.RelationshipProperty`.
-
- A typical :func:`relationship`::
-
- mapper(Parent, properties={
- 'children': relationship(Children)
- })
-
- :param argument:
- a class or :class:`.Mapper` instance, representing the target of
- the relationship.
-
- :param secondary:
- for a many-to-many relationship, specifies the intermediary
- table. The *secondary* keyword argument should generally only
- be used for a table that is not otherwise expressed in any class
- mapping. In particular, using the Association Object Pattern is
- generally mutually exclusive with the use of the *secondary*
- keyword argument.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- many-to-one reference should be loaded when replaced, if
- not already loaded. Normally, history tracking logic for
- simple many-to-ones only needs to be aware of the "new"
- value in order to perform a flush. This flag is available
- for applications that make use of
- :func:`.attributes.get_history` which also need to know
- the "previous" value of the attribute. (New in 0.6.6)
-
- :param backref:
- indicates the string name of a property to be placed on the related
- mapper's class that will handle this relationship in the other
- direction. The other property will be created automatically
- when the mappers are configured. Can also be passed as a
- :func:`backref` object to control the configuration of the
- new relationship.
-
- :param back_populates:
- Takes a string name and has the same meaning as ``backref``,
- except the complementing property is **not** created automatically,
- and instead must be configured explicitly on the other mapper. The
- complementing property should also indicate ``back_populates``
- to this relationship to ensure proper functioning.
-
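-      For example, a sketch configuring both directions explicitly,
-      assuming hypothetical ``Parent``/``Child`` mappings::
-
-        mapper(Parent, parent_table, properties={
-            'children': relationship(Child, back_populates='parent')
-        })
-        mapper(Child, child_table, properties={
-            'parent': relationship(Parent, back_populates='children')
-        })
-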
- :param cascade:
- a comma-separated list of cascade rules which determines how
- Session operations should be "cascaded" from parent to child.
- This defaults to ``False``, which means the default cascade
- should be used. The default value is ``"save-update, merge"``.
-
- Available cascades are:
-
- * ``save-update`` - cascade the :meth:`.Session.add`
- operation. This cascade applies both to future and
- past calls to :meth:`~sqlalchemy.orm.session.Session.add`,
- meaning new items added to a collection or scalar relationship
- get placed into the same session as that of the parent, and
- also applies to items which have been removed from this
- relationship but are still part of unflushed history.
-
- * ``merge`` - cascade the :meth:`~sqlalchemy.orm.session.Session.merge`
- operation
-
- * ``expunge`` - cascade the :meth:`.Session.expunge`
- operation
-
- * ``delete`` - cascade the :meth:`.Session.delete`
- operation
-
- * ``delete-orphan`` - if an item of the child's type with no
- parent is detected, mark it for deletion. Note that this
- option prevents a pending item of the child's class from being
- persisted without a parent present.
-
- * ``refresh-expire`` - cascade the :meth:`.Session.expire`
- and :meth:`~sqlalchemy.orm.session.Session.refresh` operations
-
-      * ``all`` - shorthand for "save-update, merge, refresh-expire,
-        expunge, delete"
-
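-      For example, a common "full" cascade on a parent-to-children
-      relationship, assuming hypothetical ``Parent``/``Child`` mappings::
-
-        mapper(Parent, parent_table, properties={
-            'children': relationship(Child, cascade="all, delete-orphan")
-        })
-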
- :param cascade_backrefs=True:
- a boolean value indicating if the ``save-update`` cascade should
- operate along a backref event. When set to ``False`` on a
- one-to-many relationship that has a many-to-one backref, assigning
- a persistent object to the many-to-one attribute on a transient object
- will not add the transient to the session. Similarly, when
- set to ``False`` on a many-to-one relationship that has a one-to-many
- backref, appending a persistent object to the one-to-many collection
- on a transient object will not add the transient to the session.
-
- ``cascade_backrefs`` is new in 0.6.5.
-
- :param collection_class:
-      a class or callable that returns a new list-holding object, which
-      will be used in place of a plain list for storing elements.
- Behavior of this attribute is described in detail at
- :ref:`custom_collections`.
-
- :param comparator_factory:
- a class which extends :class:`.RelationshipProperty.Comparator` which
- provides custom SQL clause generation for comparison operations.
-
- :param doc:
- docstring which will be applied to the resulting descriptor.
-
- :param extension:
- an :class:`.AttributeExtension` instance, or list of extensions,
- which will be prepended to the list of attribute listeners for
- the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- :param foreign_keys:
- a list of columns which are to be used as "foreign key" columns.
- Normally, :func:`relationship` uses the :class:`.ForeignKey`
- and :class:`.ForeignKeyConstraint` objects present within the
- mapped or secondary :class:`.Table` to determine the "foreign" side of
- the join condition. This is used to construct SQL clauses in order
- to load objects, as well as to "synchronize" values from
- primary key columns to referencing foreign key columns.
- The ``foreign_keys`` parameter overrides the notion of what's
- "foreign" in the table metadata, allowing the specification
- of a list of :class:`.Column` objects that should be considered
- part of the foreign key.
-
- There are only two use cases for ``foreign_keys`` - one, when it is not
- convenient for :class:`.Table` metadata to contain its own foreign key
-      metadata (which should be almost never, unless reflecting a large number of
- tables from a MySQL MyISAM schema, or a schema that doesn't actually
- have foreign keys on it). The other is for extremely
- rare and exotic composite foreign key setups where some columns
- should artificially not be considered as foreign.
-
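-      For example, a sketch naming the referencing column explicitly when
-      the hypothetical ``child`` table carries no :class:`.ForeignKey`
-      metadata of its own::
-
-        mapper(Parent, parent_table, properties={
-            'children': relationship(Child,
-                primaryjoin=parent_table.c.id == child_table.c.parent_id,
-                foreign_keys=[child_table.c.parent_id])
-        })
-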
- :param innerjoin=False:
- when ``True``, joined eager loads will use an inner join to join
- against related tables instead of an outer join. The purpose
- of this option is strictly one of performance, as inner joins
- generally perform better than outer joins. This flag can
- be set to ``True`` when the relationship references an object
- via many-to-one using local foreign keys that are not nullable,
- or when the reference is one-to-one or a collection that is
-      guaranteed to have at least one entry.
-
- :param join_depth:
- when non-``None``, an integer value indicating how many levels
- deep "eager" loaders should join on a self-referring or cyclical
- relationship. The number counts how many times the same Mapper
- shall be present in the loading condition along a particular join
- branch. When left at its default of ``None``, eager loaders
-      will stop chaining when they encounter the same target mapper
- which is already higher up in the chain. This option applies
- both to joined- and subquery- eager loaders.
-
- :param lazy='select': specifies
- how the related items should be loaded. Default value is
- ``select``. Values include:
-
- * ``select`` - items should be loaded lazily when the property is first
- accessed, using a separate SELECT statement, or identity map
- fetch for simple many-to-one references.
-
- * ``immediate`` - items should be loaded as the parents are loaded,
- using a separate SELECT statement, or identity map fetch for
- simple many-to-one references. (new as of 0.6.5)
-
- * ``joined`` - items should be loaded "eagerly" in the same query as
- that of the parent, using a JOIN or LEFT OUTER JOIN. Whether
- the join is "outer" or not is determined by the ``innerjoin``
- parameter.
-
- * ``subquery`` - items should be loaded "eagerly" within the same
- query as that of the parent, using a second SQL statement
- which issues a JOIN to a subquery of the original
- statement.
-
- * ``noload`` - no loading should occur at any time. This is to
- support "write-only" attributes, or attributes which are
- populated in some manner specific to the application.
-
- * ``dynamic`` - the attribute will return a pre-configured
- :class:`~sqlalchemy.orm.query.Query` object for all read
- operations, onto which further filtering operations can be
- applied before iterating the results. The dynamic
- collection supports a limited set of mutation operations,
- allowing ``append()`` and ``remove()``. Changes to the
- collection will not be visible until flushed
- to the database, where it is then refetched upon iteration.
-
- * True - a synonym for 'select'
-
-      * False - a synonym for 'joined'
-
- * None - a synonym for 'noload'
-
- Detailed discussion of loader strategies is at :ref:`loading_toplevel`.
-
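-      As an illustration, the same hypothetical relationship configured
-      with different strategies::
-
-        # separate SELECT on first access (the default)
-        relationship(Child, lazy='select')
-
-        # LEFT OUTER JOIN within the parent's query
-        relationship(Child, lazy='joined')
-
-        # second query JOINing against a subquery of the original
-        relationship(Child, lazy='subquery')
-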
- :param load_on_pending=False:
- Indicates loading behavior for transient or pending parent objects.
-
- When set to ``True``, causes the lazy-loader to
- issue a query for a parent object that is not persistent, meaning it has
- never been flushed. This may take effect for a pending object when
- autoflush is disabled, or for a transient object that has been
- "attached" to a :class:`.Session` but is not part of its pending
- collection. Attachment of transient objects to the session without
- moving to the "pending" state is not a supported behavior at this time.
-
- Note that the load of related objects on a pending or transient object
- also does not trigger any attribute change events - no user-defined
- events will be emitted for these attributes, and if and when the
- object is ultimately flushed, only the user-specific foreign key
- attributes will be part of the modified state.
-
- The load_on_pending flag does not improve behavior
- when the ORM is used normally - object references should be constructed
- at the object level, not at the foreign key level, so that they
- are present in an ordinary way before flush() proceeds. This flag
-      is not intended for general use.
-
- New in 0.6.5.
-
- :param order_by:
- indicates the ordering that should be applied when loading these
- items.
-
- :param passive_deletes=False:
- Indicates loading behavior during delete operations.
-
- A value of True indicates that unloaded child items should not
- be loaded during a delete operation on the parent. Normally,
- when a parent item is deleted, all child items are loaded so
- that they can either be marked as deleted, or have their
- foreign key to the parent set to NULL. Marking this flag as
- True usually implies an ON DELETE <CASCADE|SET NULL> rule is in
- place which will handle updating/deleting child rows on the
- database side.
-
- Additionally, setting the flag to the string value 'all' will
- disable the "nulling out" of the child foreign keys, when there
- is no delete or delete-orphan cascade enabled. This is
- typically used when a triggering or error raise scenario is in
- place on the database side. Note that the foreign key
- attributes on in-session child objects will not be changed
-      after a flush occurs, so this is a very special use-case
- setting.
-
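-      A sketch pairing ``passive_deletes=True`` with an ``ON DELETE
-      CASCADE`` rule, assuming hypothetical ``parent``/``child`` tables::
-
-        child_table = Table('child', metadata,
-            Column('id', Integer, primary_key=True),
-            Column('parent_id', Integer,
-                   ForeignKey('parent.id', ondelete='CASCADE')))
-
-        mapper(Parent, parent_table, properties={
-            'children': relationship(Child, passive_deletes=True)
-        })
-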
- :param passive_updates=True:
- Indicates loading and INSERT/UPDATE/DELETE behavior when the
- source of a foreign key value changes (i.e. an "on update"
- cascade), which are typically the primary key columns of the
- source row.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will
- handle propagation of an UPDATE from a source column to
- dependent rows. Note that with databases which enforce
- referential integrity (i.e. PostgreSQL, MySQL with InnoDB tables),
- ON UPDATE CASCADE is required for this operation. The
- relationship() will update the value of the attribute on related
- items which are locally present in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE
- (i.e. SQLite, MySQL MyISAM tables).
-
- Also see the passive_updates flag on ``mapper()``.
-
- A future SQLAlchemy release will provide a "detect" feature for
- this flag.
-
- :param post_update:
- this indicates that the relationship should be handled by a
- second UPDATE statement after an INSERT or before a
- DELETE. Currently, it also will issue an UPDATE after the
- instance was UPDATEd as well, although this technically should
- be improved. This flag is used to handle saving bi-directional
- dependencies between two individual rows (i.e. each row
- references the other), where it would otherwise be impossible to
- INSERT or DELETE both rows fully since one row exists before the
- other. Use this flag when a particular mapping arrangement will
- incur two rows that are dependent on each other, such as a table
- that has a one-to-many relationship to a set of child rows, and
- also has a column that references a single child row within that
- list (i.e. both tables contain a foreign key to each other). If
- a ``flush()`` operation returns an error that a "cyclical
- dependency" was detected, this is a cue that you might want to
- use ``post_update`` to "break" the cycle.
-
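-      For example, a sketch of a mutually-dependent pair, assuming a
-      hypothetical ``widget`` table which also references one "favorite"
-      row within its own child ``entry`` table::
-
-        mapper(Widget, widget_table, properties={
-            'entries': relationship(Entry,
-                primaryjoin=widget_table.c.id == entry_table.c.widget_id),
-            'favorite_entry': relationship(Entry,
-                primaryjoin=
-                    widget_table.c.favorite_entry_id == entry_table.c.id,
-                post_update=True)
-        })
-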
- :param primaryjoin:
- a ColumnElement (i.e. WHERE criterion) that will be used as the primary
- join of this child object against the parent object, or in a
- many-to-many relationship the join of the primary object to the
- association table. By default, this value is computed based on the
- foreign key relationships of the parent and child tables (or association
- table).
-
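-      For example, a sketch restricting the related rows beyond the plain
-      foreign key linkage, using :func:`.and_` against hypothetical
-      ``users``/``addresses`` tables::
-
-        mapper(User, users_table, properties={
-            'current_addresses': relationship(Address,
-                primaryjoin=and_(
-                    users_table.c.id == addresses_table.c.user_id,
-                    addresses_table.c.current == True))
-        })
-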
- :param remote_side:
- used for self-referential relationships, indicates the column or
- list of columns that form the "remote side" of the relationship.
-
- :param secondaryjoin:
- a ColumnElement (i.e. WHERE criterion) that will be used as the join of
- an association table to the child object. By default, this value is
- computed based on the foreign key relationships of the association and
- child tables.
-
- :param single_parent=(True|False):
- when True, installs a validator which will prevent objects
- from being associated with more than one parent at a time.
- This is used for many-to-one or many-to-many relationships that
- should be treated either as one-to-one or one-to-many. Its
- usage is optional unless delete-orphan cascade is also
-      set on this relationship(), in which case it's required (new in 0.5.2).
-
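-      For example, a many-to-many treated as one-to-many so that
-      delete-orphan cascade may be used (hypothetical mapping)::
-
-        relationship(Child, secondary=association_table,
-            cascade="all, delete-orphan", single_parent=True)
-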
- :param uselist=(True|False):
- a boolean that indicates if this property should be loaded as a
- list or a scalar. In most cases, this value is determined
- automatically by ``relationship()``, based on the type and direction
- of the relationship - one to many forms a list, many to one
- forms a scalar, many to many is a list. If a scalar is desired
- where normally a list would be present, such as a bi-directional
- one-to-one relationship, set uselist to False.
-
- :param viewonly=False:
- when set to True, the relationship is used only for loading objects
- within the relationship, and has no effect on the unit-of-work
- flush process. Relationships with viewonly can specify any kind of
- join conditions to provide additional views of related objects
- onto a parent object. Note that the functionality of a viewonly
- relationship has its limits - complicated join conditions may
- not compile into eager or lazy loaders properly. If this is the
- case, use an alternative method.
-
- """
- return RelationshipProperty(argument, secondary=secondary, **kwargs)
-
-def relation(*arg, **kw):
- """A synonym for :func:`relationship`."""
-
- return relationship(*arg, **kw)
-
-def dynamic_loader(argument, **kw):
- """Construct a dynamically-loading mapper property.
-
- This is essentially the same as
- using the ``lazy='dynamic'`` argument with :func:`relationship`::
-
- dynamic_loader(SomeClass)
-
- # vs.
-
- relationship(SomeClass, lazy="dynamic")
-
- A :func:`relationship` that is "dynamic" features the behavior
- that read operations return an active :class:`.Query` object which
- reads from the database when accessed. Items may be appended to the
- attribute via ``append()``, or removed via ``remove()``; changes will be
-    persisted to the database during a :meth:`Session.flush`. However, no other
- Python list or collection mutation operations are available.
-
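-    For example, filtering the returned :class:`.Query` before
-    iterating, assuming a hypothetical ``User``/``Address`` mapping::
-
-        jack = session.query(User).get(1)
-        jack.addresses.filter(
-            Address.email_address.like('%@example.com')).all()
-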
- All arguments accepted by :func:`relationship` are
- accepted here, other than ``lazy`` which is fixed at ``dynamic``.
-
- """
- kw['lazy'] = 'dynamic'
- return relationship(argument, **kw)
-
-def column_property(*args, **kwargs):
- """Provide a column-level property for use with a Mapper.
-
- Column-based properties can normally be applied to the mapper's
- ``properties`` dictionary using the :class:`.Column` element directly.
- Use this function when the given column is not directly present within the
- mapper's selectable; examples include SQL expressions, functions, and
- scalar SELECT queries.
-
- Columns that aren't present in the mapper's selectable won't be persisted
- by the mapper and are effectively "read-only" attributes.
-
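-    For example, a sketch mapping a correlated scalar SELECT as a
-    read-only attribute, assuming hypothetical ``users``/``addresses``
-    tables::
-
-        mapper(User, users_table, properties={
-            'address_count': column_property(
-                select([func.count(addresses_table.c.id)]).\\
-                    where(addresses_table.c.user_id == users_table.c.id).\\
-                    as_scalar())
-        })
-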
- :param \*cols:
- list of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. Normally, history tracking logic for
- simple non-primary-key scalar values only needs to be
- aware of the "new" value in order to perform a flush. This
- flag is available for applications that make use of
- :func:`.attributes.get_history` which also need to know
- the "previous" value of the attribute. (new in 0.6.6)
-
- :param comparator_factory: a class which extends
- :class:`.ColumnProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param group:
- a group name for this property when marked as deferred.
-
- :param deferred:
- when True, the column property is "deferred", meaning that
- it does not load immediately, and is instead loaded when the
- attribute is first accessed on an instance. See also
- :func:`~sqlalchemy.orm.deferred`.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param extension:
- an
- :class:`.AttributeExtension`
- instance, or list of extensions, which will be prepended
- to the list of attribute listeners for the resulting
- descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
-
- """
-
- return ColumnProperty(*args, **kwargs)
-
-def composite(class_, *cols, **kwargs):
- """Return a composite column-based property for use with a Mapper.
-
-    See the mapping documentation section :ref:`mapper_composite` for a full
- usage example.
-
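-    A brief sketch, assuming a hypothetical ``Point`` class which
-    accepts two coordinates and implements ``__composite_values__()``::
-
-        mapper(Vertex, vertices_table, properties={
-            'start': composite(Point, vertices_table.c.x1,
-                               vertices_table.c.y1),
-            'end': composite(Point, vertices_table.c.x2,
-                             vertices_table.c.y2)
-        })
-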
- :param class\_:
- The "composite type" class.
-
- :param \*cols:
- List of Column objects to be mapped.
-
- :param active_history=False:
- When ``True``, indicates that the "previous" value for a
- scalar attribute should be loaded when replaced, if not
- already loaded. See the same flag on :func:`.column_property`.
- (This flag becomes meaningful specifically for
- :func:`.composite` in 0.7 - previously it was a placeholder).
-
- :param group:
- A group name for this property when marked as deferred.
-
- :param deferred:
- When True, the column property is "deferred", meaning that it does not
- load immediately, and is instead loaded when the attribute is first
- accessed on an instance. See also :func:`~sqlalchemy.orm.deferred`.
-
- :param comparator_factory: a class which extends
- :class:`.CompositeProperty.Comparator` which provides custom SQL clause
- generation for comparison operations.
-
- :param doc:
- optional string that will be applied as the doc on the
- class-bound descriptor.
-
- :param extension:
- an :class:`.AttributeExtension` instance,
- or list of extensions, which will be prepended to the list of
- attribute listeners for the resulting descriptor placed on the class.
- **Deprecated.** Please see :class:`.AttributeEvents`.
-
- """
- return CompositeProperty(class_, *cols, **kwargs)
-
-
-def backref(name, **kwargs):
- """Create a back reference with explicit keyword arguments, which are the same
- arguments one can send to :func:`relationship`.
-
- Used with the ``backref`` keyword argument to :func:`relationship` in
- place of a string argument, e.g.::
-
- 'items':relationship(SomeItem, backref=backref('parent', lazy='subquery'))
-
- """
- return (name, kwargs)
-
-def deferred(*columns, **kwargs):
- """Return a :class:`.DeferredColumnProperty`, which indicates this
- object attributes should only be loaded from its corresponding
- table column when first accessed.
-
- Used with the `properties` dictionary sent to :func:`mapper`.
-
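-    For example, deferring a large column until first access, assuming
-    a hypothetical ``Book`` mapping::
-
-        mapper(Book, book_table, properties={
-            'photo': deferred(book_table.c.photo)
-        })
-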
- """
- return ColumnProperty(deferred=True, *columns, **kwargs)
-
-def mapper(class_, local_table=None, *args, **params):
- """Return a new :class:`~.Mapper` object.
-
- :param class\_: The class to be mapped.
-
- :param local_table: The table to which the class is mapped, or None if
- this mapper inherits from another mapper using concrete table
- inheritance.
-
- :param always_refresh: If True, all query operations for this mapped
- class will overwrite all data within object instances that already
- exist within the session, erasing any in-memory changes with
- whatever information was loaded from the database. Usage of this
- flag is highly discouraged; as an alternative, see the method
- :meth:`.Query.populate_existing`.
-
-    :param allow_null_pks: This flag is deprecated; use
-       allow_partial_pks, which defaults to True.
-
- :param allow_partial_pks: Defaults to True. Indicates that a
- composite primary key with some NULL values should be considered as
- possibly existing within the database. This affects whether a
- mapper will assign an incoming row to an existing identity, as well
- as if :meth:`.Session.merge` will check the database first for a
- particular primary key value. A "partial primary key" can occur if
- one has mapped to an OUTER JOIN, for example.
-
- :param batch: Indicates that save operations of multiple entities
-       can be batched together for efficiency. Setting to False indicates
- that an instance will be fully saved before saving the next
- instance, which includes inserting/updating all table rows
- corresponding to the entity as well as calling all
- :class:`.MapperExtension` methods corresponding to the save
- operation.
-
- :param column_prefix: A string which will be prepended to the `key`
- name of all :class:`.Column` objects when creating
- column-based properties from the
- given :class:`.Table`. Does not affect explicitly specified
-       column-based properties.
-
- :param concrete: If True, indicates this mapper should use concrete
- table inheritance with its parent mapper.
-
- :param exclude_properties: A list or set of string column names to
- be excluded from mapping. As of SQLAlchemy 0.6.4, this collection
- may also include :class:`.Column` objects. Columns named or present
- in this list will not be automatically mapped. Note that neither
-       this option nor include_properties will allow one to circumvent plain
- Python inheritance - if mapped class ``B`` inherits from mapped
- class ``A``, no combination of includes or excludes will allow ``B``
- to have fewer properties than its superclass, ``A``.
-
- :param extension: A :class:`.MapperExtension` instance or
- list of :class:`.MapperExtension`
- instances which will be applied to all operations by this
- :class:`.Mapper`. **Deprecated.** Please see :class:`.MapperEvents`.
-
- :param include_properties: An inclusive list or set of string column
- names to map. As of SQLAlchemy 0.6.4, this collection may also
- include :class:`.Column` objects in order to disambiguate between
- same-named columns in a selectable (such as a
- :func:`~.expression.join()`). If this list is not ``None``, columns
- present in the mapped table but not named or present in this list
- will not be automatically mapped. See also "exclude_properties".
-
-    :param inherits: Another :class:`.Mapper` with which
-       this :class:`.Mapper` will have an inheritance
-       relationship.
-
- :param inherit_condition: For joined table inheritance, a SQL
- expression (constructed
- :class:`.ClauseElement`) which will
- define how the two tables are joined; defaults to a natural join
- between the two tables.
-
- :param inherit_foreign_keys: When inherit_condition is used and the
- condition contains no ForeignKey columns, specify the "foreign"
-       columns of the join condition in this list; otherwise leave as None.
-
- :param non_primary: Construct a :class:`.Mapper` that will define only
- the selection of instances, not their persistence. Any number of
- non_primary mappers may be created for a particular class.
-
- :param order_by: A single :class:`.Column` or list of :class:`.Column`
-       objects which selection operations should use as the default
- ordering for entities. Defaults to the OID/ROWID of the table if
- any, or the first primary key column of the table.
-
- :param passive_updates: Indicates UPDATE behavior of foreign keys
- when a primary key changes on a joined-table inheritance or other
- joined table mapping.
-
- When True, it is assumed that ON UPDATE CASCADE is configured on
- the foreign key in the database, and that the database will handle
- propagation of an UPDATE from a source column to dependent rows.
- Note that with databases which enforce referential integrity (i.e.
- PostgreSQL, MySQL with InnoDB tables), ON UPDATE CASCADE is
- required for this operation. The relationship() will update the
- value of the attribute on related items which are locally present
- in the session during a flush.
-
- When False, it is assumed that the database does not enforce
- referential integrity and will not be issuing its own CASCADE
- operation for an update. The relationship() will issue the
- appropriate UPDATE statements to the database in response to the
- change of a referenced key, and items locally present in the
- session during a flush will also be refreshed.
-
- This flag should probably be set to False if primary key changes
- are expected and the database in use doesn't support CASCADE (i.e.
- SQLite, MySQL MyISAM tables).
-
- Also see the passive_updates flag on :func:`relationship()`.
-
- A future SQLAlchemy release will provide a "detect" feature for
- this flag.
-
- :param polymorphic_on: Used with mappers in an inheritance
- relationship, a :class:`.Column` which will identify the class/mapper
- combination to be used with a particular row. Requires the
- ``polymorphic_identity`` value to be set for all mappers in the
- inheritance hierarchy. The column specified by ``polymorphic_on``
- is usually a column that resides directly within the base mapper's
- mapped table; alternatively, it may be a column that is only
- present within the <selectable> portion of the ``with_polymorphic``
- argument.
-
- :param polymorphic_identity: A value which will be stored in the
- Column denoted by polymorphic_on, corresponding to the class
- identity of this mapper.
-
- :param properties: A dictionary mapping the string names of object
- attributes to ``MapperProperty`` instances, which define the
- persistence behavior of that attribute. Note that the columns in
- the mapped table are automatically converted into
- ``ColumnProperty`` instances based on the ``key`` property of each
- :class:`.Column` (although they can be overridden using this dictionary).
-
- :param primary_key: A list of :class:`.Column` objects which define the
- primary key to be used against this mapper's selectable unit.
- This is normally simply the primary key of the ``local_table``, but
- can be overridden here.
-
- :param version_id_col: A :class:`.Column` which must have an integer type
- that will be used to keep a running version id of mapped entities
-       in the database. This is used during save operations to ensure that
- no other thread or process has updated the instance during the
- lifetime of the entity, else a :class:`.StaleDataError` exception is
- thrown.
-
- :param version_id_generator: A callable which defines the algorithm
- used to generate new version ids. Defaults to an integer
- generator. Can be replaced with one that generates timestamps,
- uuids, etc. e.g.::
-
- import uuid
-
- mapper(Cls, table,
- version_id_col=table.c.version_uuid,
-                version_id_generator=lambda version: uuid.uuid4().hex
- )
-
- The callable receives the current version identifier as its
- single argument.
-
- :param with_polymorphic: A tuple in the form ``(<classes>,
- <selectable>)`` indicating the default style of "polymorphic"
- loading, that is, which tables are queried at once. <classes> is
- any single or list of mappers and/or classes indicating the
- inherited classes that should be loaded at once. The special value
- ``'*'`` may be used to indicate all descending classes should be
- loaded immediately. The second tuple argument <selectable>
- indicates a selectable that will be used to query for multiple
- classes. Normally, it is left as None, in which case this mapper
- will form an outer join from the base mapper's table to that of
- all desired sub-mappers. When specified, it provides the
- selectable to be used for polymorphic loading. When
- with_polymorphic includes mappers which load from a "concrete"
- inheriting table, the <selectable> argument is required, since it
- usually requires more complex UNION queries.
-
- """
- return Mapper(class_, local_table, *args, **params)
-
-def synonym(name, map_column=False, descriptor=None,
- comparator_factory=None, doc=None):
- """Denote an attribute name as a synonym to a mapped property.
-
- .. note:: :func:`.synonym` is superseded as of 0.7 by
- the :mod:`~sqlalchemy.ext.hybrid` extension. See
- the documentation for hybrids at :ref:`hybrids_toplevel`.
-
- Used with the ``properties`` dictionary sent to
- :func:`~sqlalchemy.orm.mapper`::
-
- class MyClass(object):
- def _get_status(self):
- return self._status
- def _set_status(self, value):
- self._status = value
- status = property(_get_status, _set_status)
-
- mapper(MyClass, sometable, properties={
- "status":synonym("_status", map_column=True)
- })
-
- Above, the ``status`` attribute of MyClass will produce
- expression behavior against the table column named ``status``,
- using the Python attribute ``_status`` on the mapped class
- to represent the underlying value.
-
- :param name: the name of the existing mapped property, which can be
- any other ``MapperProperty`` including column-based properties and
- relationships.
-
- :param map_column: if ``True``, an additional ``ColumnProperty`` is created
- on the mapper automatically, using the synonym's name as the keyname of
- the property, and the keyname of this ``synonym()`` as the name of the
- column to map.
-
- """
- return SynonymProperty(name, map_column=map_column,
- descriptor=descriptor,
- comparator_factory=comparator_factory,
- doc=doc)
-
-def comparable_property(comparator_factory, descriptor=None):
- """Provides a method of applying a :class:`.PropComparator`
- to any Python descriptor attribute.
-
- .. note:: :func:`.comparable_property` is superseded as of 0.7 by
- the :mod:`~sqlalchemy.ext.hybrid` extension. See the example
- at :ref:`hybrid_custom_comparators`.
-
- Allows a regular Python @property (descriptor) to be used in queries and
- SQL constructs like a managed attribute. comparable_property wraps a
- descriptor with a proxy that directs operator overrides such as ==
- (__eq__) to the supplied comparator but proxies everything else through to
- the original descriptor. Used with the ``properties`` dictionary sent to
- :func:`~sqlalchemy.orm.mapper`::
-
- from sqlalchemy.orm import mapper, comparable_property
- from sqlalchemy.orm.interfaces import PropComparator
- from sqlalchemy.sql import func
- from sqlalchemy import Table, MetaData, Integer, String, Column
-
- metadata = MetaData()
-
- word_table = Table('word', metadata,
- Column('id', Integer, primary_key=True),
- Column('word', String(200), nullable=False)
- )
-
- class CaseInsensitiveComparator(PropComparator):
- def __clause_element__(self):
- return self.prop
-
- def __eq__(self, other):
- return func.lower(self.__clause_element__()) == func.lower(other)
-
- class SearchWord(object):
- pass
-
- mapper(SearchWord, word_table, properties={
- 'word_insensitive': comparable_property(CaseInsensitiveComparator)
- })
-
- A mapping like the above allows the ``word_insensitive`` attribute
- to render an expression like::
-
- >>> print SearchWord.word_insensitive == "Trucks"
- lower(:lower_1) = lower(:lower_2)
-
- :param comparator_factory:
- A PropComparator subclass or factory that defines operator behavior
- for this property.
-
- :param descriptor:
- Optional when used in a ``properties={}`` declaration. The Python
- descriptor or property to layer comparison behavior on top of.
-
-      The like-named descriptor will be automatically retrieved from the
- mapped class if left blank in a ``properties`` declaration.
-
- """
- return ComparableProperty(comparator_factory, descriptor)
-
-@sa_util.deprecated("0.7", message=":func:`.compile_mappers` "
- "is renamed to :func:`.configure_mappers`")
-def compile_mappers():
- """Initialize the inter-mapper relationships of all mappers that have been defined."""
-
- configure_mappers()
-
-def clear_mappers():
- """Remove all mappers from all classes.
-
- This function removes all instrumentation from classes and disposes
- of their associated mappers. Once called, the classes are unmapped
- and can be later re-mapped with new mappers.
-
- :func:`.clear_mappers` is *not* for normal use, as there is literally no
- valid usage for it outside of very specific testing scenarios. Normally,
- mappers are permanent structural components of user-defined classes, and
- are never discarded independently of their class. If a mapped class itself
- is garbage collected, its mapper is automatically disposed of as well. As
- such, :func:`.clear_mappers` is only for usage in test suites that re-use
- the same classes with different mappings, which is itself an extremely rare
- use case - the only such use case is in fact SQLAlchemy's own test suite,
- and possibly the test suites of other ORM extension libraries which
- intend to test various combinations of mapper construction upon a fixed
- set of classes.
-
- """
- mapperlib._COMPILE_MUTEX.acquire()
- try:
- while _mapper_registry:
- try:
- # can't even reliably call list(weakdict) in jython
- mapper, b = _mapper_registry.popitem()
- mapper.dispose()
- except KeyError:
- pass
- finally:
- mapperlib._COMPILE_MUTEX.release()
-
-def joinedload(*keys, **kw):
- """Return a ``MapperOption`` that will convert the property of the given
-    name or series of mapped attributes into a joined eager load.
-
- .. note:: This function is known as :func:`eagerload` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload` will remain available for the foreseeable
- future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- examples::
-
- # joined-load the "orders" colleciton on "User"
- query(User).options(joinedload(User.orders))
-
- # joined-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(joinedload(Order.items, Item.keywords))
-
- # to joined-load across both, use joinedload_all()
- query(Order).options(joinedload_all(Order.items, Item.keywords))
-
- :func:`joinedload` also accepts a keyword argument `innerjoin=True` which
- indicates using an inner join instead of an outer::
-
- query(Order).options(joinedload(Order.user, innerjoin=True))
-
- Note that the join created by :func:`joinedload` is aliased such that no
- other aspects of the query will affect what it loads. To use joined eager
- loading with a join that is constructed manually using
- :meth:`~sqlalchemy.orm.query.Query.join` or :func:`~sqlalchemy.orm.join`,
- see :func:`contains_eager`.
-
- See also: :func:`subqueryload`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined'),
- strategies.EagerJoinOption(keys, innerjoin)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined')
-
-def joinedload_all(*keys, **kw):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
-    into a joined eager load.
-
- .. note:: This function is known as :func:`eagerload_all` in all versions
- of SQLAlchemy prior to version 0.6beta3, including the 0.5 and 0.4
- series. :func:`eagerload_all` will remain available for the
- foreseeable future in order to enable cross-compatibility.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(joinedload_all('orders.items.keywords'))...
-
- will set all of 'orders', 'orders.items', and 'orders.items.keywords' to
- load in one joined eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(joinedload_all(User.orders, Order.items, Item.keywords))
-
- The keyword arguments accept a flag `innerjoin=True|False` which will
- override the value of the `innerjoin` flag specified on the
- relationship().
-
- See also: :func:`subqueryload_all`, :func:`lazyload`
-
- """
- innerjoin = kw.pop('innerjoin', None)
- if innerjoin is not None:
- return (
- strategies.EagerLazyOption(keys, lazy='joined', chained=True),
- strategies.EagerJoinOption(keys, innerjoin, chained=True)
- )
- else:
- return strategies.EagerLazyOption(keys, lazy='joined', chained=True)
-
-def eagerload(*args, **kwargs):
- """A synonym for :func:`joinedload()`."""
- return joinedload(*args, **kwargs)
-
-def eagerload_all(*args, **kwargs):
- """A synonym for :func:`joinedload_all()`"""
- return joinedload_all(*args, **kwargs)
-
-def subqueryload(*keys):
- """Return a ``MapperOption`` that will convert the property
- of the given name or series of mapped attributes
-    into a subquery eager load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- examples::
-
- # subquery-load the "orders" colleciton on "User"
- query(User).options(subqueryload(User.orders))
-
- # subquery-load the "keywords" collection on each "Item",
- # but not the "items" collection on "Order" - those
- # remain lazily loaded.
- query(Order).options(subqueryload(Order.items, Item.keywords))
-
- # to subquery-load across both, use subqueryload_all()
- query(Order).options(subqueryload_all(Order.items, Item.keywords))
-
- See also: :func:`joinedload`, :func:`lazyload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery")
-
-def subqueryload_all(*keys):
- """Return a ``MapperOption`` that will convert all properties along the
- given dot-separated path or series of mapped attributes
- into a subquery eager load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- For example::
-
- query.options(subqueryload_all('orders.items.keywords'))...
-
- will set all of 'orders', 'orders.items', and 'orders.items.keywords' to
- load in one subquery eager load.
-
- Individual descriptors are accepted as arguments as well::
-
- query.options(subqueryload_all(User.orders, Order.items,
- Item.keywords))
-
- See also: :func:`joinedload_all`, :func:`lazyload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy="subquery", chained=True)
-
-def lazyload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True)
-
-def lazyload_all(*keys):
- """Return a ``MapperOption`` that will convert all the properties
- along the given dot-separated path or series of mapped attributes
- into a lazy load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`eagerload`, :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=True, chained=True)
-
-def noload(*keys):
- """Return a ``MapperOption`` that will convert the property of the
- given name or series of mapped attributes into a non-load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`,
- :func:`subqueryload`, :func:`immediateload`
-
- """
- return strategies.EagerLazyOption(keys, lazy=None)
-
-def immediateload(*keys):
- """Return a ``MapperOption`` that will convert the property of the given
- name or series of mapped attributes into an immediate load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- See also: :func:`lazyload`, :func:`eagerload`, :func:`subqueryload`
-
-    New as of version 0.6.5.
-
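-    e.g., assuming a hypothetical ``User.addresses`` relationship::
-
-        # emit an additional SELECT for each User.addresses
-        # collection at query time, rather than upon first access
-        session.query(User).options(immediateload(User.addresses)).all()
-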
- """
- return strategies.EagerLazyOption(keys, lazy='immediate')
-
-def contains_alias(alias):
- """Return a ``MapperOption`` that will indicate to the query that
- the main table has been aliased.
-
- `alias` is the string name or ``Alias`` object representing the
- alias.
-
- """
- return AliasOption(alias)
-
-def contains_eager(*keys, **kwargs):
- """Return a ``MapperOption`` that will indicate to the query that
- the given attribute should be eagerly loaded from columns currently
- in the query.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- The option is used in conjunction with an explicit join that loads
- the desired rows, i.e.::
-
- sess.query(Order).\\
- join(Order.user).\\
- options(contains_eager(Order.user))
-
- The above query would join from the ``Order`` entity to its related
- ``User`` entity, and the returned ``Order`` objects would have the
- ``Order.user`` attribute pre-populated.
-
- :func:`contains_eager` also accepts an `alias` argument, which is the
- string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
- construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
- the eagerly-loaded rows are to come from an aliased table::
-
- user_alias = aliased(User)
- sess.query(Order).\\
- join((user_alias, Order.user)).\\
- options(contains_eager(Order.user, alias=user_alias))
-
- See also :func:`eagerload` for the "automatic" version of this
- functionality.
-
- For additional examples of :func:`contains_eager` see
- :ref:`contains_eager`.
-
- """
- alias = kwargs.pop('alias', None)
- if kwargs:
-        raise exceptions.ArgumentError(
-            'Invalid kwargs for contains_eager: %r' % kwargs.keys())
- return strategies.EagerLazyOption(keys, lazy='joined',
- propagate_to_loaders=False, chained=True), \
- strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
-
-def defer(*keys):
- """Return a ``MapperOption`` that will convert the column property of the
- given name into a deferred load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
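-    e.g., deferring a column for one query only, assuming a
-    hypothetical ``Book`` mapping::
-
-        session.query(Book).options(defer('photo')).all()
-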
- """
- return strategies.DeferredOption(keys, defer=True)
-
-def undefer(*keys):
- """Return a ``MapperOption`` that will convert the column property of the
- given name into a non-deferred (regular column) load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- """
- return strategies.DeferredOption(keys, defer=False)
-
-def undefer_group(name):
- """Return a ``MapperOption`` that will convert the given group of deferred
- column properties into a non-deferred (regular column) load.
-
- Used with :meth:`~sqlalchemy.orm.query.Query.options`.
-
- """
- return strategies.UndeferGroupOption(name)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/attributes.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/attributes.py
deleted file mode 100755
index ba262266..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/attributes.py
+++ /dev/null
@@ -1,1335 +0,0 @@
-# orm/attributes.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Defines instrumentation for class attributes and their interaction
-with instances.
-
-This module is usually not directly visible to user applications, but
-defines a large part of the ORM's interactivity.
-
-
-"""
-
-import operator
-from operator import itemgetter
-
-from sqlalchemy import util, event, exc as sa_exc
-from sqlalchemy.orm import interfaces, collections, events
-
-
-mapperutil = util.importlater("sqlalchemy.orm", "util")
-
-PASSIVE_NO_RESULT = util.symbol('PASSIVE_NO_RESULT')
-ATTR_WAS_SET = util.symbol('ATTR_WAS_SET')
-ATTR_EMPTY = util.symbol('ATTR_EMPTY')
-NO_VALUE = util.symbol('NO_VALUE')
-NEVER_SET = util.symbol('NEVER_SET')
-
-PASSIVE_RETURN_NEVER_SET = util.symbol('PASSIVE_RETURN_NEVER_SET',
-"""Symbol indicating that a 'default' value, i.e. None or blank
-collection, should not be assigned to an attribute when a get()
-is performed and no value was present. NEVER_SET is returned
-instead.
-""")
-
-PASSIVE_NO_INITIALIZE = util.symbol('PASSIVE_NO_INITIALIZE',
-"""Symbol indicating that loader callables should
- not be fired off, and a non-initialized attribute
- should remain that way.
-""")
-
-PASSIVE_NO_FETCH = util.symbol('PASSIVE_NO_FETCH',
-"""Symbol indicating that loader callables should not emit SQL,
- but a value can be fetched from the current session.
-
- Non-initialized attributes should be initialized to an empty value.
-
-""")
-
-PASSIVE_NO_FETCH_RELATED = util.symbol('PASSIVE_NO_FETCH_RELATED',
-"""Symbol indicating that loader callables should not emit SQL for
- loading a related object, but can refresh the attributes of the local
- instance in order to locate a related object in the current session.
-
- Non-initialized attributes should be initialized to an empty value.
-
- The unit of work uses this mode to check if history is present
- on many-to-one attributes with minimal SQL emitted.
-
-""")
-
-PASSIVE_ONLY_PERSISTENT = util.symbol('PASSIVE_ONLY_PERSISTENT',
-"""Symbol indicating that loader callables should only fire off for
- parent objects which are persistent (i.e., have a database
- identity).
-
- Load operations for the "previous" value of an attribute make
- use of this flag during change events.
-
-""")
-
-PASSIVE_OFF = util.symbol('PASSIVE_OFF',
-"""Symbol indicating that loader callables should be executed
- normally.
-
-""")
-
-
-class QueryableAttribute(interfaces.PropComparator):
- """Base class for class-bound attributes. """
-
- def __init__(self, class_, key, impl=None,
- comparator=None, parententity=None):
- self.class_ = class_
- self.key = key
- self.impl = impl
- self.comparator = comparator
- self.parententity = parententity
-
- manager = manager_of_class(class_)
- # manager is None in the case of AliasedClass
- if manager:
- # propagate existing event listeners from
- # immediate superclass
- for base in manager._bases:
- if key in base:
- self.dispatch._update(base[key].dispatch)
-
- dispatch = event.dispatcher(events.AttributeEvents)
- dispatch.dispatch_cls._active_history = False
-
- @util.memoized_property
- def _supports_population(self):
- return self.impl.supports_population
-
- def get_history(self, instance, passive=PASSIVE_OFF):
- return self.impl.get_history(instance_state(instance),
- instance_dict(instance), passive)
-
- def __selectable__(self):
- # TODO: conditionally attach this method based on clause_element ?
- return self
-
- def __clause_element__(self):
- return self.comparator.__clause_element__()
-
- def label(self, name):
- return self.__clause_element__().label(name)
-
- def operate(self, op, *other, **kwargs):
- return op(self.comparator, *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- return op(other, self.comparator, **kwargs)
-
- def hasparent(self, state, optimistic=False):
- return self.impl.hasparent(state, optimistic=optimistic)
-
- def __getattr__(self, key):
- try:
- return getattr(self.comparator, key)
- except AttributeError:
- raise AttributeError(
- 'Neither %r object nor %r object has an attribute %r' % (
- type(self).__name__,
- type(self.comparator).__name__,
- key)
- )
-
- def __str__(self):
- return "%s.%s" % (self.class_.__name__, self.key)
-
- @util.memoized_property
- def property(self):
- return self.comparator.property
-
-
-class InstrumentedAttribute(QueryableAttribute):
- """Class bound instrumented attribute which adds descriptor methods."""
-
- def __set__(self, instance, value):
- self.impl.set(instance_state(instance),
- instance_dict(instance), value, None)
-
- def __delete__(self, instance):
- self.impl.delete(instance_state(instance), instance_dict(instance))
-
- def __get__(self, instance, owner):
- if instance is None:
- return self
-
- dict_ = instance_dict(instance)
- if self._supports_population and self.key in dict_:
- return dict_[self.key]
- else:
- return self.impl.get(instance_state(instance),dict_)
-
-def create_proxied_attribute(descriptor):
- """Create an QueryableAttribute / user descriptor hybrid.
-
- Returns a new QueryableAttribute type that delegates descriptor
- behavior and getattr() to the given descriptor.
- """
-
- # TODO: can move this to descriptor_props if the need for this
- # function is removed from ext/hybrid.py
-
- class Proxy(QueryableAttribute):
- """Presents the :class:`.QueryableAttribute` interface as a
- proxy on top of a Python descriptor / :class:`.PropComparator`
- combination.
-
- """
-
- def __init__(self, class_, key, descriptor, comparator,
- adapter=None, doc=None):
- self.class_ = class_
- self.key = key
- self.descriptor = descriptor
- self._comparator = comparator
- self.adapter = adapter
- self.__doc__ = doc
-
- @util.memoized_property
- def comparator(self):
- if util.callable(self._comparator):
- self._comparator = self._comparator()
- if self.adapter:
- self._comparator = self._comparator.adapted(self.adapter)
- return self._comparator
-
- def adapted(self, adapter):
- """Proxy adapted() for the use case of AliasedClass calling adapted."""
-
- return self.__class__(self.class_, self.key, self.descriptor,
- self._comparator,
- adapter)
-
- def __get__(self, instance, owner):
- if instance is None:
- return self
- else:
- return self.descriptor.__get__(instance, owner)
-
- def __str__(self):
- return self.key
-
- def __getattr__(self, attribute):
- """Delegate __getattr__ to the original descriptor and/or
- comparator."""
-
- try:
- return getattr(descriptor, attribute)
- except AttributeError:
- try:
- return getattr(self.comparator, attribute)
- except AttributeError:
- raise AttributeError(
- 'Neither %r object nor %r object has an attribute %r' % (
- type(descriptor).__name__,
- type(self.comparator).__name__,
- attribute)
- )
-
- Proxy.__name__ = type(descriptor).__name__ + 'Proxy'
-
- util.monkeypatch_proxied_specials(Proxy, type(descriptor),
- name='descriptor',
- from_instance=descriptor)
- return Proxy
-
-class AttributeImpl(object):
- """internal implementation for instrumented attributes."""
-
- def __init__(self, class_, key,
- callable_, dispatch, trackparent=False, extension=None,
- compare_function=None, active_history=False,
- parent_token=None, expire_missing=True,
- **kwargs):
- """Construct an AttributeImpl.
-
- \class_
- associated class
-
- key
- string name of the attribute
-
- \callable_
- optional function which generates a callable based on a parent
- instance, which produces the "default" values for a scalar or
- collection attribute when it's first accessed, if not present
- already.
-
- trackparent
- if True, attempt to track if an instance has a parent attached
- to it via this attribute.
-
- extension
- a single or list of AttributeExtension object(s) which will
- receive set/delete/append/remove/etc. events. Deprecated.
- The event package is now used.
-
- compare_function
- a function that compares two values which are normally
- assignable to this attribute.
-
- active_history
- indicates that get_history() should always return the "old" value,
- even if it means executing a lazy callable upon attribute change.
-
- parent_token
- Usually references the MapperProperty, used as a key for
- the hasparent() function to identify an "owning" attribute.
- Allows multiple AttributeImpls to all match a single
- owner attribute.
-
- expire_missing
- if False, don't add an "expiry" callable to this attribute
- during state.expire_attributes(None), if no value is present
- for this key.
-
- """
- self.class_ = class_
- self.key = key
- self.callable_ = callable_
- self.dispatch = dispatch
- self.trackparent = trackparent
- self.parent_token = parent_token or self
- if compare_function is None:
- self.is_equal = operator.eq
- else:
- self.is_equal = compare_function
-
- # TODO: pass in the manager here
- # instead of doing a lookup
- attr = manager_of_class(class_)[key]
-
- for ext in util.to_list(extension or []):
- ext._adapt_listener(attr, ext)
-
- if active_history:
- self.dispatch._active_history = True
-
- self.expire_missing = expire_missing
-
- def _get_active_history(self):
- """Backwards compat for impl.active_history"""
-
- return self.dispatch._active_history
-
- def _set_active_history(self, value):
- self.dispatch._active_history = value
-
- active_history = property(_get_active_history, _set_active_history)
-
-
- def hasparent(self, state, optimistic=False):
- """Return the boolean value of a `hasparent` flag attached to
- the given state.
-
- The `optimistic` flag determines what the default return value
- should be if no `hasparent` flag can be located.
-
- As this function is used to determine if an instance is an
- *orphan*, instances that were loaded from storage should be
- assumed to not be orphans, until a True/False value for this
- flag is set.
-
- An instance attribute that is loaded by a callable function
- will also not have a `hasparent` flag.
-
- """
- return state.parents.get(id(self.parent_token), optimistic)
-
- def sethasparent(self, state, value):
- """Set a boolean flag on the given item corresponding to
- whether or not it is attached to a parent object via the
- attribute represented by this ``InstrumentedAttribute``.
-
- """
- state.parents[id(self.parent_token)] = value
-
- def set_callable(self, state, callable_):
- """Set a callable function for this attribute on the given object.
-
- This callable will be executed when the attribute is next
-        accessed, and is assumed to construct part of the instance's
- previously stored state. When its value or values are loaded,
- they will be established as part of the instance's *committed
- state*. While *trackparent* information will be assembled for
- these instances, attribute-level event handlers will not be
- fired.
-
- The callable overrides the class level callable set in the
- ``InstrumentedAttribute`` constructor.
-
- """
- state.callables[self.key] = callable_
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- raise NotImplementedError()
-
- def get_all_pending(self, state, dict_):
- """Return a list of tuples of (state, obj)
- for all objects in this attribute's current state
- + history.
-
- Only applies to object-based attributes.
-
- This is an inlining of existing functionality
-        which roughly corresponds to:
-
- get_state_history(
- state,
- key,
- passive=PASSIVE_NO_INITIALIZE).sum()
-
- """
- raise NotImplementedError()
-
- def initialize(self, state, dict_):
- """Initialize the given state's attribute with an empty value."""
-
- dict_[self.key] = None
- return None
-
- def get(self, state, dict_, passive=PASSIVE_OFF):
- """Retrieve a value from the given object.
-
- If a callable is assembled on this object's attribute, and
- passive is False, the callable will be executed and the
- resulting value will be set as the new value for this attribute.
- """
- if self.key in dict_:
- return dict_[self.key]
- else:
- # if history present, don't load
- key = self.key
- if key not in state.committed_state or \
- state.committed_state[key] is NEVER_SET:
- if passive is PASSIVE_NO_INITIALIZE:
- return PASSIVE_NO_RESULT
-
- if key in state.callables:
- callable_ = state.callables[key]
- value = callable_(passive)
- elif self.callable_:
- value = self.callable_(state, passive)
- else:
- value = ATTR_EMPTY
-
- if value is PASSIVE_NO_RESULT:
- return value
- elif value is ATTR_WAS_SET:
- try:
- return dict_[key]
- except KeyError:
- # TODO: no test coverage here.
- raise KeyError(
- "Deferred loader for attribute "
- "%r failed to populate "
- "correctly" % key)
- elif value is not ATTR_EMPTY:
- return self.set_committed_value(state, dict_, value)
-
- if passive is PASSIVE_RETURN_NEVER_SET:
- return NEVER_SET
- else:
- # Return a new, empty value
- return self.initialize(state, dict_)
-
- def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- self.set(state, dict_, value, initiator, passive=passive)
-
- def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- self.set(state, dict_, None, initiator, passive=passive)
-
- def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- raise NotImplementedError()
-
- def get_committed_value(self, state, dict_, passive=PASSIVE_OFF):
- """return the unchanged value of this attribute"""
-
- if self.key in state.committed_state:
- value = state.committed_state[self.key]
- if value is NO_VALUE:
- return None
- else:
- return value
- else:
- return self.get(state, dict_, passive=passive)
-
- def set_committed_value(self, state, dict_, value):
- """set an attribute value on the given instance and 'commit' it."""
-
- dict_[self.key] = value
- state.commit(dict_, [self.key])
- return value
-
-class ScalarAttributeImpl(AttributeImpl):
- """represents a scalar value-holding InstrumentedAttribute."""
-
- accepts_scalar_loader = True
- uses_objects = False
- supports_population = True
-
- def delete(self, state, dict_):
-
- # TODO: catch key errors, convert to attributeerror?
- if self.dispatch._active_history:
- old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
- else:
- old = dict_.get(self.key, NO_VALUE)
-
- if self.dispatch.remove:
- self.fire_remove_event(state, dict_, old, None)
- state.modified_event(dict_, self, old)
- del dict_[self.key]
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- return History.from_scalar_attribute(
- self, state, dict_.get(self.key, NO_VALUE))
-
- def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- if self.dispatch._active_history:
- old = self.get(state, dict_, PASSIVE_RETURN_NEVER_SET)
- else:
- old = dict_.get(self.key, NO_VALUE)
-
- if self.dispatch.set:
- value = self.fire_replace_event(state, dict_,
- value, old, initiator)
- state.modified_event(dict_, self, old)
- dict_[self.key] = value
-
- def fire_replace_event(self, state, dict_, value, previous, initiator):
- for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
- return value
-
- def fire_remove_event(self, state, dict_, value, initiator):
- for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
-
- @property
- def type(self):
-        return self.property.columns[0].type
-
-
-class MutableScalarAttributeImpl(ScalarAttributeImpl):
- """represents a scalar value-holding InstrumentedAttribute, which can
- detect changes within the value itself.
-
- """
-
- uses_objects = False
- supports_population = True
-
- def __init__(self, class_, key, callable_, dispatch,
- class_manager, copy_function=None,
- compare_function=None, **kwargs):
- super(ScalarAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- compare_function=compare_function,
- **kwargs)
- class_manager.mutable_attributes.add(key)
- if copy_function is None:
- raise sa_exc.ArgumentError(
- "MutableScalarAttributeImpl requires a copy function")
- self.copy = copy_function
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- if not dict_:
- v = state.committed_state.get(self.key, NO_VALUE)
- else:
- v = dict_.get(self.key, NO_VALUE)
-
- return History.from_scalar_attribute(self, state, v)
-
- def check_mutable_modified(self, state, dict_):
- a, u, d = self.get_history(state, dict_)
- return bool(a or d)
-
- def get(self, state, dict_, passive=PASSIVE_OFF):
- if self.key not in state.mutable_dict:
- ret = ScalarAttributeImpl.get(self, state, dict_, passive=passive)
- if ret is not PASSIVE_NO_RESULT:
- state.mutable_dict[self.key] = ret
- return ret
- else:
- return state.mutable_dict[self.key]
-
- def delete(self, state, dict_):
- ScalarAttributeImpl.delete(self, state, dict_)
- state.mutable_dict.pop(self.key)
-
- def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- ScalarAttributeImpl.set(self, state, dict_, value, initiator, passive)
- state.mutable_dict[self.key] = value
-
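-# Editor's note (not part of the original source): this impl is selected
-# by register_attribute_impl(..., mutable_scalars=True) further below and
-# requires a copy function so a pristine snapshot of the value can be kept
-# in committed_state.  For a list-valued scalar, a hedged sketch of a
-# suitable copy function::
-#
-#     copy_fn = lambda value: list(value)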
-
-class ScalarObjectAttributeImpl(ScalarAttributeImpl):
- """represents a scalar-holding InstrumentedAttribute,
- where the target object is also instrumented.
-
- Adds events to delete/set operations.
-
- """
-
- accepts_scalar_loader = False
- uses_objects = True
- supports_population = True
-
- def __init__(self, class_, key, callable_, dispatch,
- trackparent=False, extension=None, copy_function=None,
- **kwargs):
- super(ScalarObjectAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- trackparent=trackparent,
- extension=extension,
- **kwargs)
-
- def delete(self, state, dict_):
- old = self.get(state, dict_)
- self.fire_remove_event(state, dict_, old, self)
- del dict_[self.key]
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- if self.key in dict_:
- return History.from_object_attribute(self, state, dict_[self.key])
- else:
- if passive is PASSIVE_OFF:
- passive = PASSIVE_RETURN_NEVER_SET
- current = self.get(state, dict_, passive=passive)
- if current is PASSIVE_NO_RESULT:
- return HISTORY_BLANK
- else:
- return History.from_object_attribute(self, state, current)
-
- def get_all_pending(self, state, dict_):
- if self.key in dict_:
- current = dict_[self.key]
- if current is not None:
- ret = [(instance_state(current), current)]
- else:
- ret = []
-
- if self.key in state.committed_state:
- original = state.committed_state[self.key]
- if original not in (NEVER_SET, PASSIVE_NO_RESULT, None) and \
- original is not current:
-
- ret.append((instance_state(original), original))
- return ret
- else:
- return []
-
- def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- """Set a value on the given InstanceState.
-
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
-
- """
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- if self.dispatch._active_history:
- old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
- else:
- old = self.get(state, dict_, passive=PASSIVE_NO_FETCH)
-
- value = self.fire_replace_event(state, dict_, value, old, initiator)
- dict_[self.key] = value
-
- def fire_remove_event(self, state, dict_, value, initiator):
- if self.trackparent and value is not None:
- self.sethasparent(instance_state(value), False)
-
- for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
-
- state.modified_event(dict_, self, value)
-
- def fire_replace_event(self, state, dict_, value, previous, initiator):
- if self.trackparent:
- if (previous is not value and
- previous is not None and
- previous is not PASSIVE_NO_RESULT):
- self.sethasparent(instance_state(previous), False)
-
- for fn in self.dispatch.set:
- value = fn(state, value, previous, initiator or self)
-
- state.modified_event(dict_, self, previous)
-
- if self.trackparent:
- if value is not None:
- self.sethasparent(instance_state(value), True)
-
- return value
-
-
-class CollectionAttributeImpl(AttributeImpl):
- """A collection-holding attribute that instruments changes in membership.
-
- Only handles collections of instrumented objects.
-
- InstrumentedCollectionAttribute holds an arbitrary, user-specified
- container object (defaulting to a list) and brokers access to the
- CollectionAdapter, a "view" onto that object that presents consistent bag
-    semantics to the ORM layer independent of the user data implementation.
-
- """
- accepts_scalar_loader = False
- uses_objects = True
- supports_population = True
-
- def __init__(self, class_, key, callable_, dispatch,
- typecallable=None, trackparent=False, extension=None,
- copy_function=None, compare_function=None, **kwargs):
- super(CollectionAttributeImpl, self).__init__(
- class_,
- key,
- callable_, dispatch,
- trackparent=trackparent,
- extension=extension,
- compare_function=compare_function,
- **kwargs)
-
- if copy_function is None:
- copy_function = self.__copy
- self.copy = copy_function
- self.collection_factory = typecallable
-
- def __copy(self, item):
-        return list(collections.collection_adapter(item))
-
- def get_history(self, state, dict_, passive=PASSIVE_OFF):
- current = self.get(state, dict_, passive=passive)
- if current is PASSIVE_NO_RESULT:
- return HISTORY_BLANK
- else:
- return History.from_collection(self, state, current)
-
- def get_all_pending(self, state, dict_):
- if self.key not in dict_:
- return []
-
- current = dict_[self.key]
- current = getattr(current, '_sa_adapter')
-
- if self.key in state.committed_state:
- original = state.committed_state[self.key]
- if original is not NO_VALUE:
- current_states = [(instance_state(c), c) for c in current]
- original_states = [(instance_state(c), c) for c in original]
-
- current_set = dict(current_states)
- original_set = dict(original_states)
-
- return \
- [(s, o) for s, o in current_states if s not in original_set] + \
- [(s, o) for s, o in current_states if s in original_set] + \
- [(s, o) for s, o in original_states if s not in current_set]
-
- return [(instance_state(o), o) for o in current]
-
- def fire_append_event(self, state, dict_, value, initiator):
- for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
-
- state.modified_event(dict_, self, NEVER_SET, True)
-
- if self.trackparent and value is not None:
- self.sethasparent(instance_state(value), True)
-
- return value
-
- def fire_pre_remove_event(self, state, dict_, initiator):
- state.modified_event(dict_, self, NEVER_SET, True)
-
- def fire_remove_event(self, state, dict_, value, initiator):
- if self.trackparent and value is not None:
- self.sethasparent(instance_state(value), False)
-
- for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
-
- state.modified_event(dict_, self, NEVER_SET, True)
-
- def delete(self, state, dict_):
- if self.key not in dict_:
- return
-
- state.modified_event(dict_, self, NEVER_SET, True)
-
- collection = self.get_collection(state, state.dict)
- collection.clear_with_event()
- # TODO: catch key errors, convert to attributeerror?
- del dict_[self.key]
-
- def initialize(self, state, dict_):
- """Initialize this attribute with an empty collection."""
-
- _, user_data = self._initialize_collection(state)
- dict_[self.key] = user_data
- return user_data
-
- def _initialize_collection(self, state):
- return state.manager.initialize_collection(
- self.key, state, self.collection_factory)
-
- def append(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- collection = self.get_collection(state, dict_, passive=passive)
- if collection is PASSIVE_NO_RESULT:
- value = self.fire_append_event(state, dict_, value, initiator)
- assert self.key not in dict_, \
- "Collection was loaded during event handling."
- state.get_pending(self.key).append(value)
- else:
- collection.append_with_event(value, initiator)
-
- def remove(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- collection = self.get_collection(state, state.dict, passive=passive)
- if collection is PASSIVE_NO_RESULT:
- self.fire_remove_event(state, dict_, value, initiator)
- assert self.key not in dict_, \
- "Collection was loaded during event handling."
- state.get_pending(self.key).remove(value)
- else:
- collection.remove_with_event(value, initiator)
-
- def set(self, state, dict_, value, initiator, passive=PASSIVE_OFF):
- """Set a value on the given object.
-
- `initiator` is the ``InstrumentedAttribute`` that initiated the
- ``set()`` operation and is used to control the depth of a circular
- setter operation.
- """
-
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- self._set_iterable(
- state, dict_, value,
- lambda adapter, i: adapter.adapt_like_to_iterable(i))
-
- def _set_iterable(self, state, dict_, iterable, adapter=None):
- """Set a collection value from an iterable of state-bearers.
-
- ``adapter`` is an optional callable invoked with a CollectionAdapter
- and the iterable. Should return an iterable of state-bearing
- instances suitable for appending via a CollectionAdapter. Can be used
- for, e.g., adapting an incoming dictionary into an iterator of values
- rather than keys.
-
- """
- # pulling a new collection first so that an adaptation exception does
- # not trigger a lazy load of the old collection.
- new_collection, user_data = self._initialize_collection(state)
- if adapter:
- new_values = list(adapter(new_collection, iterable))
- else:
- new_values = list(iterable)
-
- old = self.get(state, dict_, passive=PASSIVE_ONLY_PERSISTENT)
- if old is PASSIVE_NO_RESULT:
- old = self.initialize(state, dict_)
- elif old is iterable:
- # ignore re-assignment of the current collection, as happens
- # implicitly with in-place operators (foo.collection |= other)
- return
-
- # place a copy of "old" in state.committed_state
- state.modified_event(dict_, self, old, True)
-
- old_collection = getattr(old, '_sa_adapter')
-
- dict_[self.key] = user_data
-
- collections.bulk_replace(new_values, old_collection, new_collection)
- old_collection.unlink(old)
-
- def set_committed_value(self, state, dict_, value):
- """Set an attribute value on the given instance and 'commit' it."""
-
- collection, user_data = self._initialize_collection(state)
-
- if value:
- collection.append_multiple_without_event(value)
-
- state.dict[self.key] = user_data
-
- state.commit(dict_, [self.key])
-
- if self.key in state.pending:
- # pending items exist. issue a modified event,
- # add/remove new items.
- state.modified_event(dict_, self, user_data, True)
-
- pending = state.pending.pop(self.key)
- added = pending.added_items
- removed = pending.deleted_items
- for item in added:
- collection.append_without_event(item)
- for item in removed:
- collection.remove_without_event(item)
-
- return user_data
-
- def get_collection(self, state, dict_,
- user_data=None, passive=PASSIVE_OFF):
- """Retrieve the CollectionAdapter associated with the given state.
-
- Creates a new CollectionAdapter if one does not exist.
-
- """
- if user_data is None:
- user_data = self.get(state, dict_, passive=passive)
- if user_data is PASSIVE_NO_RESULT:
- return user_data
-
- return getattr(user_data, '_sa_adapter')
-
-def backref_listeners(attribute, key, uselist):
- """Apply listeners to synchronize a two-way relationship."""
-
- # use easily recognizable names for stack traces
-
- def emit_backref_from_scalar_set_event(state, child, oldchild, initiator):
- if oldchild is child:
- return child
-
- if oldchild is not None and oldchild is not PASSIVE_NO_RESULT:
- # With lazy=None, there's no guarantee that the full collection is
- # present when updating via a backref.
- old_state, old_dict = instance_state(oldchild),\
- instance_dict(oldchild)
- impl = old_state.manager[key].impl
- try:
- impl.remove(old_state,
- old_dict,
- state.obj(),
- initiator, passive=PASSIVE_NO_FETCH)
- except (ValueError, KeyError, IndexError):
- pass
-
- if child is not None:
- child_state, child_dict = instance_state(child),\
- instance_dict(child)
- child_state.manager[key].impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
- return child
-
- def emit_backref_from_collection_append_event(state, child, initiator):
- child_state, child_dict = instance_state(child), \
- instance_dict(child)
- child_state.manager[key].impl.append(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
- return child
-
- def emit_backref_from_collection_remove_event(state, child, initiator):
- if child is not None:
- child_state, child_dict = instance_state(child),\
- instance_dict(child)
- child_state.manager[key].impl.remove(
- child_state,
- child_dict,
- state.obj(),
- initiator,
- passive=PASSIVE_NO_FETCH)
-
- if uselist:
- event.listen(attribute, "append",
- emit_backref_from_collection_append_event,
- retval=True, raw=True)
- else:
- event.listen(attribute, "set",
- emit_backref_from_scalar_set_event,
- retval=True, raw=True)
- # TODO: need coverage in test/orm/ of remove event
- event.listen(attribute, "remove",
- emit_backref_from_collection_remove_event,
- retval=True, raw=True)
-
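-# Editor's example (not part of the original source): the listeners above
-# are what keep the two sides of a backref in sync.  A minimal,
-# hypothetical declarative mapping that exercises them::
-#
-#     class User(Base):
-#         __tablename__ = 'user'
-#         id = Column(Integer, primary_key=True)
-#         addresses = relationship("Address", backref="user")
-#
-#     u, a = User(), Address()
-#     u.addresses.append(a)   # fires the collection "append" listener...
-#     assert a.user is u      # ...which sets the scalar on the other side
-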
-_NO_HISTORY = util.symbol('NO_HISTORY')
-_NO_STATE_SYMBOLS = frozenset([
- id(PASSIVE_NO_RESULT),
- id(NO_VALUE),
- id(NEVER_SET)])
-class History(tuple):
- """A 3-tuple of added, unchanged and deleted values,
- representing the changes which have occurred on an instrumented
- attribute.
-
- Each tuple member is an iterable sequence.
-
- """
-
- __slots__ = ()
-
- added = property(itemgetter(0))
- """Return the collection of items added to the attribute (the first tuple
- element)."""
-
- unchanged = property(itemgetter(1))
- """Return the collection of items that have not changed on the attribute
- (the second tuple element)."""
-
- deleted = property(itemgetter(2))
- """Return the collection of items that have been removed from the
- attribute (the third tuple element)."""
-
- def __new__(cls, added, unchanged, deleted):
- return tuple.__new__(cls, (added, unchanged, deleted))
-
- def __nonzero__(self):
- return self != HISTORY_BLANK
-
- def empty(self):
- """Return True if this :class:`.History` has no changes
- and no existing, unchanged state.
-
- """
-
- return not bool(
- (self.added or self.deleted)
- or self.unchanged and self.unchanged != [None]
- )
-
- def sum(self):
- """Return a collection of added + unchanged + deleted."""
-
- return (self.added or []) +\
- (self.unchanged or []) +\
- (self.deleted or [])
-
- def non_deleted(self):
- """Return a collection of added + unchanged."""
-
- return (self.added or []) +\
- (self.unchanged or [])
-
- def non_added(self):
- """Return a collection of unchanged + deleted."""
-
- return (self.unchanged or []) +\
- (self.deleted or [])
-
- def has_changes(self):
- """Return True if this :class:`.History` has changes."""
-
- return bool(self.added or self.deleted)
-
- def as_state(self):
- return History(
- [(c is not None)
- and instance_state(c) or None
- for c in self.added],
- [(c is not None)
- and instance_state(c) or None
- for c in self.unchanged],
- [(c is not None)
- and instance_state(c) or None
- for c in self.deleted],
- )
-
- @classmethod
- def from_scalar_attribute(cls, attribute, state, current):
- original = state.committed_state.get(attribute.key, _NO_HISTORY)
-
- if original is _NO_HISTORY:
- if current is NO_VALUE:
- return cls((), (), ())
- else:
- return cls((), [current], ())
-        # don't let ClauseElement expressions here trip things up
- elif attribute.is_equal(current, original) is True:
- return cls((), [current], ())
- else:
- # current convention on native scalars is to not
- # include information
- # about missing previous value in "deleted", but
- # we do include None, which helps in some primary
- # key situations
- if id(original) in _NO_STATE_SYMBOLS:
- deleted = ()
- else:
- deleted = [original]
- if current is NO_VALUE:
- return cls((), (), deleted)
- else:
- return cls([current], (), deleted)
-
- @classmethod
- def from_object_attribute(cls, attribute, state, current):
- original = state.committed_state.get(attribute.key, _NO_HISTORY)
-
- if original is _NO_HISTORY:
- if current is NO_VALUE or current is NEVER_SET:
- return cls((), (), ())
- else:
- return cls((), [current], ())
- elif current is original:
- return cls((), [current], ())
- else:
- # current convention on related objects is to not
- # include information
- # about missing previous value in "deleted", and
- # to also not include None - the dependency.py rules
- # ignore the None in any case.
- if id(original) in _NO_STATE_SYMBOLS or original is None:
- deleted = ()
- else:
- deleted = [original]
- if current is NO_VALUE or current is NEVER_SET:
- return cls((), (), deleted)
- else:
- return cls([current], (), deleted)
-
- @classmethod
- def from_collection(cls, attribute, state, current):
- original = state.committed_state.get(attribute.key, _NO_HISTORY)
- current = getattr(current, '_sa_adapter')
-
- if original is NO_VALUE:
- return cls(list(current), (), ())
- elif original is _NO_HISTORY:
- return cls((), list(current), ())
- else:
- current_states = [(instance_state(c), c) for c in current]
- original_states = [(instance_state(c), c) for c in original]
-
- current_set = dict(current_states)
- original_set = dict(original_states)
-
- return cls(
- [o for s, o in current_states if s not in original_set],
- [o for s, o in current_states if s in original_set],
- [o for s, o in original_states if s not in current_set]
- )
-
-HISTORY_BLANK = History(None, None, None)
-
-def get_history(obj, key, passive=PASSIVE_OFF):
- """Return a :class:`.History` record for the given object
- and attribute key.
-
- :param obj: an object whose class is instrumented by the
- attributes package.
-
- :param key: string attribute name.
-
- :param passive: indicates if the attribute should be
-      loaded from the database if not already present
-      (:attr:`.PASSIVE_NO_FETCH`), and if the attribute should not be
-      initialized to a blank value otherwise
-      (:attr:`.PASSIVE_NO_INITIALIZE`). Default is :attr:`PASSIVE_OFF`.
-
- """
- if passive is True:
- util.warn_deprecated("Passing True for 'passive' is deprecated. "
- "Use attributes.PASSIVE_NO_INITIALIZE")
- passive = PASSIVE_NO_INITIALIZE
- elif passive is False:
- util.warn_deprecated("Passing False for 'passive' is "
- "deprecated. Use attributes.PASSIVE_OFF")
- passive = PASSIVE_OFF
-
- return get_state_history(instance_state(obj), key, passive)
-
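-# Editor's example (not part of the original source): a sketch of reading
-# a History record, assuming a mapped instance with a scalar 'name'
-# attribute whose committed value is 'jack'::
-#
-#     from sqlalchemy.orm import attributes
-#
-#     user.name = 'ed'
-#     hist = attributes.get_history(user, 'name')
-#     hist.added      # ['ed']
-#     hist.deleted    # ['jack']
-#     hist.unchanged  # ()
-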
-def get_state_history(state, key, passive=PASSIVE_OFF):
- return state.get_history(key, passive)
-
-
-def has_parent(cls, obj, key, optimistic=False):
- """TODO"""
- manager = manager_of_class(cls)
- state = instance_state(obj)
- return manager.has_parent(state, key, optimistic)
-
-def register_attribute(class_, key, **kw):
- comparator = kw.pop('comparator', None)
- parententity = kw.pop('parententity', None)
- doc = kw.pop('doc', None)
- desc = register_descriptor(class_, key,
- comparator, parententity, doc=doc)
- register_attribute_impl(class_, key, **kw)
- return desc
-
-def register_attribute_impl(class_, key,
- uselist=False, callable_=None,
- useobject=False, mutable_scalars=False,
- impl_class=None, backref=None, **kw):
-
- manager = manager_of_class(class_)
- if uselist:
- factory = kw.pop('typecallable', None)
- typecallable = manager.instrument_collection_class(
- key, factory or list)
- else:
- typecallable = kw.pop('typecallable', None)
-
- dispatch = manager[key].dispatch
-
- if impl_class:
- impl = impl_class(class_, key, typecallable, dispatch, **kw)
- elif uselist:
- impl = CollectionAttributeImpl(class_, key, callable_, dispatch,
- typecallable=typecallable, **kw)
- elif useobject:
- impl = ScalarObjectAttributeImpl(class_, key, callable_,
- dispatch,**kw)
- elif mutable_scalars:
- impl = MutableScalarAttributeImpl(class_, key, callable_, dispatch,
- class_manager=manager, **kw)
- else:
- impl = ScalarAttributeImpl(class_, key, callable_, dispatch, **kw)
-
- manager[key].impl = impl
-
- if backref:
- backref_listeners(manager[key], backref, uselist)
-
- manager.post_configure_attribute(key)
- return manager[key]
-
-def register_descriptor(class_, key, comparator=None,
- parententity=None, property_=None, doc=None):
- manager = manager_of_class(class_)
-
- descriptor = InstrumentedAttribute(class_, key, comparator=comparator,
- parententity=parententity)
-
- descriptor.__doc__ = doc
-
- manager.instrument_attribute(key, descriptor)
- return descriptor
-
-def unregister_attribute(class_, key):
- manager_of_class(class_).uninstrument_attribute(key)
-
-def init_collection(obj, key):
- """Initialize a collection attribute and return the collection adapter.
-
- This function is used to provide direct access to collection internals
- for a previously unloaded attribute. e.g.::
-
- collection_adapter = init_collection(someobject, 'elements')
- for elem in values:
- collection_adapter.append_without_event(elem)
-
- For an easier way to do the above, see
- :func:`~sqlalchemy.orm.attributes.set_committed_value`.
-
- obj is an instrumented object instance. An InstanceState
- is accepted directly for backwards compatibility but
- this usage is deprecated.
-
- """
- state = instance_state(obj)
- dict_ = state.dict
- return init_state_collection(state, dict_, key)
-
-def init_state_collection(state, dict_, key):
- """Initialize a collection attribute and return the collection adapter."""
-
- attr = state.manager[key].impl
- user_data = attr.initialize(state, dict_)
- return attr.get_collection(state, dict_, user_data)
-
-def set_committed_value(instance, key, value):
- """Set the value of an attribute with no history events.
-
- Cancels any previous history present. The value should be
- a scalar value for scalar-holding attributes, or
- an iterable for any collection-holding attribute.
-
- This is the same underlying method used when a lazy loader
- fires off and loads additional data from the database.
- In particular, this method can be used by application code
- which has loaded additional attributes or collections through
- separate queries, which can then be attached to an instance
- as though it were part of its original loaded state.
-
- """
- state, dict_ = instance_state(instance), instance_dict(instance)
- state.manager[key].impl.set_committed_value(state, dict_, value)
-
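-# Editor's example (not part of the original source): a hedged sketch of
-# attaching separately-queried data as though it were loaded originally,
-# assuming a User class with an 'addresses' collection::
-#
-#     addresses = session.query(Address).filter_by(user_id=user.id).all()
-#     set_committed_value(user, 'addresses', addresses)
-#     # no history is recorded and the instance is not marked dirty
-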
-def set_attribute(instance, key, value):
- """Set the value of an attribute, firing history events.
-
- This function may be used regardless of instrumentation
- applied directly to the class, i.e. no descriptors are required.
-    Custom attribute management schemes will need to use
-    this method to establish attribute state as understood
- by SQLAlchemy.
-
- """
- state, dict_ = instance_state(instance), instance_dict(instance)
- state.manager[key].impl.set(state, dict_, value, None)
-
-def get_attribute(instance, key):
- """Get the value of an attribute, firing any callables required.
-
- This function may be used regardless of instrumentation
- applied directly to the class, i.e. no descriptors are required.
-    Custom attribute management schemes will need to use
-    this method to access attribute state as understood
- by SQLAlchemy.
-
- """
- state, dict_ = instance_state(instance), instance_dict(instance)
- return state.manager[key].impl.get(state, dict_)
-
-def del_attribute(instance, key):
- """Delete the value of an attribute, firing history events.
-
- This function may be used regardless of instrumentation
- applied directly to the class, i.e. no descriptors are required.
-    Custom attribute management schemes will need to use
-    this method to establish attribute state as understood
- by SQLAlchemy.
-
- """
- state, dict_ = instance_state(instance), instance_dict(instance)
- state.manager[key].impl.delete(state, dict_)
-
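-# Editor's example (not part of the original source): set_attribute(),
-# get_attribute() and del_attribute() act on instrumented state directly,
-# so they work even when no descriptor is present on the class.  A hedged
-# sketch::
-#
-#     set_attribute(user, 'name', 'ed')   # fires set/replace events
-#     get_attribute(user, 'name')         # 'ed'; fires loaders if needed
-#     del_attribute(user, 'name')         # fires remove/history events
-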
-def flag_modified(instance, key):
- """Mark an attribute on an instance as 'modified'.
-
- This sets the 'modified' flag on the instance and
- establishes an unconditional change event for the given attribute.
-
- """
- state, dict_ = instance_state(instance), instance_dict(instance)
- impl = state.manager[key].impl
- state.modified_event(dict_, impl, NO_VALUE)
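-
-# Editor's example (not part of the original source): flag_modified() is
-# useful when a value is mutated in place, which instrumentation cannot
-# see.  A hedged sketch, assuming 'data' holds a plain dict::
-#
-#     user.data['theme'] = 'dark'     # in-place change; no event fires
-#     flag_modified(user, 'data')     # force the attribute to be dirty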
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/collections.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/collections.py
deleted file mode 100755
index 14251920..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/collections.py
+++ /dev/null
@@ -1,1473 +0,0 @@
-# orm/collections.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Support for collections of mapped entities.
-
-The collections package supplies the machinery used to inform the ORM of
-collection membership changes. An instrumentation via decoration approach is
-used, allowing arbitrary types (including built-ins) to be used as entity
-collections without requiring inheritance from a base class.
-
-Instrumentation decoration relays membership change events to the
-``InstrumentedCollectionAttribute`` that is currently managing the collection.
-The decorators observe function call arguments and return values, tracking
-entities entering or leaving the collection. Two decorator approaches are
-provided. One is a bundle of generic decorators that map function arguments
-and return values to events::
-
- from sqlalchemy.orm.collections import collection
- class MyClass(object):
- # ...
-
- @collection.adds(1)
- def store(self, item):
- self.data.append(item)
-
- @collection.removes_return()
- def pop(self):
- return self.data.pop()
-
-
-The second approach is a bundle of targeted decorators that wrap appropriate
-append and remove notifiers around the mutation methods present in the
-standard Python ``list``, ``set`` and ``dict`` interfaces. These could be
-specified in terms of generic decorator recipes, but are instead hand-tooled
-for increased efficiency. The targeted decorators occasionally implement
-adapter-like behavior, such as mapping bulk-set methods (``extend``,
-``update``, ``__setslice__``, etc.) into the series of atomic mutation events
-that the ORM requires.
-
-The targeted decorators are used internally for automatic instrumentation of
-entity collection classes. Every collection class goes through a
-transformation process roughly like so:
-
-1. If the class is a built-in, substitute a trivial sub-class
-2. Is this class already instrumented?
-3. Add in generic decorators
-4. Sniff out the collection interface through duck-typing
-5. Add targeted decoration to any undecorated interface method
-
-This process modifies the class at runtime, decorating methods and adding some
-bookkeeping properties. This isn't possible (or desirable) for built-in
-classes like ``list``, so trivial sub-classes are substituted to hold
-decoration::
-
- class InstrumentedList(list):
- pass
-
-Collection classes can be specified in ``relationship(collection_class=)`` as
-types or a function that returns an instance. Collection classes are
-inspected and instrumented during the mapper compilation phase. The
-collection_class callable will be executed once to produce a specimen
-instance, and the type of that specimen will be instrumented. Functions that
-return built-in types like ``list`` will be adapted to produce instrumented
-instances.
-
-When extending a known type like ``list``, additional decoration is
-generally not needed. Odds are, the extension method will delegate to a
-method that's already instrumented. For example::
-
- class QueueIsh(list):
- def push(self, item):
- self.append(item)
- def shift(self):
- return self.pop(0)
-
-There's no need to decorate these methods. ``append`` and ``pop`` are already
-instrumented as part of the ``list`` interface. Decorating them would fire
-duplicate events, which should be avoided.
-
-The targeted decoration tries not to rely on other methods in the underlying
-collection class, but some are unavoidable. Many depend on 'read' methods
-being present to properly instrument a 'write'; for example, ``__setitem__``
-needs ``__getitem__``. "Bulk" methods like ``update`` and ``extend`` may also
-be reimplemented in terms of atomic appends and removes, so the ``extend``
-decoration will actually perform many ``append`` operations and not call the
-underlying method at all.
-
-Tight control over bulk operation and the firing of events is also possible by
-implementing the instrumentation internally in your methods. The basic
-instrumentation package works under the general assumption that collection
-mutation will not raise unusual exceptions. If you want to closely
-orchestrate append and remove events with exception management, internal
-instrumentation may be the answer. Within your method,
-``collection_adapter(self)`` will retrieve an object that you can use for
-explicit control over triggering append and remove events.
-
-The owning object and InstrumentedCollectionAttribute are also reachable
-through the adapter, allowing for some very sophisticated behavior.
-
-"""
-
-import copy
-import inspect
-import operator
-import sys
-import weakref
-
-from sqlalchemy.sql import expression
-from sqlalchemy import schema, util, exc as sa_exc
-
-
-__all__ = ['collection', 'collection_adapter',
- 'mapped_collection', 'column_mapped_collection',
- 'attribute_mapped_collection']
-
-__instrumentation_mutex = util.threading.Lock()
-
-
-def column_mapped_collection(mapping_spec):
- """A dictionary-based collection type with column-based keying.
-
- Returns a MappedCollection factory with a keying function generated
- from mapping_spec, which may be a Column or a sequence of Columns.
-
- The key value must be immutable for the lifetime of the object. You
- can not, for example, map on foreign key values if those key values will
- change during the session, i.e. from None to a database-assigned integer
- after a session flush.
-
- """
- from sqlalchemy.orm.util import _state_mapper
- from sqlalchemy.orm.attributes import instance_state
-
- cols = [expression._only_column_elements(q, "mapping_spec")
- for q in util.to_list(mapping_spec)]
- if len(cols) == 1:
- def keyfunc(value):
- state = instance_state(value)
- m = _state_mapper(state)
- return m._get_state_attr_by_column(state, state.dict, cols[0])
- else:
- mapping_spec = tuple(cols)
- def keyfunc(value):
- state = instance_state(value)
- m = _state_mapper(state)
- return tuple(m._get_state_attr_by_column(state, state.dict, c)
- for c in mapping_spec)
- return lambda: MappedCollection(keyfunc)
-
-def attribute_mapped_collection(attr_name):
- """A dictionary-based collection type with attribute-based keying.
-
- Returns a MappedCollection factory with a keying based on the
- 'attr_name' attribute of entities in the collection.
-
- The key value must be immutable for the lifetime of the object. You
- can not, for example, map on foreign key values if those key values will
- change during the session, i.e. from None to a database-assigned integer
- after a session flush.
-
- """
- return lambda: MappedCollection(operator.attrgetter(attr_name))
-
-
-def mapped_collection(keyfunc):
- """A dictionary-based collection type with arbitrary keying.
-
- Returns a MappedCollection factory with a keying function generated
- from keyfunc, a callable that takes an entity and returns a key value.
-
- The key value must be immutable for the lifetime of the object. You
- can not, for example, map on foreign key values if those key values will
- change during the session, i.e. from None to a database-assigned integer
- after a session flush.
-
- """
- return lambda: MappedCollection(keyfunc)
-
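-# Editor's example (not part of the original source): the three factories
-# above are used via relationship(collection_class=...).  A hedged sketch
-# with hypothetical Item/Note classes::
-#
-#     notes = relationship(
-#         "Note",
-#         collection_class=attribute_mapped_collection('keyword'))
-#
-#     item.notes['color'] = Note(keyword='color', text='blue')
-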
-class collection(object):
- """Decorators for entity collection classes.
-
- The decorators fall into two groups: annotations and interception recipes.
-
- The annotating decorators (appender, remover, iterator,
- internally_instrumented, link) indicate the method's purpose and take no
- arguments. They are not written with parens::
-
- @collection.appender
- def append(self, append): ...
-
- The recipe decorators all require parens, even those that take no
- arguments::
-
- @collection.adds('entity')
- def insert(self, position, entity): ...
-
- @collection.removes_return()
- def popitem(self): ...
-
- Decorators can be specified in long-hand for Python 2.3, or with
-    the class-level dict attribute '__instrumentation__' - see the source
- for details.
-
- """
- # Bundled as a class solely for ease of use: packaging, doc strings,
- # importability.
-
- @staticmethod
- def appender(fn):
- """Tag the method as the collection appender.
-
- The appender method is called with one positional argument: the value
- to append. The method will be automatically decorated with 'adds(1)'
- if not already decorated::
-
- @collection.appender
- def add(self, append): ...
-
- # or, equivalently
- @collection.appender
- @collection.adds(1)
- def add(self, append): ...
-
- # for mapping type, an 'append' may kick out a previous value
- # that occupies that slot. consider d['a'] = 'foo'- any previous
- # value in d['a'] is discarded.
- @collection.appender
- @collection.replaces(1)
- def add(self, entity):
- key = some_key_func(entity)
- previous = None
- if key in self:
- previous = self[key]
- self[key] = entity
- return previous
-
- If the value to append is not allowed in the collection, you may
- raise an exception. Something to remember is that the appender
- will be called for each object mapped by a database query. If the
- database contains rows that violate your collection semantics, you
- will need to get creative to fix the problem, as access via the
- collection will not work.
-
- If the appender method is internally instrumented, you must also
- receive the keyword argument '_sa_initiator' and ensure its
- promulgation to collection events.
-
- """
- setattr(fn, '_sa_instrument_role', 'appender')
- return fn
-
- @staticmethod
- def remover(fn):
- """Tag the method as the collection remover.
-
- The remover method is called with one positional argument: the value
- to remove. The method will be automatically decorated with
- :meth:`removes_return` if not already decorated::
-
- @collection.remover
- def zap(self, entity): ...
-
- # or, equivalently
- @collection.remover
- @collection.removes_return()
-            def zap(self): ...
-
- If the value to remove is not present in the collection, you may
- raise an exception or return None to ignore the error.
-
- If the remove method is internally instrumented, you must also
- receive the keyword argument '_sa_initiator' and ensure its
- promulgation to collection events.
-
- """
- setattr(fn, '_sa_instrument_role', 'remover')
- return fn
-
- @staticmethod
- def iterator(fn):
- """Tag the method as the collection remover.
-
- The iterator method is called with no arguments. It is expected to
- return an iterator over all collection members::
-
- @collection.iterator
- def __iter__(self): ...
-
- """
- setattr(fn, '_sa_instrument_role', 'iterator')
- return fn
-
- @staticmethod
- def internally_instrumented(fn):
- """Tag the method as instrumented.
-
- This tag will prevent any decoration from being applied to the method.
- Use this if you are orchestrating your own calls to :func:`.collection_adapter`
- in one of the basic SQLAlchemy interface methods, or to prevent
- an automatic ABC method decoration from wrapping your implementation::
-
- # normally an 'extend' method on a list-like class would be
- # automatically intercepted and re-implemented in terms of
- # SQLAlchemy events and append(). your implementation will
- # never be called, unless:
- @collection.internally_instrumented
- def extend(self, items): ...
-
- """
- setattr(fn, '_sa_instrumented', True)
- return fn
-
- @staticmethod
- def link(fn):
- """Tag the method as a the "linked to attribute" event handler.
-
- This optional event handler will be called when the collection class
- is linked to or unlinked from the InstrumentedAttribute. It is
- invoked immediately after the '_sa_adapter' property is set on
- the instance. A single argument is passed: the collection adapter
- that has been linked, or None if unlinking.
-
- """
- setattr(fn, '_sa_instrument_role', 'link')
- return fn
-
- @staticmethod
- def converter(fn):
- """Tag the method as the collection converter.
-
- This optional method will be called when a collection is being
- replaced entirely, as in::
-
- myobj.acollection = [newvalue1, newvalue2]
-
- The converter method will receive the object being assigned and should
- return an iterable of values suitable for use by the ``appender``
-        method. A converter must not assign values or mutate the collection;
-        its sole job is to adapt the value the user provides into an iterable
- of values for the ORM's use.
-
- The default converter implementation will use duck-typing to do the
-        conversion. A dict-like collection will be converted into an iterable
- of dictionary values, and other types will simply be iterated::
-
- @collection.converter
- def convert(self, other): ...
-
- If the duck-typing of the object does not match the type of this
- collection, a TypeError is raised.
-
- Supply an implementation of this method if you want to expand the
- range of possible types that can be assigned in bulk or perform
- validation on the values about to be assigned.
-
- """
- setattr(fn, '_sa_instrument_role', 'converter')
- return fn
-
- @staticmethod
- def adds(arg):
- """Mark the method as adding an entity to the collection.
-
- Adds "add to collection" handling to the method. The decorator
- argument indicates which method argument holds the SQLAlchemy-relevant
- value. Arguments can be specified positionally (i.e. integer) or by
- name::
-
- @collection.adds(1)
- def push(self, item): ...
-
- @collection.adds('entity')
- def do_stuff(self, thing, entity=None): ...
-
- """
- def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
- return fn
- return decorator
-
- @staticmethod
- def replaces(arg):
- """Mark the method as replacing an entity in the collection.
-
- Adds "add to collection" and "remove from collection" handling to
- the method. The decorator argument indicates which method argument
- holds the SQLAlchemy-relevant value to be added, and return value, if
- any will be considered the value to remove.
-
- Arguments can be specified positionally (i.e. integer) or by name::
-
- @collection.replaces(2)
- def __setitem__(self, index, item): ...
-
- """
- def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_append_event', arg))
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
- return fn
- return decorator
-
- @staticmethod
- def removes(arg):
- """Mark the method as removing an entity in the collection.
-
- Adds "remove from collection" handling to the method. The decorator
- argument indicates which method argument holds the SQLAlchemy-relevant
- value to be removed. Arguments can be specified positionally (i.e.
- integer) or by name::
-
- @collection.removes(1)
- def zap(self, item): ...
-
- For methods where the value to remove is not known at call-time, use
- collection.removes_return.
-
- """
- def decorator(fn):
- setattr(fn, '_sa_instrument_before', ('fire_remove_event', arg))
- return fn
- return decorator
-
- @staticmethod
- def removes_return():
- """Mark the method as removing an entity in the collection.
-
- Adds "remove from collection" handling to the method. The return value
- of the method, if any, is considered the value to remove. The method
- arguments are not inspected::
-
- @collection.removes_return()
- def pop(self): ...
-
- For methods where the value to remove is known at call-time, use
- collection.remove.
-
- """
- def decorator(fn):
- setattr(fn, '_sa_instrument_after', 'fire_remove_event')
- return fn
- return decorator
-
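-# Editor's example (not part of the original source): a minimal custom
-# collection wired up with the three required roles; usable as
-# relationship(collection_class=SetLike)::
-#
-#     class SetLike(object):
-#         def __init__(self):
-#             self._members = {}
-#
-#         @collection.appender
-#         def append(self, item):
-#             self._members[id(item)] = item
-#
-#         @collection.remover
-#         def remove(self, item):
-#             del self._members[id(item)]
-#
-#         @collection.iterator
-#         def __iter__(self):
-#             return iter(self._members.values())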
-
-# public instrumentation interface for 'internally instrumented'
-# implementations
-def collection_adapter(collection):
- """Fetch the :class:`.CollectionAdapter` for a collection."""
-
- return getattr(collection, '_sa_adapter', None)
-
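-# Editor's example (not part of the original source): a sketch of driving
-# events by hand from an internally-instrumented method on a list
-# subclass::
-#
-#     @collection.internally_instrumented
-#     def add_all(self, items, _sa_initiator=None):
-#         adapter = collection_adapter(self)
-#         for item in items:
-#             if adapter:
-#                 item = adapter.fire_append_event(item, _sa_initiator)
-#             list.append(self, item)
-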
-def collection_iter(collection):
- """Iterate over an object supporting the @iterator or __iter__ protocols.
-
- If the collection is an ORM collection, it need not be attached to an
- object to be iterable.
-
- """
- try:
- return getattr(collection, '_sa_iterator',
- getattr(collection, '__iter__'))()
- except AttributeError:
- raise TypeError("'%s' object is not iterable" %
- type(collection).__name__)
-
-
-class CollectionAdapter(object):
- """Bridges between the ORM and arbitrary Python collections.
-
- Proxies base-level collection operations (append, remove, iterate)
- to the underlying Python collection, and emits add/remove events for
- entities entering or leaving the collection.
-
-    The ORM uses a CollectionAdapter exclusively for interaction with
- entity collections.
-
- The usage of getattr()/setattr() is currently to allow injection
- of custom methods, such as to unwrap Zope security proxies.
-
- """
- def __init__(self, attr, owner_state, data):
- self._key = attr.key
- self._data = weakref.ref(data)
- self.owner_state = owner_state
- self.link_to_self(data)
-
- @property
- def data(self):
- "The entity collection being adapted."
- return self._data()
-
- @util.memoized_property
- def attr(self):
- return self.owner_state.manager[self._key].impl
-
- def link_to_self(self, data):
- """Link a collection to this adapter, and fire a link event."""
- setattr(data, '_sa_adapter', self)
- if hasattr(data, '_sa_on_link'):
- getattr(data, '_sa_on_link')(self)
-
- def unlink(self, data):
- """Unlink a collection from any adapter, and fire a link event."""
- setattr(data, '_sa_adapter', None)
- if hasattr(data, '_sa_on_link'):
- getattr(data, '_sa_on_link')(None)
-
- def adapt_like_to_iterable(self, obj):
- """Converts collection-compatible objects to an iterable of values.
-
- Can be passed any type of object, and if the underlying collection
- determines that it can be adapted into a stream of values it can
- use, returns an iterable of values suitable for append()ing.
-
- This method may raise TypeError or any other suitable exception
- if adaptation fails.
-
- If a converter implementation is not supplied on the collection,
- a default duck-typing-based implementation is used.
-
- """
- converter = getattr(self._data(), '_sa_converter', None)
- if converter is not None:
- return converter(obj)
-
- setting_type = util.duck_type_collection(obj)
- receiving_type = util.duck_type_collection(self._data())
-
- if obj is None or setting_type != receiving_type:
- given = obj is None and 'None' or obj.__class__.__name__
- if receiving_type is None:
- wanted = self._data().__class__.__name__
- else:
- wanted = receiving_type.__name__
-
- raise TypeError(
- "Incompatible collection type: %s is not %s-like" % (
- given, wanted))
-
- # If the object is an adapted collection, return the (iterable)
- # adapter.
- if getattr(obj, '_sa_adapter', None) is not None:
- return getattr(obj, '_sa_adapter')
- elif setting_type == dict:
- # Py3K
- #return obj.values()
- # Py2K
- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
- # end Py2K
- else:
- return iter(obj)
-
- def append_with_event(self, item, initiator=None):
- """Add an entity to the collection, firing mutation events."""
-
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=initiator)
-
- def append_without_event(self, item):
- """Add or restore an entity to the collection, firing no events."""
- getattr(self._data(), '_sa_appender')(item, _sa_initiator=False)
-
- def append_multiple_without_event(self, items):
- """Add or restore an entity to the collection, firing no events."""
- appender = getattr(self._data(), '_sa_appender')
- for item in items:
- appender(item, _sa_initiator=False)
-
- def remove_with_event(self, item, initiator=None):
- """Remove an entity from the collection, firing mutation events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=initiator)
-
- def remove_without_event(self, item):
- """Remove an entity from the collection, firing no events."""
- getattr(self._data(), '_sa_remover')(item, _sa_initiator=False)
-
- def clear_with_event(self, initiator=None):
- """Empty the collection, firing a mutation event for each entity."""
-
- remover = getattr(self._data(), '_sa_remover')
- for item in list(self):
- remover(item, _sa_initiator=initiator)
-
- def clear_without_event(self):
- """Empty the collection, firing no events."""
-
- remover = getattr(self._data(), '_sa_remover')
- for item in list(self):
- remover(item, _sa_initiator=False)
-
- def __iter__(self):
- """Iterate over entities in the collection."""
-
- # Py3K requires iter() here
- return iter(getattr(self._data(), '_sa_iterator')())
-
- def __len__(self):
- """Count entities in the collection."""
- return len(list(getattr(self._data(), '_sa_iterator')()))
-
- def __nonzero__(self):
- return True
-
- def fire_append_event(self, item, initiator=None):
- """Notify that a entity has entered the collection.
-
- Initiator is a token owned by the InstrumentedAttribute that initiated the membership
- mutation, and should be left as None unless you are passing along
- an initiator value from a chained operation.
-
- """
- if initiator is not False and item is not None:
- return self.attr.fire_append_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
- else:
- return item
-
- def fire_remove_event(self, item, initiator=None):
- """Notify that a entity has been removed from the collection.
-
- Initiator is the InstrumentedAttribute that initiated the membership
- mutation, and should be left as None unless you are passing along
- an initiator value from a chained operation.
-
- """
- if initiator is not False and item is not None:
- self.attr.fire_remove_event(
- self.owner_state,
- self.owner_state.dict,
- item, initiator)
-
- def fire_pre_remove_event(self, initiator=None):
- """Notify that an entity is about to be removed from the collection.
-
- Only called if the entity cannot be removed after calling
- fire_remove_event().
-
- """
- self.attr.fire_pre_remove_event(
- self.owner_state,
- self.owner_state.dict,
- initiator=initiator)
-
- def __getstate__(self):
- return {'key': self._key,
- 'owner_state': self.owner_state,
- 'data': self.data}
-
- def __setstate__(self, d):
- self._key = d['key']
- self.owner_state = d['owner_state']
- self._data = weakref.ref(d['data'])
-
-
-def bulk_replace(values, existing_adapter, new_adapter):
- """Load a new collection, firing events based on prior like membership.
-
- Appends instances in ``values`` onto the ``new_adapter``. Events will be
- fired for any instance not present in the ``existing_adapter``. Any
- instances in ``existing_adapter`` not present in ``values`` will have
- remove events fired upon them.
-
- values
- An iterable of collection member instances
-
- existing_adapter
- A CollectionAdapter of instances to be replaced
-
- new_adapter
- An empty CollectionAdapter to load with ``values``
-
-
- """
- if not isinstance(values, list):
- values = list(values)
-
- idset = util.IdentitySet
- constants = idset(existing_adapter or ()).intersection(values or ())
- additions = idset(values or ()).difference(constants)
- removals = idset(existing_adapter or ()).difference(constants)
-
- for member in values or ():
- if member in additions:
- new_adapter.append_with_event(member)
- elif member in constants:
- new_adapter.append_without_event(member)
-
- if existing_adapter:
- for member in removals:
- existing_adapter.remove_with_event(member)
-
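-# Editor's note (not part of the original source): a worked sketch of the
-# membership arithmetic above, computed by object identity::
-#
-#     existing = [a, b, c]; values = [b, c, d]
-#     constants = {b, c}   # on both sides: appended, no events fire
-#     additions = {d}      # only in values: append events fire
-#     removals  = {a}      # only in existing: remove events fire
-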
-def prepare_instrumentation(factory):
- """Prepare a callable for future use as a collection class factory.
-
- Given a collection class factory (either a type or no-arg callable),
- return another factory that will produce compatible instances when
- called.
-
- This function is responsible for converting collection_class=list
- into the run-time behavior of collection_class=InstrumentedList.
-
- """
- # Convert a builtin to 'Instrumented*'
- if factory in __canned_instrumentation:
- factory = __canned_instrumentation[factory]
-
- # Create a specimen
- cls = type(factory())
-
- # Did factory callable return a builtin?
- if cls in __canned_instrumentation:
- # Wrap it so that it returns our 'Instrumented*'
- factory = __converting_factory(factory)
- cls = factory()
-
- # Instrument the class if needed.
- if __instrumentation_mutex.acquire():
- try:
- if getattr(cls, '_sa_instrumented', None) != id(cls):
- _instrument_class(cls)
- finally:
- __instrumentation_mutex.release()
-
- return factory
-
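-# Editor's example (not part of the original source): the practical effect
-# is that a built-in collection factory yields instrumented instances::
-#
-#     factory = prepare_instrumentation(list)
-#     type(factory()).__name__    # 'InstrumentedList'
-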
-def __converting_factory(original_factory):
- """Convert the type returned by collection factories on the fly.
-
- Given a collection factory that returns a builtin type (e.g. a list),
- return a wrapped function that converts that type to one of our
- instrumented types.
-
- """
- def wrapper():
- collection = original_factory()
- type_ = type(collection)
- if type_ in __canned_instrumentation:
- # return an instrumented type initialized from the factory's
- # collection
- return __canned_instrumentation[type_](collection)
- else:
- raise sa_exc.InvalidRequestError(
- "Collection class factories must produce instances of a "
- "single class.")
- try:
- # often flawed but better than nothing
- wrapper.__name__ = "%sWrapper" % original_factory.__name__
- wrapper.__doc__ = original_factory.__doc__
- except:
- pass
- return wrapper
-
-def _instrument_class(cls):
- """Modify methods in a class and install instrumentation."""
-
- # TODO: more formally document this as a decoratorless/Python 2.3
- # option for specifying instrumentation. (likely doc'd here in code only,
- # not in online docs.) Useful for C types too.
- #
- # __instrumentation__ = {
- # 'rolename': 'methodname', # ...
- # 'methods': {
- # 'methodname': ('fire_{append,remove}_event', argspec,
- # 'fire_{append,remove}_event'),
- # 'append': ('fire_append_event', 1, None),
- # '__setitem__': ('fire_append_event', 1, 'fire_remove_event'),
- # 'pop': (None, None, 'fire_remove_event'),
- # }
- # }
-
- # In the normal call flow, a request for any of the 3 basic collection
- # types is transformed into one of our trivial subclasses
- # (e.g. InstrumentedList). Catch anything else that sneaks in here...
- if cls.__module__ == '__builtin__':
- raise sa_exc.ArgumentError(
- "Can not instrument a built-in type. Use a "
- "subclass, even a trivial one.")
-
- collection_type = util.duck_type_collection(cls)
- if collection_type in __interfaces:
- roles = __interfaces[collection_type].copy()
- decorators = roles.pop('_decorators', {})
- else:
- roles, decorators = {}, {}
-
- if hasattr(cls, '__instrumentation__'):
- roles.update(copy.deepcopy(getattr(cls, '__instrumentation__')))
-
- methods = roles.pop('methods', {})
-
- for name in dir(cls):
- method = getattr(cls, name, None)
- if not util.callable(method):
- continue
-
- # note role declarations
- if hasattr(method, '_sa_instrument_role'):
- role = method._sa_instrument_role
- assert role in ('appender', 'remover', 'iterator',
- 'link', 'converter')
- roles[role] = name
-
- # transfer instrumentation requests from decorated function
- # to the combined queue
- before, after = None, None
- if hasattr(method, '_sa_instrument_before'):
- op, argument = method._sa_instrument_before
- assert op in ('fire_append_event', 'fire_remove_event')
- before = op, argument
- if hasattr(method, '_sa_instrument_after'):
- op = method._sa_instrument_after
- assert op in ('fire_append_event', 'fire_remove_event')
- after = op
- if before:
- methods[name] = before[0], before[1], after
- elif after:
- methods[name] = None, None, after
-
- # apply ABC auto-decoration to methods that need it
- for method, decorator in decorators.items():
- fn = getattr(cls, method, None)
- if (fn and method not in methods and
- not hasattr(fn, '_sa_instrumented')):
- setattr(cls, method, decorator(fn))
-
- # ensure all roles are present, and apply implicit instrumentation if
- # needed
- if 'appender' not in roles or not hasattr(cls, roles['appender']):
- raise sa_exc.ArgumentError(
- "Type %s must elect an appender method to be "
- "a collection class" % cls.__name__)
- elif (roles['appender'] not in methods and
- not hasattr(getattr(cls, roles['appender']), '_sa_instrumented')):
- methods[roles['appender']] = ('fire_append_event', 1, None)
-
- if 'remover' not in roles or not hasattr(cls, roles['remover']):
- raise sa_exc.ArgumentError(
- "Type %s must elect a remover method to be "
- "a collection class" % cls.__name__)
- elif (roles['remover'] not in methods and
- not hasattr(getattr(cls, roles['remover']), '_sa_instrumented')):
- methods[roles['remover']] = ('fire_remove_event', 1, None)
-
- if 'iterator' not in roles or not hasattr(cls, roles['iterator']):
- raise sa_exc.ArgumentError(
- "Type %s must elect an iterator method to be "
- "a collection class" % cls.__name__)
-
- # apply ad-hoc instrumentation from decorators, class-level defaults
- # and implicit role declarations
- for method, (before, argument, after) in methods.items():
- setattr(cls, method,
- _instrument_membership_mutator(getattr(cls, method),
- before, argument, after))
- # intern the role map
- for role, method in roles.items():
- setattr(cls, '_sa_%s' % role, getattr(cls, method))
-
- setattr(cls, '_sa_instrumented', id(cls))
-
-def _instrument_membership_mutator(method, before, argument, after):
- """Route method args and/or return value through the collection adapter."""
- # This isn't smart enough to handle @adds(1) for 'def fn(self, (a, b))'
- if before:
- fn_args = list(util.flatten_iterator(inspect.getargspec(method)[0]))
- if type(argument) is int:
- pos_arg = argument
- named_arg = len(fn_args) > argument and fn_args[argument] or None
- else:
- if argument in fn_args:
- pos_arg = fn_args.index(argument)
- else:
- pos_arg = None
- named_arg = argument
- del fn_args
-
- def wrapper(*args, **kw):
- if before:
- if pos_arg is None:
- if named_arg not in kw:
- raise sa_exc.ArgumentError(
- "Missing argument %s" % argument)
- value = kw[named_arg]
- else:
- if len(args) > pos_arg:
- value = args[pos_arg]
- elif named_arg in kw:
- value = kw[named_arg]
- else:
- raise sa_exc.ArgumentError(
- "Missing argument %s" % argument)
-
- initiator = kw.pop('_sa_initiator', None)
- if initiator is False:
- executor = None
- else:
- executor = getattr(args[0], '_sa_adapter', None)
-
- if before and executor:
- getattr(executor, before)(value, initiator)
-
- if not after or not executor:
- return method(*args, **kw)
- else:
- res = method(*args, **kw)
- if res is not None:
- getattr(executor, after)(res, initiator)
- return res
- try:
- wrapper._sa_instrumented = True
- wrapper.__name__ = method.__name__
- wrapper.__doc__ = method.__doc__
- except:
- pass
- return wrapper
-
-def __set(collection, item, _sa_initiator=None):
- """Run set events, may eventually be inlined into decorators."""
-
- if _sa_initiator is not False and item is not None:
- executor = getattr(collection, '_sa_adapter', None)
- if executor:
- item = getattr(executor, 'fire_append_event')(item, _sa_initiator)
- return item
-
-def __del(collection, item, _sa_initiator=None):
- """Run del events, may eventually be inlined into decorators."""
- if _sa_initiator is not False and item is not None:
- executor = getattr(collection, '_sa_adapter', None)
- if executor:
- getattr(executor, 'fire_remove_event')(item, _sa_initiator)
-
-def __before_delete(collection, _sa_initiator=None):
- """Special method to run 'commit existing value' methods"""
- executor = getattr(collection, '_sa_adapter', None)
- if executor:
- getattr(executor, 'fire_pre_remove_event')(_sa_initiator)
-
-def _list_decorators():
- """Tailored instrumentation wrappers for any list-like class."""
-
- def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(list, fn.__name__), '__doc__')
-
- def append(fn):
- def append(self, item, _sa_initiator=None):
- item = __set(self, item, _sa_initiator)
- fn(self, item)
- _tidy(append)
- return append
-
- def remove(fn):
- def remove(self, value, _sa_initiator=None):
- __before_delete(self, _sa_initiator)
- # testlib.pragma exempt:__eq__
- fn(self, value)
- __del(self, value, _sa_initiator)
- _tidy(remove)
- return remove
-
- def insert(fn):
- def insert(self, index, value):
- value = __set(self, value)
- fn(self, index, value)
- _tidy(insert)
- return insert
-
- def __setitem__(fn):
- def __setitem__(self, index, value):
- if not isinstance(index, slice):
- existing = self[index]
- if existing is not None:
- __del(self, existing)
- value = __set(self, value)
- fn(self, index, value)
- else:
- # slice assignment requires __delitem__, insert, __len__
- step = index.step or 1
- start = index.start or 0
- if start < 0:
- start += len(self)
- stop = index.stop or len(self)
- if stop < 0:
- stop += len(self)
-
- if step == 1:
- for i in xrange(start, stop, step):
- if len(self) > start:
- del self[start]
-
- for i, item in enumerate(value):
- self.insert(i + start, item)
- else:
- rng = range(start, stop, step)
- if len(value) != len(rng):
- raise ValueError(
- "attempt to assign sequence of size %s to "
- "extended slice of size %s" % (len(value),
- len(rng)))
- for i, item in zip(rng, value):
- self.__setitem__(i, item)
- _tidy(__setitem__)
- return __setitem__
-
- def __delitem__(fn):
- def __delitem__(self, index):
- if not isinstance(index, slice):
- item = self[index]
- __del(self, item)
- fn(self, index)
- else:
- # slice deletion requires __getslice__ and a slice-grokking
- # __getitem__ for stepped deletion
- # note: not breaking this into atomic dels
- for item in self[index]:
- __del(self, item)
- fn(self, index)
- _tidy(__delitem__)
- return __delitem__
-
- # Py2K
- def __setslice__(fn):
- def __setslice__(self, start, end, values):
- for value in self[start:end]:
- __del(self, value)
- values = [__set(self, value) for value in values]
- fn(self, start, end, values)
- _tidy(__setslice__)
- return __setslice__
-
- def __delslice__(fn):
- def __delslice__(self, start, end):
- for value in self[start:end]:
- __del(self, value)
- fn(self, start, end)
- _tidy(__delslice__)
- return __delslice__
- # end Py2K
-
- def extend(fn):
- def extend(self, iterable):
- for value in iterable:
- self.append(value)
- _tidy(extend)
- return extend
-
- def __iadd__(fn):
- def __iadd__(self, iterable):
- # list.__iadd__ takes any iterable and seems to let TypeError raise
- # as-is instead of returning NotImplemented
- for value in iterable:
- self.append(value)
- return self
- _tidy(__iadd__)
- return __iadd__
-
- def pop(fn):
- def pop(self, index=-1):
- __before_delete(self)
- item = fn(self, index)
- __del(self, item)
- return item
- _tidy(pop)
- return pop
-
- # __imul__ : not wrapping this. all members of the collection are already
- # present, so no need to fire appends... wrapping it with an explicit
- # decorator is still possible, so events on *= can be had if they're
- # desired. hard to imagine a use case for __imul__, though.
-
- l = locals().copy()
- l.pop('_tidy')
- return l
-
-def _dict_decorators():
- """Tailored instrumentation wrappers for any dict-like mapping class."""
-
- def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(dict, fn.__name__), '__doc__')
-
- Unspecified = util.symbol('Unspecified')
-
- def __setitem__(fn):
- def __setitem__(self, key, value, _sa_initiator=None):
- if key in self:
- __del(self, self[key], _sa_initiator)
- value = __set(self, value, _sa_initiator)
- fn(self, key, value)
- _tidy(__setitem__)
- return __setitem__
-
- def __delitem__(fn):
- def __delitem__(self, key, _sa_initiator=None):
- if key in self:
- __del(self, self[key], _sa_initiator)
- fn(self, key)
- _tidy(__delitem__)
- return __delitem__
-
- def clear(fn):
- def clear(self):
- for key in self:
- __del(self, self[key])
- fn(self)
- _tidy(clear)
- return clear
-
- def pop(fn):
- def pop(self, key, default=Unspecified):
- if key in self:
- __del(self, self[key])
- if default is Unspecified:
- return fn(self, key)
- else:
- return fn(self, key, default)
- _tidy(pop)
- return pop
-
- def popitem(fn):
- def popitem(self):
- __before_delete(self)
- item = fn(self)
- __del(self, item[1])
- return item
- _tidy(popitem)
- return popitem
-
- def setdefault(fn):
- def setdefault(self, key, default=None):
- if key not in self:
- self.__setitem__(key, default)
- return default
- else:
- return self.__getitem__(key)
- _tidy(setdefault)
- return setdefault
-
- if sys.version_info < (2, 4):
- def update(fn):
- def update(self, other):
- for key in other.keys():
- if key not in self or self[key] is not other[key]:
- self[key] = other[key]
- _tidy(update)
- return update
- else:
- def update(fn):
- def update(self, __other=Unspecified, **kw):
- if __other is not Unspecified:
- if hasattr(__other, 'keys'):
- for key in __other.keys():
- if (key not in self or
- self[key] is not __other[key]):
- self[key] = __other[key]
- else:
- for key, value in __other:
- if key not in self or self[key] is not value:
- self[key] = value
- for key in kw:
- if key not in self or self[key] is not kw[key]:
- self[key] = kw[key]
- _tidy(update)
- return update
-
- l = locals().copy()
- l.pop('_tidy')
- l.pop('Unspecified')
- return l
-
-if util.py3k:
- _set_binop_bases = (set, frozenset)
-else:
- import sets
- _set_binop_bases = (set, frozenset, sets.BaseSet)
-
-def _set_binops_check_strict(self, obj):
- """Allow only set, frozenset and self.__class__-derived objects in binops."""
- return isinstance(obj, _set_binop_bases + (self.__class__,))
-
-def _set_binops_check_loose(self, obj):
- """Allow anything set-like to participate in set binops."""
- return (isinstance(obj, _set_binop_bases + (self.__class__,)) or
- util.duck_type_collection(obj) == set)
-
-
-def _set_decorators():
- """Tailored instrumentation wrappers for any set-like class."""
-
- def _tidy(fn):
- setattr(fn, '_sa_instrumented', True)
- fn.__doc__ = getattr(getattr(set, fn.__name__), '__doc__')
-
- Unspecified = util.symbol('Unspecified')
-
- def add(fn):
- def add(self, value, _sa_initiator=None):
- if value not in self:
- value = __set(self, value, _sa_initiator)
- # testlib.pragma exempt:__hash__
- fn(self, value)
- _tidy(add)
- return add
-
- if sys.version_info < (2, 4):
- def discard(fn):
- def discard(self, value, _sa_initiator=None):
- if value in self:
- self.remove(value, _sa_initiator)
- _tidy(discard)
- return discard
- else:
- def discard(fn):
- def discard(self, value, _sa_initiator=None):
- # testlib.pragma exempt:__hash__
- if value in self:
- __del(self, value, _sa_initiator)
- # testlib.pragma exempt:__hash__
- fn(self, value)
- _tidy(discard)
- return discard
-
- def remove(fn):
- def remove(self, value, _sa_initiator=None):
- # testlib.pragma exempt:__hash__
- if value in self:
- __del(self, value, _sa_initiator)
- # testlib.pragma exempt:__hash__
- fn(self, value)
- _tidy(remove)
- return remove
-
- def pop(fn):
- def pop(self):
- __before_delete(self)
- item = fn(self)
- __del(self, item)
- return item
- _tidy(pop)
- return pop
-
- def clear(fn):
- def clear(self):
- for item in list(self):
- self.remove(item)
- _tidy(clear)
- return clear
-
- def update(fn):
- def update(self, value):
- for item in value:
- self.add(item)
- _tidy(update)
- return update
-
- def __ior__(fn):
- def __ior__(self, value):
- if not _set_binops_check_strict(self, value):
- return NotImplemented
- for item in value:
- self.add(item)
- return self
- _tidy(__ior__)
- return __ior__
-
- def difference_update(fn):
- def difference_update(self, value):
- for item in value:
- self.discard(item)
- _tidy(difference_update)
- return difference_update
-
- def __isub__(fn):
- def __isub__(self, value):
- if not _set_binops_check_strict(self, value):
- return NotImplemented
- for item in value:
- self.discard(item)
- return self
- _tidy(__isub__)
- return __isub__
-
- def intersection_update(fn):
- def intersection_update(self, other):
- want, have = self.intersection(other), set(self)
- remove, add = have - want, want - have
-
- for item in remove:
- self.remove(item)
- for item in add:
- self.add(item)
- _tidy(intersection_update)
- return intersection_update
-
- def __iand__(fn):
- def __iand__(self, other):
- if not _set_binops_check_strict(self, other):
- return NotImplemented
- want, have = self.intersection(other), set(self)
- remove, add = have - want, want - have
-
- for item in remove:
- self.remove(item)
- for item in add:
- self.add(item)
- return self
- _tidy(__iand__)
- return __iand__
-
- def symmetric_difference_update(fn):
- def symmetric_difference_update(self, other):
- want, have = self.symmetric_difference(other), set(self)
- remove, add = have - want, want - have
-
- for item in remove:
- self.remove(item)
- for item in add:
- self.add(item)
- _tidy(symmetric_difference_update)
- return symmetric_difference_update
-
- def __ixor__(fn):
- def __ixor__(self, other):
- if not _set_binops_check_strict(self, other):
- return NotImplemented
- want, have = self.symmetric_difference(other), set(self)
- remove, add = have - want, want - have
-
- for item in remove:
- self.remove(item)
- for item in add:
- self.add(item)
- return self
- _tidy(__ixor__)
- return __ixor__
-
- l = locals().copy()
- l.pop('_tidy')
- l.pop('Unspecified')
- return l
-
-
-class InstrumentedList(list):
- """An instrumented version of the built-in list."""
-
- __instrumentation__ = {
- 'appender': 'append',
- 'remover': 'remove',
- 'iterator': '__iter__', }
-
-class InstrumentedSet(set):
- """An instrumented version of the built-in set."""
-
- __instrumentation__ = {
- 'appender': 'add',
- 'remover': 'remove',
- 'iterator': '__iter__', }
-
-class InstrumentedDict(dict):
- """An instrumented version of the built-in dict."""
-
- # Py3K
- #__instrumentation__ = {
- # 'iterator': 'values', }
- # Py2K
- __instrumentation__ = {
- 'iterator': 'itervalues', }
- # end Py2K
-
-__canned_instrumentation = {
- list: InstrumentedList,
- set: InstrumentedSet,
- dict: InstrumentedDict,
- }
-
-__interfaces = {
- list: {'appender': 'append',
- 'remover': 'remove',
- 'iterator': '__iter__',
- '_decorators': _list_decorators(), },
- set: {'appender': 'add',
- 'remover': 'remove',
- 'iterator': '__iter__',
- '_decorators': _set_decorators(), },
- # decorators are required for dicts and object collections.
- # Py3K
- #dict: {'iterator': 'values',
- # '_decorators': _dict_decorators(), },
- # Py2K
- dict: {'iterator': 'itervalues',
- '_decorators': _dict_decorators(), },
- # end Py2K
- # < 0.4 compatible naming, deprecated; use decorators instead.
- None: {}
- }
-
-class MappedCollection(dict):
- """A basic dictionary-based collection class.
-
- Extends dict with the minimal bag semantics that collection classes require.
- ``set`` and ``remove`` are implemented in terms of a keying function: any
- callable that takes an object and returns an object for use as a dictionary
- key.
-
- """
-
- def __init__(self, keyfunc):
- """Create a new collection with keying provided by keyfunc.
-
- keyfunc may be any callable that takes an object and
- returns an object for use as a dictionary key.
-
- The keyfunc will be called every time the ORM needs to add a member by
- value-only (such as when loading instances from the database) or
- remove a member. The usual cautions about dictionary keying apply:
- ``keyfunc(object)`` should return the same output for the life of the
- collection. Keying based on mutable properties can result in
- unreachable instances "lost" in the collection.
-
- """
- self.keyfunc = keyfunc
-
- def set(self, value, _sa_initiator=None):
- """Add an item by value, consulting the keyfunc for the key."""
-
- key = self.keyfunc(value)
- self.__setitem__(key, value, _sa_initiator)
- set = collection.internally_instrumented(set)
- set = collection.appender(set)
-
- def remove(self, value, _sa_initiator=None):
- """Remove an item by value, consulting the keyfunc for the key."""
-
- key = self.keyfunc(value)
- # Let self[key] raise if key is not in this collection
- # testlib.pragma exempt:__ne__
- if self[key] != value:
- raise sa_exc.InvalidRequestError(
- "Can not remove '%s': collection holds '%s' for key '%s'. "
- "Possible cause: is the MappedCollection key function "
- "based on mutable properties or properties that only obtain "
- "values after flush?" %
- (value, self[key], key))
- self.__delitem__(key, _sa_initiator)
- remove = collection.internally_instrumented(remove)
- remove = collection.remover(remove)
-
- def _convert(self, dictlike):
- """Validate and convert a dict-like object into values for set()ing.
-
- This is called behind the scenes when a MappedCollection is replaced
- entirely by another collection, as in::
-
- myobj.mappedcollection = {'a': obj1, 'b': obj2} # ...
-
- Raises a TypeError if the key in any (key, value) pair in the dictlike
- object does not match the key that this collection's keyfunc would
- have assigned for that value.
-
- """
- for incoming_key, value in util.dictlike_iteritems(dictlike):
- new_key = self.keyfunc(value)
- if incoming_key != new_key:
- raise TypeError(
- "Found incompatible key %r for value %r; this collection's "
- "keying function requires a key of %r for this value." % (
- incoming_key, value, new_key))
- yield value
- _convert = collection.converter(_convert)
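
The ``MappedCollection`` machinery above is most easily exercised through
``attribute_mapped_collection``, which derives the keyfunc from a mapped
attribute. A minimal sketch against the 0.7-era declarative API (the
``Item``/``Note`` classes and the ``keyword`` key are illustrative
assumptions, not taken from this file)::

    from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship, sessionmaker
    from sqlalchemy.orm.collections import attribute_mapped_collection

    Base = declarative_base()

    class Item(Base):
        __tablename__ = 'item'
        id = Column(Integer, primary_key=True)
        # a MappedCollection keyed by Note.keyword; append/remove events
        # route through the instrumentation defined above
        notes = relationship(
            "Note",
            collection_class=attribute_mapped_collection('keyword'))

    class Note(Base):
        __tablename__ = 'note'
        id = Column(Integer, primary_key=True)
        item_id = Column(Integer, ForeignKey('item.id'))
        keyword = Column(String(50))

        def __init__(self, keyword):
            self.keyword = keyword

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    item = Item()
    # the dictionary key must agree with keyfunc(value), here Note.keyword
    item.notes['color'] = Note('color')
    session.add(item)
    session.commit()
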
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dependency.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dependency.py
deleted file mode 100755
index 44858fb8..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dependency.py
+++ /dev/null
@@ -1,1161 +0,0 @@
-# orm/dependency.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Relationship dependencies.
-
-"""
-
-from sqlalchemy import sql, util, exc as sa_exc
-from sqlalchemy.orm import attributes, exc, sync, unitofwork, \
- util as mapperutil
-from sqlalchemy.orm.interfaces import ONETOMANY, MANYTOONE, MANYTOMANY
-
-class DependencyProcessor(object):
- def __init__(self, prop):
- self.prop = prop
- self.cascade = prop.cascade
- self.mapper = prop.mapper
- self.parent = prop.parent
- self.secondary = prop.secondary
- self.direction = prop.direction
- self.post_update = prop.post_update
- self.passive_deletes = prop.passive_deletes
- self.passive_updates = prop.passive_updates
- self.enable_typechecks = prop.enable_typechecks
- if self.passive_deletes:
- self._passive_delete_flag = attributes.PASSIVE_NO_INITIALIZE
- else:
- self._passive_delete_flag = attributes.PASSIVE_OFF
- if self.passive_updates:
- self._passive_update_flag = attributes.PASSIVE_NO_INITIALIZE
- else:
- self._passive_update_flag= attributes.PASSIVE_OFF
-
- self.key = prop.key
- if not self.prop.synchronize_pairs:
- raise sa_exc.ArgumentError(
- "Can't build a DependencyProcessor for relationship %s. "
- "No target attributes to populate between parent and "
- "child are present" %
- self.prop)
-
- @classmethod
- def from_relationship(cls, prop):
- return _direction_to_processor[prop.direction](prop)
-
- def hasparent(self, state):
- """return True if the given object instance has a parent,
- according to the ``InstrumentedAttribute`` handled by this
- ``DependencyProcessor``.
-
- """
- return self.parent.class_manager.get_impl(self.key).hasparent(state)
-
- def per_property_preprocessors(self, uow):
- """establish actions and dependencies related to a flush.
-
- These actions will operate on all relevant states in
- the aggregate.
-
- """
- uow.register_preprocessor(self, True)
-
-
- def per_property_flush_actions(self, uow):
- after_save = unitofwork.ProcessAll(uow, self, False, True)
- before_delete = unitofwork.ProcessAll(uow, self, True, True)
-
- parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.primary_base_mapper
- )
- child_saves = unitofwork.SaveUpdateAll(
- uow,
- self.mapper.primary_base_mapper
- )
-
- parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.primary_base_mapper
- )
- child_deletes = unitofwork.DeleteAll(
- uow,
- self.mapper.primary_base_mapper
- )
-
- self.per_property_dependencies(uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- )
-
-
- def per_state_flush_actions(self, uow, states, isdelete):
- """establish actions and dependencies related to a flush.
-
- These actions will operate on all relevant states
- individually. This occurs only if there are cycles
- in the 'aggregated' version of events.
-
- """
-
- parent_base_mapper = self.parent.primary_base_mapper
- child_base_mapper = self.mapper.primary_base_mapper
- child_saves = unitofwork.SaveUpdateAll(uow, child_base_mapper)
- child_deletes = unitofwork.DeleteAll(uow, child_base_mapper)
-
- # locate and disable the aggregate processors
- # for this dependency
-
- if isdelete:
- before_delete = unitofwork.ProcessAll(uow, self, True, True)
- before_delete.disabled = True
- else:
- after_save = unitofwork.ProcessAll(uow, self, False, True)
- after_save.disabled = True
-
- # check if the "child" side is part of the cycle
-
- if child_saves not in uow.cycles:
- # based on the current dependencies we use, the saves/
- # deletes should always be in the 'cycles' collection
- # together. if this changes, we will have to break up
- # this method a bit more.
- assert child_deletes not in uow.cycles
-
- # child side is not part of the cycle, so we will link per-state
- # actions to the aggregate "saves", "deletes" actions
- child_actions = [
- (child_saves, False), (child_deletes, True)
- ]
- child_in_cycles = False
- else:
- child_in_cycles = True
-
- # check if the "parent" side is part of the cycle
- if not isdelete:
- parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
- parent_deletes = before_delete = None
- if parent_saves in uow.cycles:
- parent_in_cycles = True
- else:
- parent_deletes = unitofwork.DeleteAll(
- uow,
- self.parent.base_mapper)
- parent_saves = after_save = None
- if parent_deletes in uow.cycles:
- parent_in_cycles = True
-
- # now create actions /dependencies for each state.
- for state in states:
- # detect if there's anything changed or loaded
- # by a preprocessor on this state/attribute. if not,
- # we should be able to skip it entirely.
- sum_ = state.manager[self.key].impl.get_all_pending(state, state.dict)
-
- if not sum_:
- continue
-
- if isdelete:
- before_delete = unitofwork.ProcessState(uow,
- self, True, state)
- if parent_in_cycles:
- parent_deletes = unitofwork.DeleteState(
- uow,
- state,
- parent_base_mapper)
- else:
- after_save = unitofwork.ProcessState(uow, self, False, state)
- if parent_in_cycles:
- parent_saves = unitofwork.SaveUpdateState(
- uow,
- state,
- parent_base_mapper)
-
- if child_in_cycles:
- child_actions = []
- for child_state, child in sum_:
- if child_state not in uow.states:
- child_action = (None, None)
- else:
- (deleted, listonly) = uow.states[child_state]
- if deleted:
- child_action = (
- unitofwork.DeleteState(
- uow, child_state,
- child_base_mapper),
- True)
- else:
- child_action = (
- unitofwork.SaveUpdateState(
- uow, child_state,
- child_base_mapper),
- False)
- child_actions.append(child_action)
-
- # establish dependencies between our possibly per-state
- # parent action and our possibly per-state child action.
- for child_action, childisdelete in child_actions:
- self.per_state_dependencies(uow, parent_saves,
- parent_deletes,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete)
-
-
- def presort_deletes(self, uowcommit, states):
- return False
-
- def presort_saves(self, uowcommit, states):
- return False
-
- def process_deletes(self, uowcommit, states):
- pass
-
- def process_saves(self, uowcommit, states):
- pass
-
- def prop_has_changes(self, uowcommit, states, isdelete):
- if not isdelete or self.passive_deletes:
- passive = attributes.PASSIVE_NO_INITIALIZE
- elif self.direction is MANYTOONE:
- passive = attributes.PASSIVE_NO_FETCH_RELATED
- else:
- passive = attributes.PASSIVE_OFF
-
- for s in states:
- # TODO: add a high speed method
- # to InstanceState which returns: attribute
- # has a non-None value, or had one
- history = uowcommit.get_attribute_history(
- s,
- self.key,
- passive)
- if history and not history.empty():
- return True
- else:
- return states and \
- not self.prop._is_self_referential() and \
- self.mapper in uowcommit.mappers
-
- def _verify_canload(self, state):
- if state is not None and \
- not self.mapper._canload(state,
- allow_subtypes=not self.enable_typechecks):
- if self.mapper._canload(state, allow_subtypes=True):
- raise exc.FlushError('Attempting to flush an item of type '
- '%(x)s as a member of collection '
- '"%(y)s". Expected an object of type '
- '%(z)s or a polymorphic subclass of '
- 'this type. If %(x)s is a subclass of '
- '%(z)s, configure mapper "%(zm)s" to '
- 'load this subtype polymorphically, or '
- 'set enable_typechecks=False to allow '
- 'any subtype to be accepted for flush. '
- % {
- 'x': state.class_,
- 'y': self.prop,
- 'z': self.mapper.class_,
- 'zm': self.mapper,
- })
- else:
- raise exc.FlushError(
- 'Attempting to flush an item of type '
- '%(x)s as a member of collection '
- '"%(y)s". Expected an object of type '
- '%(z)s or a polymorphic subclass of '
- 'this type.' % {
- 'x': state.class_,
- 'y': self.prop,
- 'z': self.mapper.class_,
- })
-
- def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit):
- raise NotImplementedError()
-
- def _get_reversed_processed_set(self, uow):
- if not self.prop._reverse_property:
- return None
-
- process_key = tuple(sorted(
- [self.key] +
- [p.key for p in self.prop._reverse_property]
- ))
- return uow.memo(
- ('reverse_key', process_key),
- set
- )
-
- def _post_update(self, state, uowcommit, related):
- for x in related:
- if x is not None:
- uowcommit.issue_post_update(
- state,
- [r for l, r in self.prop.synchronize_pairs]
- )
- break
-
- def _pks_changed(self, uowcommit, state):
- raise NotImplementedError()
-
- def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, self.prop)
-
-class OneToManyDP(DependencyProcessor):
-
- def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete,
- ):
- if self.post_update:
- child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
- child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
-
- uow.dependencies.update([
- (child_saves, after_save),
- (parent_saves, after_save),
- (after_save, child_post_updates),
-
- (before_delete, child_pre_updates),
- (child_pre_updates, parent_deletes),
- (child_pre_updates, child_deletes),
-
- ])
- else:
- uow.dependencies.update([
- (parent_saves, after_save),
- (after_save, child_saves),
- (after_save, child_deletes),
-
- (child_saves, parent_deletes),
- (child_deletes, parent_deletes),
-
- (before_delete, child_saves),
- (before_delete, child_deletes),
- ])
-
- def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
-
- if self.post_update:
-
- child_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- False)
- child_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.mapper.primary_base_mapper,
- True)
-
- # TODO: this whole block is not covered
- # by any tests
- if not isdelete:
- if childisdelete:
- uow.dependencies.update([
- (child_action, after_save),
- (after_save, child_post_updates),
- ])
- else:
- uow.dependencies.update([
- (save_parent, after_save),
- (child_action, after_save),
- (after_save, child_post_updates),
- ])
- else:
- if childisdelete:
- uow.dependencies.update([
- (before_delete, child_pre_updates),
- (child_pre_updates, delete_parent),
- ])
- else:
- uow.dependencies.update([
- (before_delete, child_pre_updates),
- (child_pre_updates, delete_parent),
- ])
- elif not isdelete:
- uow.dependencies.update([
- (save_parent, after_save),
- (after_save, child_action),
- (save_parent, child_action)
- ])
- else:
- uow.dependencies.update([
- (before_delete, child_action),
- (child_action, delete_parent)
- ])
-
- def presort_deletes(self, uowcommit, states):
- # head object is being deleted, and we manage its list of
- # child objects; the child objects have to have their
- # foreign key to the parent set to NULL
- should_null_fks = not self.cascade.delete and \
- not self.passive_deletes == 'all'
-
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- for child in history.deleted:
- if child is not None and self.hasparent(child) is False:
- if self.cascade.delete_orphan:
- uowcommit.register_object(child, isdelete=True)
- else:
- uowcommit.register_object(child)
-
- if should_null_fks:
- for child in history.unchanged:
- if child is not None:
- uowcommit.register_object(child,
- operation="delete", prop=self.prop)
-
-
-
- def presort_saves(self, uowcommit, states):
- children_added = uowcommit.memo(('children_added', self), set)
-
- for state in states:
- pks_changed = self._pks_changed(uowcommit, state)
-
- if not pks_changed or self.passive_updates:
- passive = attributes.PASSIVE_NO_INITIALIZE
- else:
- passive = attributes.PASSIVE_OFF
-
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- passive)
- if history:
- for child in history.added:
- if child is not None:
- uowcommit.register_object(child, cancel_delete=True,
- operation="add",
- prop=self.prop)
-
- children_added.update(history.added)
-
- for child in history.deleted:
- if not self.cascade.delete_orphan:
- uowcommit.register_object(child, isdelete=False,
- operation='delete',
- prop=self.prop)
- elif self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
- for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete', child):
- uowcommit.register_object(
- st_,
- isdelete=True)
-
- if pks_changed:
- if history:
- for child in history.unchanged:
- if child is not None:
- uowcommit.register_object(
- child,
- False,
- self.passive_updates,
- operation="pk change",
- prop=self.prop)
-
- def process_deletes(self, uowcommit, states):
- # head object is being deleted, and we manage its list of
- # child objects the child objects have to have their foreign
- # key to the parent set to NULL this phase can be called
- # safely for any cascade but is unnecessary if delete cascade
- # is on.
-
- if self.post_update or not self.passive_deletes == 'all':
- children_added = uowcommit.memo(('children_added', self), set)
-
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- for child in history.deleted:
- if child is not None and \
- self.hasparent(child) is False:
- self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
- if self.post_update and child:
- self._post_update(child, uowcommit, [state])
-
- if self.post_update or not self.cascade.delete:
- for child in set(history.unchanged).\
- difference(children_added):
- if child is not None:
- self._synchronize(
- state,
- child,
- None, True,
- uowcommit, False)
- if self.post_update and child:
- self._post_update(child,
- uowcommit,
- [state])
-
- # technically, we can even remove each child from the
- # collection here too. but this would be a somewhat
- # inconsistent behavior since it wouldn't happen
- # if the old parent wasn't deleted but child was moved.
-
- def process_saves(self, uowcommit, states):
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history:
- for child in history.added:
- self._synchronize(state, child, None,
- False, uowcommit, False)
- if child is not None and self.post_update:
- self._post_update(child, uowcommit, [state])
-
- for child in history.deleted:
- if not self.cascade.delete_orphan and \
- not self.hasparent(child):
- self._synchronize(state, child, None, True,
- uowcommit, False)
-
- if self._pks_changed(uowcommit, state):
- for child in history.unchanged:
- self._synchronize(state, child, None,
- False, uowcommit, True)
-
- def _synchronize(self, state, child,
- associationrow, clearkeys, uowcommit,
- pks_changed):
- source = state
- dest = child
- if dest is None or \
- (not self.post_update and uowcommit.is_deleted(dest)):
- return
- self._verify_canload(child)
- if clearkeys:
- sync.clear(dest, self.mapper, self.prop.synchronize_pairs)
- else:
- sync.populate(source, self.parent, dest, self.mapper,
- self.prop.synchronize_pairs, uowcommit,
- self.passive_updates and pks_changed)
-
- def _pks_changed(self, uowcommit, state):
- return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
-
-class ManyToOneDP(DependencyProcessor):
- def __init__(self, prop):
- DependencyProcessor.__init__(self, prop)
- self.mapper._dependency_processors.append(DetectKeySwitch(prop))
-
- def per_property_dependencies(self, uow,
- parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete):
-
- if self.post_update:
- parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
- parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
-
- uow.dependencies.update([
- (child_saves, after_save),
- (parent_saves, after_save),
- (after_save, parent_post_updates),
-
- (after_save, parent_pre_updates),
- (before_delete, parent_pre_updates),
-
- (parent_pre_updates, child_deletes),
- ])
- else:
- uow.dependencies.update([
- (child_saves, after_save),
- (after_save, parent_saves),
- (parent_saves, child_deletes),
- (parent_deletes, child_deletes)
- ])
-
- def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
-
- if self.post_update:
-
- if not isdelete:
- parent_post_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- False)
- if childisdelete:
- uow.dependencies.update([
- (after_save, parent_post_updates),
- (parent_post_updates, child_action)
- ])
- else:
- uow.dependencies.update([
- (save_parent, after_save),
- (child_action, after_save),
-
- (after_save, parent_post_updates)
- ])
- else:
- parent_pre_updates = unitofwork.IssuePostUpdate(
- uow,
- self.parent.primary_base_mapper,
- True)
-
- uow.dependencies.update([
- (before_delete, parent_pre_updates),
- (parent_pre_updates, delete_parent),
- (parent_pre_updates, child_action)
- ])
-
- elif not isdelete:
- if not childisdelete:
- uow.dependencies.update([
- (child_action, after_save),
- (after_save, save_parent),
- ])
- else:
- uow.dependencies.update([
- (after_save, save_parent),
- ])
-
- else:
- if childisdelete:
- uow.dependencies.update([
- (delete_parent, child_action)
- ])
-
- def presort_deletes(self, uowcommit, states):
- if self.cascade.delete or self.cascade.delete_orphan:
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- if self.cascade.delete_orphan:
- todelete = history.sum()
- else:
- todelete = history.non_deleted()
- for child in todelete:
- if child is None:
- continue
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
- for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete', child):
- uowcommit.register_object(
- st_, isdelete=True)
-
- def presort_saves(self, uowcommit, states):
- for state in states:
- uowcommit.register_object(state, operation="add", prop=self.prop)
- if self.cascade.delete_orphan:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- ret = True
- for child in history.deleted:
- if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
-
- for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete', child):
- uowcommit.register_object(
- st_,
- isdelete=True)
-
- def process_deletes(self, uowcommit, states):
- if self.post_update and \
- not self.cascade.delete_orphan and \
- not self.passive_deletes == 'all':
-
- # post_update means we have to update our
- # row to not reference the child object
- # before we can DELETE the row
- for state in states:
- self._synchronize(state, None, None, True, uowcommit)
- if state and self.post_update:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- self._post_update(state, uowcommit, history.sum())
-
- def process_saves(self, uowcommit, states):
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history:
- for child in history.added:
- self._synchronize(state, child, None, False,
- uowcommit, "add")
-
- if self.post_update:
- self._post_update(state, uowcommit, history.sum())
-
- def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation=None):
- if state is None or \
- (not self.post_update and uowcommit.is_deleted(state)):
- return
-
- if operation is not None and \
- child is not None and \
- not uowcommit.session._contains_state(child):
- util.warn(
- "Object of type %s not in session, %s "
- "operation along '%s' won't proceed" %
- (mapperutil.state_class_str(child), operation, self.prop))
- return
-
- if clearkeys or child is None:
- sync.clear(state, self.parent, self.prop.synchronize_pairs)
- else:
- self._verify_canload(child)
- sync.populate(child, self.mapper, state,
- self.parent,
- self.prop.synchronize_pairs,
- uowcommit,
- False)
-
-class DetectKeySwitch(DependencyProcessor):
- """For many-to-one relationships with no one-to-many backref,
- searches for parents through the unit of work when a primary
- key has changed and updates them.
-
- Theoretically, this approach could be expanded to support transparent
- deletion of objects referenced via many-to-one as well, although
- the current attribute system doesn't do enough bookkeeping for this
- to be efficient.
-
- """
-
- def per_property_preprocessors(self, uow):
- if self.prop._reverse_property:
- if self.passive_updates:
- return
- else:
- if False in (prop.passive_updates for \
- prop in self.prop._reverse_property):
- return
-
- uow.register_preprocessor(self, False)
-
- def per_property_flush_actions(self, uow):
- parent_saves = unitofwork.SaveUpdateAll(
- uow,
- self.parent.base_mapper)
- after_save = unitofwork.ProcessAll(uow, self, False, False)
- uow.dependencies.update([
- (parent_saves, after_save)
- ])
-
- def per_state_flush_actions(self, uow, states, isdelete):
- pass
-
- def presort_deletes(self, uowcommit, states):
- pass
-
- def presort_saves(self, uow, states):
- if not self.passive_updates:
- # for non-passive updates, register in the preprocess stage
- # so that mapper save_obj() gets a hold of changes
- self._process_key_switches(states, uow)
-
- def prop_has_changes(self, uow, states, isdelete):
- if not isdelete and self.passive_updates:
- d = self._key_switchers(uow, states)
- return bool(d)
-
- return False
-
- def process_deletes(self, uowcommit, states):
- assert False
-
- def process_saves(self, uowcommit, states):
- # for passive updates, register objects in the process stage
- # so that we avoid ManyToOneDP's registering the object without
- # the listonly flag in its own preprocess stage (results in UPDATE
- # statements being emitted)
- assert self.passive_updates
- self._process_key_switches(states, uowcommit)
-
- def _key_switchers(self, uow, states):
- switched, notswitched = uow.memo(
- ('pk_switchers', self),
- lambda: (set(), set())
- )
-
- allstates = switched.union(notswitched)
- for s in states:
- if s not in allstates:
- if self._pks_changed(uow, s):
- switched.add(s)
- else:
- notswitched.add(s)
- return switched
-
- def _process_key_switches(self, deplist, uowcommit):
- switchers = self._key_switchers(uowcommit, deplist)
- if switchers:
- # if primary key values have actually changed somewhere, perform
- # a linear search through the UOW in search of a parent.
- for state in uowcommit.session.identity_map.all_states():
- if not issubclass(state.class_, self.parent.class_):
- continue
- dict_ = state.dict
- related = state.get_impl(self.key).get(state, dict_,
- passive=self._passive_update_flag)
- if related is not attributes.PASSIVE_NO_RESULT and \
- related is not None:
- related_state = attributes.instance_state(dict_[self.key])
- if related_state in switchers:
- uowcommit.register_object(state,
- False,
- self.passive_updates)
- sync.populate(
- related_state,
- self.mapper, state,
- self.parent, self.prop.synchronize_pairs,
- uowcommit, self.passive_updates)
-
- def _pks_changed(self, uowcommit, state):
- return bool(state.key) and sync.source_modified(uowcommit,
- state,
- self.mapper,
- self.prop.synchronize_pairs)
-
-
-class ManyToManyDP(DependencyProcessor):
-
- def per_property_dependencies(self, uow, parent_saves,
- child_saves,
- parent_deletes,
- child_deletes,
- after_save,
- before_delete
- ):
-
- uow.dependencies.update([
- (parent_saves, after_save),
- (child_saves, after_save),
- (after_save, child_deletes),
-
- # a rowswitch on the parent from deleted to saved
- # can make this one occur, as the "save" may remove
- # an element from the
- # "deleted" list before we have a chance to
- # process its child rows
- (before_delete, parent_saves),
-
- (before_delete, parent_deletes),
- (before_delete, child_deletes),
- (before_delete, child_saves),
- ])
-
- def per_state_dependencies(self, uow,
- save_parent,
- delete_parent,
- child_action,
- after_save, before_delete,
- isdelete, childisdelete):
- if not isdelete:
- if childisdelete:
- uow.dependencies.update([
- (save_parent, after_save),
- (after_save, child_action),
- ])
- else:
- uow.dependencies.update([
- (save_parent, after_save),
- (child_action, after_save),
- ])
- else:
- uow.dependencies.update([
- (before_delete, child_action),
- (before_delete, delete_parent)
- ])
-
- def presort_deletes(self, uowcommit, states):
- if not self.passive_deletes:
- # if no passive deletes, load history on
- # the collection, so that prop_has_changes()
- # returns True
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
-
- def presort_saves(self, uowcommit, states):
- if not self.passive_updates:
- # if no passive updates, load history on
- # each collection where parent has changed PK,
- # so that prop_has_changes() returns True
- for state in states:
- if self._pks_changed(uowcommit, state):
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_OFF)
-
- if not self.cascade.delete_orphan:
- return
-
- # check for child items removed from the collection
- # if delete_orphan check is turned on.
- for state in states:
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history:
- for child in history.deleted:
- if self.hasparent(child) is False:
- uowcommit.register_object(child, isdelete=True,
- operation="delete", prop=self.prop)
- for c, m, st_, dct_ in self.mapper.cascade_iterator(
- 'delete',
- child):
- uowcommit.register_object(
- st_, isdelete=True)
-
- def process_deletes(self, uowcommit, states):
- secondary_delete = []
- secondary_insert = []
- secondary_update = []
-
- processed = self._get_reversed_processed_set(uowcommit)
- tmp = set()
- for state in states:
- # this history should be cached already, as
- # we loaded it in presort_deletes
- history = uowcommit.get_attribute_history(
- state,
- self.key,
- self._passive_delete_flag)
- if history:
- for child in history.non_added():
- if child is None or \
- (processed is not None and
- (state, child) in processed):
- continue
- associationrow = {}
- if not self._synchronize(
- state,
- child,
- associationrow,
- False, uowcommit, "delete"):
- continue
- secondary_delete.append(associationrow)
-
- tmp.update((c, state) for c in history.non_added())
-
- if processed is not None:
- processed.update(tmp)
-
- self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
-
- def process_saves(self, uowcommit, states):
- secondary_delete = []
- secondary_insert = []
- secondary_update = []
-
- processed = self._get_reversed_processed_set(uowcommit)
- tmp = set()
-
- for state in states:
- need_cascade_pks = not self.passive_updates and \
- self._pks_changed(uowcommit, state)
- if need_cascade_pks:
- passive = attributes.PASSIVE_OFF
- else:
- passive = attributes.PASSIVE_NO_INITIALIZE
- history = uowcommit.get_attribute_history(state, self.key,
- passive)
- if history:
- for child in history.added:
- if child is None or \
- (processed is not None and
- (state, child) in processed):
- continue
- associationrow = {}
- if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "add"):
- continue
- secondary_insert.append(associationrow)
- for child in history.deleted:
- if child is None or \
- (processed is not None and
- (state, child) in processed):
- continue
- associationrow = {}
- if not self._synchronize(state,
- child,
- associationrow,
- False, uowcommit, "delete"):
- continue
- secondary_delete.append(associationrow)
-
- tmp.update((c, state)
- for c in history.added + history.deleted)
-
- if need_cascade_pks:
-
- for child in history.unchanged:
- associationrow = {}
- sync.update(state,
- self.parent,
- associationrow,
- "old_",
- self.prop.synchronize_pairs)
- sync.update(child,
- self.mapper,
- associationrow,
- "old_",
- self.prop.secondary_synchronize_pairs)
-
- secondary_update.append(associationrow)
-
- if processed is not None:
- processed.update(tmp)
-
- self._run_crud(uowcommit, secondary_insert,
- secondary_update, secondary_delete)
-
- def _run_crud(self, uowcommit, secondary_insert,
- secondary_update, secondary_delete):
- connection = uowcommit.transaction.connection(self.mapper)
-
- if secondary_delete:
- associationrow = secondary_delete[0]
- statement = self.secondary.delete(sql.and_(*[
- c == sql.bindparam(c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
- result = connection.execute(statement, secondary_delete)
-
- if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_delete):
- raise exc.StaleDataError(
- "DELETE statement on table '%s' expected to delete %d row(s); "
- "Only %d were matched." %
- (self.secondary.description, len(secondary_delete),
- result.rowcount)
- )
-
- if secondary_update:
- associationrow = secondary_update[0]
- statement = self.secondary.update(sql.and_(*[
- c == sql.bindparam("old_" + c.key, type_=c.type)
- for c in self.secondary.c
- if c.key in associationrow
- ]))
- result = connection.execute(statement, secondary_update)
- if result.supports_sane_multi_rowcount() and \
- result.rowcount != len(secondary_update):
- raise exc.StaleDataError(
- "UPDATE statement on table '%s' expected to update %d row(s); "
- "Only %d were matched." %
- (self.secondary.description, len(secondary_update),
- result.rowcount)
- )
-
- if secondary_insert:
- statement = self.secondary.insert()
- connection.execute(statement, secondary_insert)
-
- def _synchronize(self, state, child, associationrow,
- clearkeys, uowcommit, operation):
- if associationrow is None:
- return
-
- if child is not None and not uowcommit.session._contains_state(child):
- if not child.deleted:
- util.warn(
- "Object of type %s not in session, %s "
- "operation along '%s' won't proceed" %
- (mapperutil.state_class_str(child), operation, self.prop))
- return False
-
- self._verify_canload(child)
-
- sync.populate_dict(state, self.parent, associationrow,
- self.prop.synchronize_pairs)
- sync.populate_dict(child, self.mapper, associationrow,
- self.prop.secondary_synchronize_pairs)
-
- return True
-
- def _pks_changed(self, uowcommit, state):
- return sync.source_modified(
- uowcommit,
- state,
- self.parent,
- self.prop.synchronize_pairs)
-
-_direction_to_processor = {
- ONETOMANY : OneToManyDP,
- MANYTOONE: ManyToOneDP,
- MANYTOMANY : ManyToManyDP,
-}
-
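
The ``post_update`` paths above (the ``IssuePostUpdate`` steps) exist for
mutually-dependent rows, where two INSERTs cannot be ordered to satisfy
both foreign keys at once. A hedged sketch of the kind of mapping that
exercises them (the ``Widget``/``Entry`` names and the
``fk_favorite_entry`` constraint name are invented for illustration)::

    from sqlalchemy import Column, ForeignKey, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import relationship

    Base = declarative_base()

    class Widget(Base):
        __tablename__ = 'widget'
        id = Column(Integer, primary_key=True)
        # use_alter breaks the table-creation cycle between the two FKs
        favorite_entry_id = Column(
            Integer,
            ForeignKey('entry.id', use_alter=True,
                       name='fk_favorite_entry'))
        # post_update=True makes the flush insert the row first and set
        # this foreign key in a second UPDATE (the IssuePostUpdate step
        # above), rather than trying to order two mutually-dependent
        # INSERTs
        favorite_entry = relationship(
            "Entry",
            primaryjoin="Widget.favorite_entry_id == Entry.id",
            post_update=True)

    class Entry(Base):
        __tablename__ = 'entry'
        id = Column(Integer, primary_key=True)
        widget_id = Column(Integer, ForeignKey('widget.id'))
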
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/deprecated_interfaces.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/deprecated_interfaces.py
deleted file mode 100755
index d5a9ab9c..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/deprecated_interfaces.py
+++ /dev/null
@@ -1,583 +0,0 @@
-# orm/deprecated_interfaces.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy import event, util
-from interfaces import EXT_CONTINUE
-
-
-class MapperExtension(object):
- """Base implementation for :class:`.Mapper` event hooks.
-
- .. note:: :class:`.MapperExtension` is deprecated. Please
- refer to :func:`.event.listen` as well as
- :class:`.MapperEvents`.
-
- New extension classes subclass :class:`.MapperExtension` and are specified
- using the ``extension`` mapper() argument, which is a single
- :class:`.MapperExtension` or a list of such::
-
- from sqlalchemy.orm.interfaces import MapperExtension
-
- class MyExtension(MapperExtension):
- def before_insert(self, mapper, connection, instance):
- print "instance %s before insert !" % instance
-
- m = mapper(User, users_table, extension=MyExtension())
-
- A single mapper can maintain a chain of ``MapperExtension``
- objects. When a particular mapping event occurs, the
- corresponding method on each ``MapperExtension`` is invoked
- serially, and each method has the ability to halt the chain
- from proceeding further::
-
- m = mapper(User, users_table, extension=[ext1, ext2, ext3])
-
- Each ``MapperExtension`` method returns the symbol
- EXT_CONTINUE by default. This symbol generally means "move
- to the next ``MapperExtension`` for processing". For methods
- that return objects like translated rows or new object
- instances, EXT_CONTINUE means the result of the method
- should be ignored. In some cases it's required for a
- default mapper activity to be performed, such as adding a
- new instance to a result list.
-
- When returned within a chain of ``MapperExtension``
- objects, the symbol EXT_STOP signifies that the chain will
- be stopped. Like EXT_CONTINUE, in some cases it additionally
- signifies that a default mapper activity will not be
- performed.
-
- """
-
- @classmethod
- def _adapt_instrument_class(cls, self, listener):
- cls._adapt_listener_methods(self, listener, ('instrument_class',))
-
- @classmethod
- def _adapt_listener(cls, self, listener):
- cls._adapt_listener_methods(
- self, listener,
- (
- 'init_instance',
- 'init_failed',
- 'translate_row',
- 'create_instance',
- 'append_result',
- 'populate_instance',
- 'reconstruct_instance',
- 'before_insert',
- 'after_insert',
- 'before_update',
- 'after_update',
- 'before_delete',
- 'after_delete'
- ))
-
- @classmethod
- def _adapt_listener_methods(cls, self, listener, methods):
-
- for meth in methods:
- me_meth = getattr(MapperExtension, meth)
- ls_meth = getattr(listener, meth)
-
- if not util.methods_equivalent(me_meth, ls_meth):
- if meth == 'reconstruct_instance':
- def go(ls_meth):
- def reconstruct(instance, ctx):
- ls_meth(self, instance)
- return reconstruct
- event.listen(self.class_manager, 'load',
- go(ls_meth), raw=False, propagate=True)
- elif meth == 'init_instance':
- def go(ls_meth):
- def init_instance(instance, args, kwargs):
- ls_meth(self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
- return init_instance
- event.listen(self.class_manager, 'init',
- go(ls_meth), raw=False, propagate=True)
- elif meth == 'init_failed':
- def go(ls_meth):
- def init_failed(instance, args, kwargs):
- util.warn_exception(ls_meth, self, self.class_,
- self.class_manager.original_init,
- instance, args, kwargs)
-
- return init_failed
- event.listen(self.class_manager, 'init_failure',
- go(ls_meth), raw=False, propagate=True)
- else:
- event.listen(self, "%s" % meth, ls_meth,
- raw=False, retval=True, propagate=True)
-
-
- def instrument_class(self, mapper, class_):
- """Receive a class when the mapper is first constructed, and has
- applied instrumentation to the mapped class.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def init_instance(self, mapper, class_, oldinit, instance, args, kwargs):
- """Receive an instance when it's constructor is called.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def init_failed(self, mapper, class_, oldinit, instance, args, kwargs):
- """Receive an instance when it's constructor has been called,
- and raised an exception.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def translate_row(self, mapper, context, row):
- """Perform pre-processing on the given result row and return a
- new row instance.
-
- This is called when the mapper first receives a row, before
- the object identity or the instance itself has been derived
- from that row. The given row may or may not be a
- ``RowProxy`` object - it will always be a dictionary-like
- object which contains mapped columns as keys. The
- returned object should also be a dictionary-like object
- which recognizes mapped columns as keys.
-
- If the ultimate return value is EXT_CONTINUE, the row
- is not translated.
-
- """
- return EXT_CONTINUE
-
- def create_instance(self, mapper, selectcontext, row, class_):
- """Receive a row when a new object instance is about to be
- created from that row.
-
- The method can choose to create the instance itself, or it can return
- EXT_CONTINUE to indicate normal object creation should take place.
-
- mapper
- The mapper doing the operation
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database
-
- class\_
- The class we are mapping.
-
- return value
- A new object instance, or EXT_CONTINUE
-
- """
- return EXT_CONTINUE
-
- def append_result(self, mapper, selectcontext, row, instance,
- result, **flags):
- """Receive an object instance before that instance is appended
- to a result list.
-
- If this method returns EXT_CONTINUE, result appending will proceed
- normally. If this method returns any other value or None,
- result appending will not proceed for this instance, giving
- this extension an opportunity to do the appending itself, if
- desired.
-
- mapper
- The mapper doing the operation.
-
- selectcontext
- The QueryContext generated from the Query.
-
- row
- The result row from the database.
-
- instance
- The object instance to be appended to the result.
-
- result
- List to which results are being appended.
-
- \**flags
- extra information about the row, same as criterion in
- ``create_row_processor()`` method of
- :class:`~sqlalchemy.orm.interfaces.MapperProperty`
- """
-
- return EXT_CONTINUE
-
- def populate_instance(self, mapper, selectcontext, row,
- instance, **flags):
- """Receive an instance before that instance has
- its attributes populated.
-
- This usually corresponds to a newly loaded instance but may
- also correspond to an already-loaded instance which has
- unloaded attributes to be populated. The method may be called
- many times for a single instance, as multiple result rows are
- used to populate eagerly loaded collections.
-
- If this method returns EXT_CONTINUE, instance population will
- proceed normally. If any other value or None is returned,
- instance population will not proceed, giving this extension an
- opportunity to populate the instance itself, if desired.
-
- As of 0.5, most usages of this hook are obsolete. For a
- generic "object has been newly created from a row" hook, use
- ``reconstruct_instance()``, or the ``@orm.reconstructor``
- decorator.
-
- """
- return EXT_CONTINUE
-
- def reconstruct_instance(self, mapper, instance):
- """Receive an object instance after it has been created via
- ``__new__``, and after initial attribute population has
- occurred.
-
- This typically occurs when the instance is created based on
- incoming result rows, and is only called once for that
- instance's lifetime.
-
- Note that during a result-row load, this method is called upon
- the first row received for this instance. Note that some
- attributes and collections may or may not be loaded or even
- initialized, depending on what's present in the result rows.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
- return EXT_CONTINUE
-
- def before_insert(self, mapper, connection, instance):
- """Receive an object instance before that instance is inserted
- into its table.
-
- This is a good place to set up primary key values and such
- that aren't handled otherwise.
-
- Column-based attributes can be modified within this method
- which will result in the new value being inserted. However
- *no* changes to the overall flush plan can be made, and
- manipulation of the ``Session`` will not have the desired effect.
- To manipulate the ``Session`` within an extension, use
- ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_insert(self, mapper, connection, instance):
- """Receive an object instance after that instance is inserted.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def before_update(self, mapper, connection, instance):
- """Receive an object instance before that instance is updated.
-
- Note that this method is called for all instances that are marked as
- "dirty", even those which have no net changes to their column-based
- attributes. An object is marked as dirty when any of its column-based
- attributes have a "set attribute" operation called or when any of its
- collections are modified. If, at update time, no column-based
- attributes have any net changes, no UPDATE statement will be issued.
- This means that an instance being sent to before_update is *not* a
- guarantee that an UPDATE statement will be issued (although you can
- affect the outcome here).
-
- To detect if the column-based attributes on the object have net
- changes, and will therefore generate an UPDATE statement, use
- ``object_session(instance).is_modified(instance,
- include_collections=False)``.
-
- Column-based attributes can be modified within this method
- which will result in the new value being updated. However
- *no* changes to the overall flush plan can be made, and
- manipulation of the ``Session`` will not have the desired effect.
- To manipulate the ``Session`` within an extension, use
- ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_update(self, mapper, connection, instance):
- """Receive an object instance after that instance is updated.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def before_delete(self, mapper, connection, instance):
- """Receive an object instance before that instance is deleted.
-
- Note that *no* changes to the overall flush plan can be made
- here; and manipulation of the ``Session`` will not have the
- desired effect. To manipulate the ``Session`` within an
- extension, use ``SessionExtension``.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
- def after_delete(self, mapper, connection, instance):
- """Receive an object instance after that instance is deleted.
-
- The return value is only significant within the ``MapperExtension``
- chain; the parent mapper's behavior isn't modified by this method.
-
- """
-
- return EXT_CONTINUE
-
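The deprecated persistence hooks above translate directly onto the 0.7 event API. A minimal sketch of the modern equivalent of ``before_insert()``, assuming a hypothetical mapped class ``SomeClass``:

from sqlalchemy import event

def my_before_insert(mapper, connection, target):
    # same signature as MapperExtension.before_insert(); no
    # EXT_CONTINUE return value is required with event.listen()
    target.created_flag = True

event.listen(SomeClass, 'before_insert', my_before_insert)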
-class SessionExtension(object):
-
- """Base implementation for :class:`.Session` event hooks.
-
- .. note:: :class:`.SessionExtension` is deprecated. Please
- refer to :func:`.event.listen` as well as
- :class:`.SessionEvents`.
-
- Subclasses may be installed into a :class:`.Session` (or
- :func:`.sessionmaker`) using the ``extension`` keyword
- argument::
-
- from sqlalchemy.orm.interfaces import SessionExtension
-
- class MySessionExtension(SessionExtension):
- def before_commit(self, session):
- print "before commit!"
-
- Session = sessionmaker(extension=MySessionExtension())
-
- The same :class:`.SessionExtension` instance can be used
- with any number of sessions.
-
- """
-
- @classmethod
- def _adapt_listener(cls, self, listener):
- for meth in [
- 'before_commit',
- 'after_commit',
- 'after_rollback',
- 'before_flush',
- 'after_flush',
- 'after_flush_postexec',
- 'after_begin',
- 'after_attach',
- 'after_bulk_update',
- 'after_bulk_delete',
- ]:
- me_meth = getattr(SessionExtension, meth)
- ls_meth = getattr(listener, meth)
-
- if not util.methods_equivalent(me_meth, ls_meth):
- event.listen(self, meth, getattr(listener, meth))
-
- def before_commit(self, session):
- """Execute right before commit is called.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def after_commit(self, session):
- """Execute after a commit has occurred.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def after_rollback(self, session):
- """Execute after a rollback has occurred.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
-    def before_flush(self, session, flush_context, instances):
- """Execute before flush process has started.
-
- `instances` is an optional list of objects which were passed to
- the ``flush()`` method. """
-
- def after_flush(self, session, flush_context):
- """Execute after flush has completed, but before commit has been
- called.
-
- Note that the session's state is still in pre-flush, i.e. 'new',
- 'dirty', and 'deleted' lists still show pre-flush state as well
- as the history settings on instance attributes."""
-
- def after_flush_postexec(self, session, flush_context):
- """Execute after flush has completed, and after the post-exec
- state occurs.
-
- This will be when the 'new', 'dirty', and 'deleted' lists are in
- their final state. An actual commit() may or may not have
- occurred, depending on whether or not the flush started its own
- transaction or participated in a larger transaction. """
-
-    def after_begin(self, session, transaction, connection):
-        """Execute after a transaction is begun on a connection.
-
-        `transaction` is the SessionTransaction. This method is called
-        after an engine-level transaction is begun on a connection. """
-
- def after_attach(self, session, instance):
- """Execute after an instance is attached to a session.
-
- This is called after an add, delete or merge. """
-
-    def after_bulk_update(self, session, query, query_context, result):
-        """Execute after a bulk update operation on the session.
-
-        This is called after a session.query(...).update().
-
-        `query` is the query object that this update operation was
-        called on. `query_context` is the query context object.
-        `result` is the result object returned from the bulk operation.
-        """
-
-    def after_bulk_delete(self, session, query, query_context, result):
-        """Execute after a bulk delete operation on the session.
-
-        This is called after a session.query(...).delete().
-
-        `query` is the query object that this delete operation was
-        called on. `query_context` is the query context object.
-        `result` is the result object returned from the bulk operation.
-        """
-
-
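Since :class:`.SessionExtension` is deprecated in favor of :func:`.event.listen`, the docstring example above maps onto the event API roughly as follows (a sketch; a ``sessionmaker()`` is accepted as the event target):

from sqlalchemy import event
from sqlalchemy.orm import sessionmaker

Session = sessionmaker()

def my_before_commit(session):
    print "before commit!"

# listening on the sessionmaker applies the hook to all
# sessions it subsequently creates
event.listen(Session, 'before_commit', my_before_commit)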
-class AttributeExtension(object):
- """Base implementation for :class:`.AttributeImpl` event hooks, events
- that fire upon attribute mutations in user code.
-
- .. note:: :class:`.AttributeExtension` is deprecated. Please
- refer to :func:`.event.listen` as well as
- :class:`.AttributeEvents`.
-
- :class:`.AttributeExtension` is used to listen for set,
- remove, and append events on individual mapped attributes.
- It is established on an individual mapped attribute using
- the `extension` argument, available on
- :func:`.column_property`, :func:`.relationship`, and
- others::
-
- from sqlalchemy.orm.interfaces import AttributeExtension
- from sqlalchemy.orm import mapper, relationship, column_property
-
- class MyAttrExt(AttributeExtension):
- def append(self, state, value, initiator):
-                print "append event!"
- return value
-
- def set(self, state, value, oldvalue, initiator):
-                print "set event!"
- return value
-
- mapper(SomeClass, sometable, properties={
- 'foo':column_property(sometable.c.foo, extension=MyAttrExt()),
- 'bar':relationship(Bar, extension=MyAttrExt())
- })
-
- Note that the :class:`.AttributeExtension` methods
- :meth:`~.AttributeExtension.append` and
- :meth:`~.AttributeExtension.set` need to return the
- ``value`` parameter. The returned value is used as the
- effective value, and allows the extension to change what is
- ultimately persisted.
-
- AttributeExtension is assembled within the descriptors associated
- with a mapped class.
-
- """
-
- active_history = True
- """indicates that the set() method would like to receive the 'old' value,
- even if it means firing lazy callables.
-
- Note that ``active_history`` can also be set directly via
- :func:`.column_property` and :func:`.relationship`.
-
- """
-
- @classmethod
- def _adapt_listener(cls, self, listener):
- event.listen(self, 'append', listener.append,
- active_history=listener.active_history,
- raw=True, retval=True)
- event.listen(self, 'remove', listener.remove,
- active_history=listener.active_history,
- raw=True, retval=True)
- event.listen(self, 'set', listener.set,
- active_history=listener.active_history,
- raw=True, retval=True)
-
- def append(self, state, value, initiator):
- """Receive a collection append event.
-
- The returned value will be used as the actual value to be
- appended.
-
- """
- return value
-
- def remove(self, state, value, initiator):
- """Receive a remove event.
-
- No return value is defined.
-
- """
- pass
-
- def set(self, state, value, oldvalue, initiator):
- """Receive a set event.
-
- The returned value will be used as the actual value to be
- set.
-
- """
- return value
-
-
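The ``_adapt_listener()`` classmethod above shows the exact translation: each :class:`.AttributeExtension` method becomes an 'append', 'remove', or 'set' listener registered with ``retval=True``. A sketch of the direct event form, using a hypothetical mapped attribute ``SomeClass.foo``:

from sqlalchemy import event

def on_append(target, value, initiator):
    print "append event!"
    # with retval=True the listener's return value is used as the
    # effective value, mirroring AttributeExtension.append()
    return value

event.listen(SomeClass.foo, 'append', on_append,
             retval=True, active_history=True)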
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/descriptor_props.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/descriptor_props.py
deleted file mode 100755
index 5ad148a7..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/descriptor_props.py
+++ /dev/null
@@ -1,405 +0,0 @@
-# orm/descriptor_props.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Descriptor properties are more "auxiliary" properties
-that exist as configurational elements, but don't participate
-as actively in the load/persist ORM loop.
-
-"""
-
-from sqlalchemy.orm.interfaces import \
- MapperProperty, PropComparator, StrategizedProperty
-from sqlalchemy.orm.mapper import _none_set
-from sqlalchemy.orm import attributes
-from sqlalchemy import util, sql, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression
-properties = util.importlater('sqlalchemy.orm', 'properties')
-# needed by _setup_arguments_on_columns() below
-strategies = util.importlater('sqlalchemy.orm', 'strategies')
-
-class DescriptorProperty(MapperProperty):
- """:class:`.MapperProperty` which proxies access to a
- user-defined descriptor."""
-
- doc = None
-
- def instrument_class(self, mapper):
- prop = self
-
- class _ProxyImpl(object):
- accepts_scalar_loader = False
- expire_missing = True
-
- def __init__(self, key):
- self.key = key
-
- if hasattr(prop, 'get_history'):
- def get_history(self, state, dict_,
- passive=attributes.PASSIVE_OFF):
- return prop.get_history(state, dict_, passive)
-
- if self.descriptor is None:
- desc = getattr(mapper.class_, self.key, None)
- if mapper._is_userland_descriptor(desc):
- self.descriptor = desc
-
- if self.descriptor is None:
- def fset(obj, value):
- setattr(obj, self.name, value)
- def fdel(obj):
- delattr(obj, self.name)
- def fget(obj):
- return getattr(obj, self.name)
-
- self.descriptor = property(
- fget=fget,
- fset=fset,
- fdel=fdel,
- )
-
- proxy_attr = attributes.\
- create_proxied_attribute(self.descriptor)\
- (
- self.parent.class_,
- self.key,
- self.descriptor,
- lambda: self._comparator_factory(mapper),
- doc=self.doc
- )
- proxy_attr.property = self
- proxy_attr.impl = _ProxyImpl(self.key)
- mapper.class_manager.instrument_attribute(self.key, proxy_attr)
-
-
-class CompositeProperty(DescriptorProperty):
-
- def __init__(self, class_, *attrs, **kwargs):
- self.attrs = attrs
- self.composite_class = class_
- self.active_history = kwargs.get('active_history', False)
- self.deferred = kwargs.get('deferred', False)
- self.group = kwargs.get('group', None)
- util.set_creation_order(self)
- self._create_descriptor()
-
- def instrument_class(self, mapper):
- super(CompositeProperty, self).instrument_class(mapper)
- self._setup_event_handlers()
-
- def do_init(self):
- """Initialization which occurs after the :class:`.CompositeProperty`
- has been associated with its parent mapper.
-
- """
- self._init_props()
- self._setup_arguments_on_columns()
-
- def _create_descriptor(self):
- """Create the Python descriptor that will serve as
- the access point on instances of the mapped class.
-
- """
-
- def fget(instance):
- dict_ = attributes.instance_dict(instance)
-
- if self.key not in dict_:
- # key not present. Iterate through related
- # attributes, retrieve their values. This
- # ensures they all load.
- values = [getattr(instance, key) for key in self._attribute_keys]
-
- # usually, the load() event will have loaded our key
- # at this point, unless we only loaded relationship()
- # attributes above. Populate here if that's the case.
- if self.key not in dict_ and not _none_set.issuperset(values):
- dict_[self.key] = self.composite_class(*values)
-
- return dict_.get(self.key, None)
-
- def fset(instance, value):
- dict_ = attributes.instance_dict(instance)
- state = attributes.instance_state(instance)
- attr = state.manager[self.key]
- previous = dict_.get(self.key, attributes.NO_VALUE)
- for fn in attr.dispatch.set:
- value = fn(state, value, previous, attr.impl)
- dict_[self.key] = value
- if value is None:
- for key in self._attribute_keys:
- setattr(instance, key, None)
- else:
- for key, value in zip(
- self._attribute_keys,
- value.__composite_values__()):
- setattr(instance, key, value)
-
- def fdel(instance):
- state = attributes.instance_state(instance)
- dict_ = attributes.instance_dict(instance)
- previous = dict_.pop(self.key, attributes.NO_VALUE)
- attr = state.manager[self.key]
- attr.dispatch.remove(state, previous, attr.impl)
- for key in self._attribute_keys:
- setattr(instance, key, None)
-
- self.descriptor = property(fget, fset, fdel)
-
- @util.memoized_property
- def _comparable_elements(self):
- return [
- getattr(self.parent.class_, prop.key)
- for prop in self.props
- ]
-
- def _init_props(self):
- self.props = props = []
- for attr in self.attrs:
- if isinstance(attr, basestring):
- prop = self.parent.get_property(attr)
- elif isinstance(attr, schema.Column):
- prop = self.parent._columntoproperty[attr]
- elif isinstance(attr, attributes.InstrumentedAttribute):
- prop = attr.property
- props.append(prop)
-
- @property
- def columns(self):
- return [a for a in self.attrs if isinstance(a, schema.Column)]
-
- def _setup_arguments_on_columns(self):
- """Propagate configuration arguments made on this composite
- to the target columns, for those that apply.
-
- """
- for prop in self.props:
- prop.active_history = self.active_history
- if self.deferred:
- prop.deferred = self.deferred
- prop.strategy_class = strategies.DeferredColumnLoader
- prop.group = self.group
-
- def _setup_event_handlers(self):
- """Establish events that populate/expire the composite attribute."""
-
- def load_handler(state, *args):
- dict_ = state.dict
-
- if self.key in dict_:
- return
-
- # if column elements aren't loaded, skip.
- # __get__() will initiate a load for those
- # columns
- for k in self._attribute_keys:
- if k not in dict_:
- return
-
- dict_[self.key] = self.composite_class(
- *[state.dict[key] for key in
- self._attribute_keys]
- )
-
- def expire_handler(state, keys):
- if keys is None or set(self._attribute_keys).intersection(keys):
- state.dict.pop(self.key, None)
-
- def insert_update_handler(mapper, connection, state):
- state.dict[self.key] = self.composite_class(
- *[state.dict.get(key, None) for key in
- self._attribute_keys]
- )
-
- event.listen(self.parent, 'after_insert',
- insert_update_handler, raw=True)
- event.listen(self.parent, 'after_update',
- insert_update_handler, raw=True)
- event.listen(self.parent, 'load', load_handler, raw=True)
- event.listen(self.parent, 'refresh', load_handler, raw=True)
- event.listen(self.parent, "expire", expire_handler, raw=True)
-
- # TODO: need a deserialize hook here
-
- @util.memoized_property
- def _attribute_keys(self):
- return [
- prop.key for prop in self.props
- ]
-
- def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
- """Provided for userland code that uses attributes.get_history()."""
-
- added = []
- deleted = []
-
- has_history = False
- for prop in self.props:
- key = prop.key
- hist = state.manager[key].impl.get_history(state, dict_)
- if hist.has_changes():
- has_history = True
-
- added.extend(hist.non_deleted())
- if hist.deleted:
- deleted.extend(hist.deleted)
- else:
- deleted.append(None)
-
- if has_history:
- return attributes.History(
- [self.composite_class(*added)],
- (),
- [self.composite_class(*deleted)]
- )
- else:
-            return attributes.History(
-                (), [self.composite_class(*added)], ()
-            )
-
- def _comparator_factory(self, mapper):
- return CompositeProperty.Comparator(self)
-
- class Comparator(PropComparator):
- def __init__(self, prop, adapter=None):
- self.prop = prop
- self.adapter = adapter
-
- def __clause_element__(self):
- if self.adapter:
- # TODO: test coverage for adapted composite comparison
- return expression.ClauseList(
- *[self.adapter(x) for x in self.prop._comparable_elements])
- else:
- return expression.ClauseList(*self.prop._comparable_elements)
-
- __hash__ = None
-
- def __eq__(self, other):
- if other is None:
- values = [None] * len(self.prop._comparable_elements)
- else:
- values = other.__composite_values__()
- return sql.and_(
- *[a==b for a, b in zip(self.prop._comparable_elements, values)])
-
- def __ne__(self, other):
- return sql.not_(self.__eq__(other))
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
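For reference, the descriptor machinery above is what backs a ``composite()`` mapping; a minimal sketch using the classic Point example (``Vertex`` and ``vertices_table`` are illustrative names, the table assumed to have ``x1``/``y1`` columns):

from sqlalchemy.orm import composite, mapper

class Point(object):
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __composite_values__(self):
        # consumed by CompositeProperty's fset() and the
        # insert/update handlers above
        return self.x, self.y

    def __eq__(self, other):
        return isinstance(other, Point) and \
            other.x == self.x and other.y == self.y

mapper(Vertex, vertices_table, properties={
    'start': composite(Point, vertices_table.c.x1, vertices_table.c.y1)
})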
-class ConcreteInheritedProperty(DescriptorProperty):
- """A 'do nothing' :class:`.MapperProperty` that disables
- an attribute on a concrete subclass that is only present
- on the inherited mapper, not the concrete classes' mapper.
-
- Cases where this occurs include:
-
- * When the superclass mapper is mapped against a
- "polymorphic union", which includes all attributes from
- all subclasses.
- * When a relationship() is configured on an inherited mapper,
- but not on the subclass mapper. Concrete mappers require
- that relationship() is configured explicitly on each
- subclass.
-
- """
-
- def _comparator_factory(self, mapper):
- comparator_callable = None
-
- for m in self.parent.iterate_to_root():
- p = m._props[self.key]
- if not isinstance(p, ConcreteInheritedProperty):
- comparator_callable = p.comparator_factory
- break
- return comparator_callable
-
- def __init__(self):
- def warn():
- raise AttributeError("Concrete %s does not implement "
- "attribute %r at the instance level. Add this "
- "property explicitly to %s." %
- (self.parent, self.key, self.parent))
-
- class NoninheritedConcreteProp(object):
- def __set__(s, obj, value):
- warn()
- def __delete__(s, obj):
- warn()
- def __get__(s, obj, owner):
- if obj is None:
- return self.descriptor
- warn()
- self.descriptor = NoninheritedConcreteProp()
-
-
-class SynonymProperty(DescriptorProperty):
-
- def __init__(self, name, map_column=None,
- descriptor=None, comparator_factory=None,
- doc=None):
- self.name = name
- self.map_column = map_column
- self.descriptor = descriptor
- self.comparator_factory = comparator_factory
- self.doc = doc or (descriptor and descriptor.__doc__) or None
-
- util.set_creation_order(self)
-
- # TODO: when initialized, check _proxied_property,
- # emit a warning if its not a column-based property
-
- @util.memoized_property
- def _proxied_property(self):
- return getattr(self.parent.class_, self.name).property
-
- def _comparator_factory(self, mapper):
- prop = self._proxied_property
-
- if self.comparator_factory:
- comp = self.comparator_factory(prop, mapper)
- else:
- comp = prop.comparator_factory(prop, mapper)
- return comp
-
- def set_parent(self, parent, init):
- if self.map_column:
- # implement the 'map_column' option.
- if self.key not in parent.mapped_table.c:
- raise sa_exc.ArgumentError(
- "Can't compile synonym '%s': no column on table "
- "'%s' named '%s'"
- % (self.name, parent.mapped_table.description, self.key))
- elif parent.mapped_table.c[self.key] in \
- parent._columntoproperty and \
- parent._columntoproperty[
- parent.mapped_table.c[self.key]
- ].key == self.name:
- raise sa_exc.ArgumentError(
- "Can't call map_column=True for synonym %r=%r, "
- "a ColumnProperty already exists keyed to the name "
- "%r for column %r" %
- (self.key, self.name, self.name, self.key)
- )
- p = properties.ColumnProperty(parent.mapped_table.c[self.key])
- parent._configure_property(
- self.name, p,
- init=init,
- setparent=True)
- p._mapped_by_synonym = self.key
-
- self.parent = parent
-
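The ``map_column=True`` branch in ``set_parent()`` corresponds to the common usage where the column sharing the synonym's key is remapped under an alternate name; a sketch with illustrative ``MyClass``/``my_table``, the table assumed to have a ``status`` column:

from sqlalchemy.orm import mapper, synonym

class MyClass(object):
    def _get_status(self):
        return self._status
    def _set_status(self, value):
        self._status = value
    status = property(_get_status, _set_status)

# maps my_table.c.status under the attribute name '_status',
# leaving 'status' free for the property above
mapper(MyClass, my_table, properties={
    'status': synonym('_status', map_column=True)
})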
-class ComparableProperty(DescriptorProperty):
- """Instruments a Python property for use in query expressions."""
-
- def __init__(self, comparator_factory, descriptor=None, doc=None):
- self.descriptor = descriptor
- self.comparator_factory = comparator_factory
- self.doc = doc or (descriptor and descriptor.__doc__) or None
- util.set_creation_order(self)
-
- def _comparator_factory(self, mapper):
- return self.comparator_factory(self, mapper)
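:class:`.ComparableProperty` is the implementation behind :func:`.comparable_property`; a sketch of usage, with illustrative ``MyClass``/``my_table`` where the table is assumed to have a ``word`` column:

from sqlalchemy.orm import mapper, comparable_property
from sqlalchemy.orm.interfaces import PropComparator
from sqlalchemy.sql import func

class CaseInsensitiveComparator(PropComparator):
    def __eq__(self, other):
        # MyClass.word_insensitive == "X" renders as
        # lower(word) = lower(:param)
        return func.lower(my_table.c.word) == func.lower(other)

mapper(MyClass, my_table, properties={
    'word_insensitive': comparable_property(CaseInsensitiveComparator)
})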
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dynamic.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dynamic.py
deleted file mode 100755
index d4a031d9..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/dynamic.py
+++ /dev/null
@@ -1,313 +0,0 @@
-# orm/dynamic.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Dynamic collection API.
-
-Dynamic collections act like Query() objects for read operations and support
-basic add/delete mutation.
-
-"""
-
-from sqlalchemy import log, util
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.sql import operators
-from sqlalchemy.orm import (
- attributes, object_session, util as mapperutil, strategies, object_mapper
- )
-from sqlalchemy.orm.query import Query
-from sqlalchemy.orm.util import has_identity
-from sqlalchemy.orm import collections
-
-class DynaLoader(strategies.AbstractRelationshipLoader):
- def init_class_attribute(self, mapper):
- self.is_class_level = True
-
- strategies._register_attribute(self,
- mapper,
- useobject=True,
- impl_class=DynamicAttributeImpl,
- target_mapper=self.parent_property.mapper,
- order_by=self.parent_property.order_by,
- query_class=self.parent_property.query_class
- )
-
-log.class_logger(DynaLoader)
-
-class DynamicAttributeImpl(attributes.AttributeImpl):
- uses_objects = True
- accepts_scalar_loader = False
- supports_population = False
-
- def __init__(self, class_, key, typecallable,
- dispatch,
- target_mapper, order_by, query_class=None, **kw):
- super(DynamicAttributeImpl, self).\
- __init__(class_, key, typecallable, dispatch, **kw)
- self.target_mapper = target_mapper
- self.order_by = order_by
- if not query_class:
- self.query_class = AppenderQuery
- elif AppenderMixin in query_class.mro():
- self.query_class = query_class
- else:
- self.query_class = mixin_user_query(query_class)
-
- def get(self, state, dict_, passive=attributes.PASSIVE_OFF):
- if passive is not attributes.PASSIVE_OFF:
- return self._get_collection_history(state,
- attributes.PASSIVE_NO_INITIALIZE).added_items
- else:
- return self.query_class(self, state)
-
- def get_collection(self, state, dict_, user_data=None,
- passive=attributes.PASSIVE_NO_INITIALIZE):
- if passive is not attributes.PASSIVE_OFF:
- return self._get_collection_history(state,
- passive).added_items
- else:
- history = self._get_collection_history(state, passive)
- return history.added_items + history.unchanged_items
-
- def fire_append_event(self, state, dict_, value, initiator):
- collection_history = self._modified_event(state, dict_)
- collection_history.added_items.append(value)
-
- for fn in self.dispatch.append:
- value = fn(state, value, initiator or self)
-
- if self.trackparent and value is not None:
- self.sethasparent(attributes.instance_state(value), True)
-
- def fire_remove_event(self, state, dict_, value, initiator):
- collection_history = self._modified_event(state, dict_)
- collection_history.deleted_items.append(value)
-
- if self.trackparent and value is not None:
- self.sethasparent(attributes.instance_state(value), False)
-
- for fn in self.dispatch.remove:
- fn(state, value, initiator or self)
-
- def _modified_event(self, state, dict_):
-
- if self.key not in state.committed_state:
- state.committed_state[self.key] = CollectionHistory(self, state)
-
- state.modified_event(dict_,
- self,
- attributes.NEVER_SET)
-
- # this is a hack to allow the fixtures.ComparableEntity fixture
- # to work
- dict_[self.key] = True
- return state.committed_state[self.key]
-
- def set(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
- if initiator and initiator.parent_token is self.parent_token:
- return
-
- self._set_iterable(state, dict_, value)
-
- def _set_iterable(self, state, dict_, iterable, adapter=None):
- collection_history = self._modified_event(state, dict_)
- new_values = list(iterable)
- if state.has_identity:
- old_collection = list(self.get(state, dict_))
- else:
- old_collection = []
- collections.bulk_replace(new_values, DynCollectionAdapter(self,
- state, old_collection),
- DynCollectionAdapter(self, state,
- new_values))
-
- def delete(self, *args, **kwargs):
- raise NotImplementedError()
-
- def set_committed_value(self, state, dict_, value):
- raise NotImplementedError("Dynamic attributes don't support "
- "collection population.")
-
- def get_history(self, state, dict_, passive=attributes.PASSIVE_OFF):
- c = self._get_collection_history(state, passive)
- return attributes.History(c.added_items, c.unchanged_items,
- c.deleted_items)
-
- def get_all_pending(self, state, dict_):
- c = self._get_collection_history(state, True)
- return [
- (attributes.instance_state(x), x)
- for x in
- c.added_items + c.unchanged_items + c.deleted_items
- ]
-
- def _get_collection_history(self, state, passive=attributes.PASSIVE_OFF):
- if self.key in state.committed_state:
- c = state.committed_state[self.key]
- else:
- c = CollectionHistory(self, state)
-
- if passive is attributes.PASSIVE_OFF:
- return CollectionHistory(self, state, apply_to=c)
- else:
- return c
-
- def append(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
- if initiator is not self:
- self.fire_append_event(state, dict_, value, initiator)
-
- def remove(self, state, dict_, value, initiator,
- passive=attributes.PASSIVE_OFF):
- if initiator is not self:
- self.fire_remove_event(state, dict_, value, initiator)
-
-class DynCollectionAdapter(object):
-    """The dynamic analogue to orm.collections.CollectionAdapter."""
-
- def __init__(self, attr, owner_state, data):
- self.attr = attr
- self.state = owner_state
- self.data = data
-
- def __iter__(self):
- return iter(self.data)
-
- def append_with_event(self, item, initiator=None):
- self.attr.append(self.state, self.state.dict, item, initiator)
-
- def remove_with_event(self, item, initiator=None):
- self.attr.remove(self.state, self.state.dict, item, initiator)
-
- def append_without_event(self, item):
- pass
-
- def remove_without_event(self, item):
- pass
-
-class AppenderMixin(object):
- query_class = None
-
- def __init__(self, attr, state):
- Query.__init__(self, attr.target_mapper, None)
- self.instance = instance = state.obj()
- self.attr = attr
-
- mapper = object_mapper(instance)
- prop = mapper._props[self.attr.key]
- self._criterion = prop.compare(
- operators.eq,
- instance,
- value_is_parent=True,
- alias_secondary=False)
-
- if self.attr.order_by:
- self._order_by = self.attr.order_by
-
- def __session(self):
- sess = object_session(self.instance)
- if sess is not None and self.autoflush and sess.autoflush \
- and self.instance in sess:
- sess.flush()
- if not has_identity(self.instance):
- return None
- else:
- return sess
-
- def session(self):
- return self.__session()
-    session = property(session, lambda s, x: None)
-
- def __iter__(self):
- sess = self.__session()
- if sess is None:
- return iter(self.attr._get_collection_history(
- attributes.instance_state(self.instance),
- attributes.PASSIVE_NO_INITIALIZE).added_items)
- else:
- return iter(self._clone(sess))
-
- def __getitem__(self, index):
- sess = self.__session()
- if sess is None:
- return self.attr._get_collection_history(
- attributes.instance_state(self.instance),
- attributes.PASSIVE_NO_INITIALIZE).added_items.\
- __getitem__(index)
- else:
- return self._clone(sess).__getitem__(index)
-
- def count(self):
- sess = self.__session()
- if sess is None:
- return len(self.attr._get_collection_history(
- attributes.instance_state(self.instance),
- attributes.PASSIVE_NO_INITIALIZE).added_items)
- else:
- return self._clone(sess).count()
-
- def _clone(self, sess=None):
- # note we're returning an entirely new Query class instance
- # here without any assignment capabilities; the class of this
- # query is determined by the session.
- instance = self.instance
- if sess is None:
- sess = object_session(instance)
- if sess is None:
- raise orm_exc.DetachedInstanceError(
- "Parent instance %s is not bound to a Session, and no "
- "contextual session is established; lazy load operation "
- "of attribute '%s' cannot proceed" % (
- mapperutil.instance_str(instance), self.attr.key))
-
- if self.query_class:
- query = self.query_class(self.attr.target_mapper, session=sess)
- else:
- query = sess.query(self.attr.target_mapper)
-
- query._criterion = self._criterion
- query._order_by = self._order_by
-
- return query
-
- def append(self, item):
- self.attr.append(
- attributes.instance_state(self.instance),
- attributes.instance_dict(self.instance), item, None)
-
- def remove(self, item):
- self.attr.remove(
- attributes.instance_state(self.instance),
- attributes.instance_dict(self.instance), item, None)
-
-
-class AppenderQuery(AppenderMixin, Query):
- """A dynamic query that supports basic collection storage operations."""
-
-
-def mixin_user_query(cls):
- """Return a new class with AppenderQuery functionality layered over."""
- name = 'Appender' + cls.__name__
- return type(name, (AppenderMixin, cls), {'query_class': cls})
-
-class CollectionHistory(object):
- """Overrides AttributeHistory to receive append/remove events directly."""
-
- def __init__(self, attr, state, apply_to=None):
- if apply_to:
- deleted = util.IdentitySet(apply_to.deleted_items)
- added = apply_to.added_items
- coll = AppenderQuery(attr, state).autoflush(False)
- self.unchanged_items = [o for o in util.IdentitySet(coll)
- if o not in deleted]
- self.added_items = apply_to.added_items
- self.deleted_items = apply_to.deleted_items
- else:
- self.deleted_items = []
- self.added_items = []
- self.unchanged_items = []
-
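The user-facing entry point for this module is ``relationship(..., lazy='dynamic')``, which installs ``DynaLoader``/``DynamicAttributeImpl`` and hands back an :class:`.AppenderQuery`; a sketch with illustrative ``User``/``Address`` mappings:

from sqlalchemy.orm import mapper, relationship

mapper(User, users_table, properties={
    'addresses': relationship(Address, lazy='dynamic')
})

jack = session.query(User).get(1)

# the read side behaves like Query...
found = jack.addresses.filter_by(email_address='jack@example.com').all()

# ...while basic mutation goes through AppenderMixin.append()/remove()
jack.addresses.append(Address(email_address='jack@example.com'))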
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/evaluator.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/evaluator.py
deleted file mode 100755
index f05b92a5..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/evaluator.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# orm/evaluator.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import operator
-from sqlalchemy.sql import operators, functions
-from sqlalchemy.sql import expression as sql
-
-
-class UnevaluatableError(Exception):
- pass
-
-_straight_ops = set(getattr(operators, op)
- for op in ('add', 'mul', 'sub',
- # Py2K
- 'div',
- # end Py2K
- 'mod', 'truediv',
- 'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
-
-
-_notimplemented_ops = set(getattr(operators, op)
- for op in ('like_op', 'notlike_op', 'ilike_op',
- 'notilike_op', 'between_op', 'in_op',
- 'notin_op', 'endswith_op', 'concat_op'))
-
-class EvaluatorCompiler(object):
- def process(self, clause):
- meth = getattr(self, "visit_%s" % clause.__visit_name__, None)
- if not meth:
- raise UnevaluatableError("Cannot evaluate %s" % type(clause).__name__)
- return meth(clause)
-
- def visit_grouping(self, clause):
- return self.process(clause.element)
-
- def visit_null(self, clause):
- return lambda obj: None
-
- def visit_column(self, clause):
- if 'parentmapper' in clause._annotations:
- key = clause._annotations['parentmapper'].\
- _columntoproperty[clause].key
- else:
- key = clause.key
- get_corresponding_attr = operator.attrgetter(key)
- return lambda obj: get_corresponding_attr(obj)
-
- def visit_clauselist(self, clause):
- evaluators = map(self.process, clause.clauses)
- if clause.operator is operators.or_:
- def evaluate(obj):
- has_null = False
- for sub_evaluate in evaluators:
- value = sub_evaluate(obj)
- if value:
- return True
- has_null = has_null or value is None
- if has_null:
- return None
- return False
- elif clause.operator is operators.and_:
- def evaluate(obj):
- for sub_evaluate in evaluators:
- value = sub_evaluate(obj)
- if not value:
- if value is None:
- return None
- return False
- return True
- else:
- raise UnevaluatableError("Cannot evaluate clauselist with operator %s" % clause.operator)
-
- return evaluate
-
- def visit_binary(self, clause):
-        eval_left, eval_right = map(self.process, [clause.left, clause.right])
- operator = clause.operator
- if operator is operators.is_:
- def evaluate(obj):
- return eval_left(obj) == eval_right(obj)
- elif operator is operators.isnot:
- def evaluate(obj):
- return eval_left(obj) != eval_right(obj)
- elif operator in _straight_ops:
- def evaluate(obj):
- left_val = eval_left(obj)
- right_val = eval_right(obj)
- if left_val is None or right_val is None:
- return None
-                # operands were just computed above; don't re-evaluate them
-                return operator(left_val, right_val)
- else:
- raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
- return evaluate
-
- def visit_unary(self, clause):
- eval_inner = self.process(clause.element)
- if clause.operator is operators.inv:
- def evaluate(obj):
- value = eval_inner(obj)
- if value is None:
- return None
- return not value
- return evaluate
- raise UnevaluatableError("Cannot evaluate %s with operator %s" % (type(clause).__name__, clause.operator))
-
- def visit_bindparam(self, clause):
- val = clause.value
- return lambda obj: val
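For context, this compiler backs the ``synchronize_session='evaluate'`` option of ``Query.update()`` and ``Query.delete()``, which applies the WHERE criterion in Python to objects already present in the :class:`.Session`; a sketch with an illustrative ``User`` mapping:

# updates matching rows in the database, then uses EvaluatorCompiler
# to apply the same criterion and value change to in-session objects
session.query(User).filter(User.age > 30).update(
    {'status': 'senior'}, synchronize_session='evaluate')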
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/events.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/events.py
deleted file mode 100755
index 8c12e72b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/events.py
+++ /dev/null
@@ -1,1046 +0,0 @@
-# orm/events.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""ORM event interfaces.
-
-"""
-from sqlalchemy import event, exc
-import inspect
-
-class InstrumentationEvents(event.Events):
-    """Events related to class instrumentation.
-
-    The listeners here support being established against
-    any new-style class, that is, any object that is a subclass
-    of 'type'.  Events will then be fired for that class as
-    well as all of its subclasses.  'type' itself is also
-    accepted as a target, in which case the events fire for
-    all classes.
-
- """
-
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.orm.instrumentation import instrumentation_registry
-
- if isinstance(target, type):
- return instrumentation_registry
- else:
- return None
-
- @classmethod
- def _listen(cls, target, identifier, fn, propagate=False):
- event.Events._listen(target, identifier, fn, propagate=propagate)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of instrumentation events not yet implemented")
-
- def class_instrument(self, cls):
- """Called after the given class is instrumented.
-
- To get at the :class:`.ClassManager`, use
- :func:`.manager_of_class`.
-
- """
-
- def class_uninstrument(self, cls):
- """Called before the given class is uninstrumented.
-
- To get at the :class:`.ClassManager`, use
- :func:`.manager_of_class`.
-
- """
-
-
- def attribute_instrument(self, cls, key, inst):
- """Called when an attribute is instrumented."""
-
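Per the class docstring, 'type' itself is accepted as a target so that a listener fires for every class; a minimal sketch:

from sqlalchemy import event

def on_class_instrument(cls):
    print "instrumented: %s" % cls

# 'type' as the target applies to all instrumented classes
event.listen(type, 'class_instrument', on_class_instrument)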
-class InstanceEvents(event.Events):
- """Define events specific to object lifecycle.
-
- e.g.::
-
- from sqlalchemy import event
-
- def my_load_listener(target, context):
- print "on load!"
-
- event.listen(SomeMappedClass, 'load', my_load_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
-
- from sqlalchemy.orm import mapper
-
- def some_listener(target, context):
- log.debug("Instance %s being loaded" % target)
-
- # attach to all mappers
- event.listen(mapper, 'load', some_listener)
-
- Instance events are closely related to mapper events, but
- are more specific to the instance and its instrumentation,
- rather than its system of persistence.
-
- When using :class:`.InstanceEvents`, several modifiers are
- available to the :func:`.event.listen` function.
-
- :param propagate=False: When True, the event listener should
- be applied to all inheriting mappers as well as the
- mapper which is the target of this listener.
- :param raw=False: When True, the "target" argument passed
- to applicable event listener functions will be the
- instance's :class:`.InstanceState` management
- object, rather than the mapped instance itself.
-
- """
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.orm.instrumentation import ClassManager, manager_of_class
- from sqlalchemy.orm import Mapper, mapper
-
- if isinstance(target, ClassManager):
- return target
- elif isinstance(target, Mapper):
- return target.class_manager
- elif target is mapper:
- return ClassManager
- elif isinstance(target, type):
- if issubclass(target, Mapper):
- return ClassManager
- else:
- manager = manager_of_class(target)
- if manager:
- return manager
- return None
-
- @classmethod
- def _listen(cls, target, identifier, fn, raw=False, propagate=False):
- if not raw:
- orig_fn = fn
- def wrap(state, *arg, **kw):
- return orig_fn(state.obj(), *arg, **kw)
- fn = wrap
-
- event.Events._listen(target, identifier, fn, propagate=propagate)
- if propagate:
- for mgr in target.subclass_managers(True):
- event.Events._listen(mgr, identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of instance events not yet implemented")
-
-    def first_init(self, manager, cls):
-        """Called when the first instance of a particular mapping is
-        constructed.
-
- """
-
-    def init(self, target, args, kwargs):
-        """Receive an instance when its constructor is called.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- """
-
-    def init_failure(self, target, args, kwargs):
-        """Receive an instance when its constructor has been called
-        and has raised an exception.
-
- This method is only called during a userland construction of
- an object. It is not called when an object is loaded from the
- database.
-
- """
-
- def load(self, target, context):
- """Receive an object instance after it has been created via
- ``__new__``, and after initial attribute population has
- occurred.
-
- This typically occurs when the instance is created based on
- incoming result rows, and is only called once for that
- instance's lifetime.
-
-        Note that during a result-row load, this method is called upon
-        the first row received for this instance.  Some attributes and
-        collections may or may not be loaded or even initialized,
-        depending on what's present in the result rows.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param context: the :class:`.QueryContext` corresponding to the
- current :class:`.Query` in progress. This argument may be
- ``None`` if the load does not correspond to a :class:`.Query`,
- such as during :meth:`.Session.merge`.
-
- """
-
- def refresh(self, target, context, attrs):
- """Receive an object instance after one or more attributes have
- been refreshed from a query.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param context: the :class:`.QueryContext` corresponding to the
- current :class:`.Query` in progress.
- :param attrs: iterable collection of attribute names which
- were populated, or None if all column-mapped, non-deferred
- attributes were populated.
-
- """
-
- def expire(self, target, attrs):
- """Receive an object instance after its attributes or some subset
- have been expired.
-
-        ``attrs`` is an iterable of attribute names.  If None, the
-        entire state was expired.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param attrs: iterable collection of attribute
- names which were expired, or None if all attributes were
- expired.
-
- """
-
- def resurrect(self, target):
- """Receive an object instance as it is 'resurrected' from
- garbage collection, which occurs when a "dirty" state falls
- out of scope.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
-
- """
-
- def pickle(self, target, state_dict):
- """Receive an object instance when its associated state is
- being pickled.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param state_dict: the dictionary returned by
- :class:`.InstanceState.__getstate__`, containing the state
- to be pickled.
-
- """
-
-    def unpickle(self, target, state_dict):
-        """Receive an object instance after its associated state has
- been unpickled.
-
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param state_dict: the dictionary sent to
- :class:`.InstanceState.__setstate__`, containing the state
- dictionary which was pickled.
-
- """
-
-class MapperEvents(event.Events):
- """Define events specific to mappings.
-
- e.g.::
-
- from sqlalchemy import event
-
- def my_before_insert_listener(mapper, connection, target):
- # execute a stored procedure upon INSERT,
- # apply the value to the row to be inserted
- target.calculated_value = connection.scalar(
- "select my_special_function(%d)"
- % target.special_number)
-
- # associate the listener function with SomeMappedClass,
- # to execute during the "before_insert" hook
- event.listen(SomeMappedClass, 'before_insert', my_before_insert_listener)
-
- Available targets include mapped classes, instances of
- :class:`.Mapper` (i.e. returned by :func:`.mapper`,
- :func:`.class_mapper` and similar), as well as the
- :class:`.Mapper` class and :func:`.mapper` function itself
- for global event reception::
-
- from sqlalchemy.orm import mapper
-
- def some_listener(mapper, connection, target):
- log.debug("Instance %s being inserted" % target)
-
- # attach to all mappers
- event.listen(mapper, 'before_insert', some_listener)
-
- Mapper events provide hooks into critical sections of the
- mapper, including those related to object instrumentation,
- object loading, and object persistence. In particular, the
- persistence methods :meth:`~.MapperEvents.before_insert`,
- and :meth:`~.MapperEvents.before_update` are popular
- places to augment the state being persisted - however, these
- methods operate with several significant restrictions. The
- user is encouraged to evaluate the
- :meth:`.SessionEvents.before_flush` and
- :meth:`.SessionEvents.after_flush` methods as more
- flexible and user-friendly hooks in which to apply
- additional database state during a flush.
-
- When using :class:`.MapperEvents`, several modifiers are
- available to the :func:`.event.listen` function.
-
- :param propagate=False: When True, the event listener should
- be applied to all inheriting mappers as well as the
- mapper which is the target of this listener.
- :param raw=False: When True, the "target" argument passed
- to applicable event listener functions will be the
- instance's :class:`.InstanceState` management
- object, rather than the mapped instance itself.
- :param retval=False: when True, the user-defined event function
- must have a return value, the purpose of which is either to
- control subsequent event propagation, or to otherwise alter
- the operation in progress by the mapper. Possible return
- values are:
-
- * ``sqlalchemy.orm.interfaces.EXT_CONTINUE`` - continue event
- processing normally.
- * ``sqlalchemy.orm.interfaces.EXT_STOP`` - cancel all subsequent
- event handlers in the chain.
- * other values - the return value specified by specific listeners,
- such as :meth:`~.MapperEvents.translate_row` or
- :meth:`~.MapperEvents.create_instance`.
-
- """
-
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.orm import mapper, class_mapper, Mapper
- if target is mapper:
- return Mapper
- elif isinstance(target, type):
- if issubclass(target, Mapper):
- return target
- else:
- return class_mapper(target)
- else:
- return target
-
- @classmethod
- def _listen(cls, target, identifier, fn,
- raw=False, retval=False, propagate=False):
- from sqlalchemy.orm.interfaces import EXT_CONTINUE
-
- if not raw or not retval:
- if not raw:
- meth = getattr(cls, identifier)
- try:
- target_index = inspect.getargspec(meth)[0].index('target') - 1
- except ValueError:
- target_index = None
-
- wrapped_fn = fn
- def wrap(*arg, **kw):
- if not raw and target_index is not None:
- arg = list(arg)
- arg[target_index] = arg[target_index].obj()
- if not retval:
- wrapped_fn(*arg, **kw)
- return EXT_CONTINUE
- else:
- return wrapped_fn(*arg, **kw)
- fn = wrap
-
- if propagate:
- for mapper in target.self_and_descendants:
- event.Events._listen(mapper, identifier, fn, propagate=True)
- else:
- event.Events._listen(target, identifier, fn)
-
- def instrument_class(self, mapper, class_):
- """Receive a class when the mapper is first constructed,
- before instrumentation is applied to the mapped class.
-
- This event is the earliest phase of mapper construction.
- Most attributes of the mapper are not yet initialized.
-
- This listener can generally only be applied to the :class:`.Mapper`
- class overall.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param class\_: the mapped class.
-
- """
-
- def mapper_configured(self, mapper, class_):
- """Called when the mapper for the class is fully configured.
-
- This event is the latest phase of mapper construction.
- The mapper should be in its final state.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param class\_: the mapped class.
-
- """
- # TODO: need coverage for this event
-
- def translate_row(self, mapper, context, row):
- """Perform pre-processing on the given result row and return a
- new row instance.
-
- This listener is typically registered with ``retval=True``.
- It is called when the mapper first receives a row, before
- the object identity or the instance itself has been derived
- from that row. The given row may or may not be a
- :class:`.RowProxy` object - it will always be a dictionary-like
- object which contains mapped columns as keys. The
- returned object should also be a dictionary-like object
- which recognizes mapped columns as keys.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :return: When configured with ``retval=True``, the function
- should return a dictionary-like row object, or ``EXT_CONTINUE``,
- indicating the original row should be used.
-
-
- """
-
- def create_instance(self, mapper, context, row, class_):
- """Receive a row when a new object instance is about to be
- created from that row.
-
- The method can choose to create the instance itself, or it can return
- EXT_CONTINUE to indicate normal object creation should take place.
- This listener is typically registered with ``retval=True``.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param class\_: the mapped class.
- :return: When configured with ``retval=True``, the return value
- should be a newly created instance of the mapped class,
- or ``EXT_CONTINUE`` indicating that default object construction
- should take place.
-
- """
-
- def append_result(self, mapper, context, row, target,
- result, **flags):
- """Receive an object instance before that instance is appended
- to a result list.
-
- This is a rarely used hook which can be used to alter
- the construction of a result list returned by :class:`.Query`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param target: the mapped instance being populated. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :param result: a list-like object where results are being
- appended.
- :param \**flags: Additional state information about the
- current handling of the row.
- :return: If this method is registered with ``retval=True``,
- a return value of ``EXT_STOP`` will prevent the instance
- from being appended to the given result list, whereas a
- return value of ``EXT_CONTINUE`` will result in the default
- behavior of appending the value to the result list.
-
- """
-
-
- def populate_instance(self, mapper, context, row,
- target, **flags):
- """Receive an instance before that instance has
- its attributes populated.
-
- This usually corresponds to a newly loaded instance but may
- also correspond to an already-loaded instance which has
- unloaded attributes to be populated. The method may be called
- many times for a single instance, as multiple result rows are
- used to populate eagerly loaded collections.
-
- Most usages of this hook are obsolete. For a
- generic "object has been newly created from a row" hook, use
- :meth:`.InstanceEvents.load`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param context: the :class:`.QueryContext`, which includes
- a handle to the current :class:`.Query` in progress as well
- as additional state information.
- :param row: the result row being handled. This may be
- an actual :class:`.RowProxy` or may be a dictionary containing
- :class:`.Column` objects as keys.
- :param target: the mapped instance. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: When configured with ``retval=True``, a return
- value of ``EXT_STOP`` will bypass instance population by
- the mapper. A value of ``EXT_CONTINUE`` indicates that
- default instance population should take place.
-
- """
-
- def before_insert(self, mapper, connection, target):
- """Receive an object instance before an INSERT statement
- is emitted corresponding to that instance.
-
- This event is used to modify local, non-object related
- attributes on the instance before an INSERT occurs, as well
- as to emit additional SQL statements on the given
- connection.
-
- The event is often called for a batch of objects of the
- same class before their INSERT statements are emitted at
- once in a later step. In the extremely rare case that
- this is not desirable, the :func:`.mapper` can be
- configured with ``batch=False``, which will cause
- batches of instances to be broken up into individual
- (and more poorly performing) event->persist->event
- steps.
-
- Handlers should **not** modify any attributes which are
- mapped by :func:`.relationship`, nor should they attempt
- to make any modifications to the :class:`.Session` in
- this hook (including :meth:`.Session.add`,
- :meth:`.Session.delete`, etc.) - such changes will not
- take effect. For overall changes to the "flush plan",
- use :meth:`.SessionEvents.before_flush`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit INSERT statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being persisted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
-
- """
-
- def after_insert(self, mapper, connection, target):
- """Receive an object instance after an INSERT statement
- is emitted corresponding to that instance.
-
- This event is used to modify in-Python-only
- state on the instance after an INSERT occurs, as well
- as to emit additional SQL statements on the given
- connection.
-
- The event is often called for a batch of objects of the
- same class after their INSERT statements have been
- emitted at once in a previous step. In the extremely
- rare case that this is not desirable, the
- :func:`.mapper` can be configured with ``batch=False``,
- which will cause batches of instances to be broken up
- into individual (and more poorly performing)
- event->persist->event steps.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit INSERT statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being persisted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
-
- """
-
- def before_update(self, mapper, connection, target):
- """Receive an object instance before an UPDATE statement
- is emitted corresponding to that instance.
-
- This event is used to modify local, non-object related
- attributes on the instance before an UPDATE occurs, as well
- as to emit additional SQL statements on the given
- connection.
-
- This method is called for all instances that are
- marked as "dirty", *even those which have no net changes
- to their column-based attributes*. An object is marked
- as dirty when any of its column-based attributes have a
- "set attribute" operation called or when any of its
- collections are modified. If, at update time, no
- column-based attributes have any net changes, no UPDATE
- statement will be issued. This means that an instance
- being sent to :meth:`~.MapperEvents.before_update` is
- *not* a guarantee that an UPDATE statement will be
- issued, although you can affect the outcome here by
- modifying attributes so that a net change in value does
- exist.
-
- To detect if the column-based attributes on the object have net
- changes, and will therefore generate an UPDATE statement, use
- ``object_session(instance).is_modified(instance,
- include_collections=False)``.
-
- The event is often called for a batch of objects of the
- same class before their UPDATE statements are emitted at
- once in a later step. In the extremely rare case that
- this is not desirable, the :func:`.mapper` can be
- configured with ``batch=False``, which will cause
- batches of instances to be broken up into individual
- (and more poorly performing) event->persist->event
- steps.
-
- Handlers should **not** modify any attributes which are
- mapped by :func:`.relationship`, nor should they attempt
- to make any modifications to the :class:`.Session` in
- this hook (including :meth:`.Session.add`,
- :meth:`.Session.delete`, etc.) - such changes will not
- take effect. For overall changes to the "flush plan",
- use :meth:`.SessionEvents.before_flush`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit UPDATE statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being persisted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
- """
-
- def after_update(self, mapper, connection, target):
- """Receive an object instance after an UPDATE statement
- is emitted corresponding to that instance.
-
- This event is used to modify in-Python-only
- state on the instance after an UPDATE occurs, as well
- as to emit additional SQL statements on the given
- connection.
-
- This method is called for all instances that are
- marked as "dirty", *even those which have no net changes
- to their column-based attributes*, and for which,
- consequently, no UPDATE statement was emitted. An object is marked
- as dirty when any of its column-based attributes have a
- "set attribute" operation called or when any of its
- collections are modified. If, at update time, no
- column-based attributes have any net changes, no UPDATE
- statement will be issued. This means that an instance
- being sent to :meth:`~.MapperEvents.after_update` is
- *not* a guarantee that an UPDATE statement has been
- issued.
-
- To detect if the column-based attributes on the object have net
- changes, and therefore resulted in an UPDATE statement, use
- ``object_session(instance).is_modified(instance,
- include_collections=False)``.
-
- The event is often called for a batch of objects of the
- same class after their UPDATE statements have been emitted at
- once in a previous step. In the extremely rare case that
- this is not desirable, the :func:`.mapper` can be
- configured with ``batch=False``, which will cause
- batches of instances to be broken up into individual
- (and more poorly performing) event->persist->event
- steps.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit UPDATE statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being persisted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
-
- """
-
- def before_delete(self, mapper, connection, target):
- """Receive an object instance before a DELETE statement
- is emitted corresponding to that instance.
-
- This event is used to emit additional SQL statements on
- the given connection as well as to perform application
- specific bookkeeping related to a deletion event.
-
- The event is often called for a batch of objects of the
- same class before their DELETE statements are emitted at
- once in a later step.
-
- Handlers should **not** modify any attributes which are
- mapped by :func:`.relationship`, nor should they attempt
- to make any modifications to the :class:`.Session` in
- this hook (including :meth:`.Session.add`,
- :meth:`.Session.delete`, etc.) - such changes will not
- take effect. For overall changes to the "flush plan",
- use :meth:`.SessionEvents.before_flush`.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit DELETE statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being deleted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
-
- """
-
- def after_delete(self, mapper, connection, target):
- """Receive an object instance after a DELETE statement
- has been emitted corresponding to that instance.
-
- This event is used to emit additional SQL statements on
- the given connection as well as to perform application
- specific bookkeeping related to a deletion event.
-
- The event is often called for a batch of objects of the
- same class after their DELETE statements have been emitted at
- once in a previous step.
-
- :param mapper: the :class:`.Mapper` which is the target
- of this event.
- :param connection: the :class:`.Connection` being used to
- emit DELETE statements for this instance. This
- provides a handle into the current transaction on the
- target database specific to this instance.
- :param target: the mapped instance being deleted. If
- the event is configured with ``raw=True``, this will
- instead be the :class:`.InstanceState` state-management
- object associated with the instance.
- :return: No return value is supported by this event.
-
- """
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of mapper events not yet implemented")
-
-class SessionEvents(event.Events):
- """Define events specific to :class:`.Session` lifecycle.
-
- e.g.::
-
- from sqlalchemy import event
- from sqlalchemy.orm import sessionmaker
-
- def my_before_commit(session):
- print "before commit!"
-
- Session = sessionmaker()
-
- event.listen(Session, "before_commit", my_before_commit)
-
- The :func:`~.event.listen` function will accept
- :class:`.Session` objects as well as the return result
- of :func:`.sessionmaker` and :func:`.scoped_session`.
-
- Additionally, it accepts the :class:`.Session` class which
- will apply listeners to all :class:`.Session` instances
- globally.
-
- """
-
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.orm import ScopedSession, Session
- if isinstance(target, ScopedSession):
- if not isinstance(target.session_factory, type) or \
- not issubclass(target.session_factory, Session):
- raise exc.ArgumentError(
- "Session event listen on a ScopedSession "
- "requires that its creation callable "
- "is a Session subclass.")
- return target.session_factory
- elif isinstance(target, type):
- if issubclass(target, ScopedSession):
- return Session
- elif issubclass(target, Session):
- return target
- elif isinstance(target, Session):
- return target
- else:
- return None
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of session events not yet implemented")
-
- def before_commit(self, session):
- """Execute before commit is called.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def after_commit(self, session):
- """Execute after a commit has occurred.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def after_rollback(self, session):
- """Execute after a rollback has occurred.
-
- Note that this may not be per-flush if a longer running
- transaction is ongoing."""
-
- def before_flush(self, session, flush_context, instances):
- """Execute before the flush process has started.
-
- `instances` is an optional list of objects which were passed to
- the ``flush()`` method. """
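-
- # A minimal usage sketch: inspect pending objects inside
- # 'before_flush', the per-flush hook where Session modifications
- # (add/delete) are still permitted. Session here is the
- # sessionmaker() result from the class docstring example.
- #
- # def check_new(session, flush_context, instances):
- # for obj in session.new:
- # print "about to INSERT: %r" % obj
- #
- # event.listen(Session, 'before_flush', check_new)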
-
- def after_flush(self, session, flush_context):
- """Execute after flush has completed, but before commit has been
- called.
-
- Note that the session's state is still in pre-flush, i.e. 'new',
- 'dirty', and 'deleted' lists still show pre-flush state as well
- as the history settings on instance attributes."""
-
- def after_flush_postexec(self, session, flush_context):
- """Execute after flush has completed, and after the post-exec
- state occurs.
-
- This will be when the 'new', 'dirty', and 'deleted' lists are in
- their final state. An actual commit() may or may not have
- occurred, depending on whether or not the flush started its own
- transaction or participated in a larger transaction. """
-
- def after_begin(self, session, transaction, connection):
- """Execute after a transaction is begun on a connection.
-
- `transaction` is the SessionTransaction. This method is called
- after an engine level transaction is begun on a connection. """
-
- def after_attach(self, session, instance):
- """Execute after an instance is attached to a session.
-
- This is called after an add, delete or merge. """
-
- def after_bulk_update(self, session, query, query_context, result):
- """Execute after a bulk update operation to the session.
-
- This is called after a session.query(...).update().
-
- `query` is the query object that this update operation was
- called on. `query_context` is the query context object.
- `result` is the result object returned from the bulk operation.
- """
-
- def after_bulk_delete(self, session, query, query_context, result):
- """Execute after a bulk delete operation to the session.
-
- This is called after a session.query(...).delete().
-
- `query` is the query object that this delete operation was
- called on. `query_context` is the query context object.
- `result` is the result object returned from the bulk operation.
- """
-
-
-class AttributeEvents(event.Events):
- """Define events for object attributes.
-
- These are typically defined on the class-bound descriptor for the
- target class.
-
- e.g.::
-
- from sqlalchemy import event
-
- def my_append_listener(target, value, initiator):
- print "received append event for target: %s" % target
-
- event.listen(MyClass.collection, 'append', my_append_listener)
-
- Listeners have the option to return a possibly modified version
- of the value, when the ``retval=True`` flag is passed
- to :func:`~.event.listen`::
-
- def validate_phone(target, value, oldvalue, initiator):
- "Strip non-numeric characters from a phone number"
-
- return re.sub(r'[^0-9]', '', value)
-
- # setup listener on UserContact.phone attribute, instructing
- # it to use the return value
- listen(UserContact.phone, 'set', validate_phone, retval=True)
-
- A validation function like the above can also raise an exception
- such as :class:`.ValueError` to halt the operation.
-
- Several modifiers are available to the :func:`~.event.listen` function.
-
- :param active_history=False: When True, indicates that the
- "set" event would like to receive the "old" value being
- replaced unconditionally, even if this requires firing off
- database loads. Note that ``active_history`` can also be
- set directly via :func:`.column_property` and
- :func:`.relationship`.
-
- :param propagate=False: When True, the listener function will
- be established not just for the class attribute given, but
- for attributes of the same name on all current subclasses
- of that class, as well as all future subclasses of that
- class, using an additional listener that listens for
- instrumentation events.
- :param raw=False: When True, the "target" argument to the
- event will be the :class:`.InstanceState` management
- object, rather than the mapped instance itself.
- :param retval=False: when True, the user-defined event
- listener must return the "value" argument from the
- function. This gives the listening function the opportunity
- to change the value that is ultimately used for a "set"
- or "append" event.
-
- """
-
- @classmethod
- def _accept_with(cls, target):
- from sqlalchemy.orm import interfaces
- # TODO: coverage
- if isinstance(target, interfaces.MapperProperty):
- return getattr(target.parent.class_, target.key)
- else:
- return target
-
- @classmethod
- def _listen(cls, target, identifier, fn, active_history=False,
- raw=False, retval=False,
- propagate=False):
- if active_history:
- target.dispatch._active_history = True
-
- # TODO: for removal, need to package the identity
- # of the wrapper with the original function.
-
- if not raw or not retval:
- orig_fn = fn
- def wrap(target, value, *arg):
- if not raw:
- target = target.obj()
- if not retval:
- orig_fn(target, value, *arg)
- return value
- else:
- return orig_fn(target, value, *arg)
- fn = wrap
-
- event.Events._listen(target, identifier, fn, propagate)
-
- if propagate:
- from sqlalchemy.orm.instrumentation import manager_of_class
-
- manager = manager_of_class(target.class_)
-
- for mgr in manager.subclass_managers(True):
- event.Events._listen(mgr[target.key], identifier, fn, True)
-
- @classmethod
- def _remove(cls, identifier, target, fn):
- raise NotImplementedError("Removal of attribute events not yet implemented")
-
- def append(self, target, value, initiator):
- """Receive a collection append event.
-
- :param target: the object instance receiving the event.
- If the listener is registered with ``raw=True``, this will
- be the :class:`.InstanceState` object.
- :param value: the value being appended. If this listener
- is registered with ``retval=True``, the listener
- function must return this value, or a new value which
- replaces it.
- :param initiator: the attribute implementation object
- which initiated this event.
- :return: if the event was registered with ``retval=True``,
- the given value, or a new effective value, should be returned.
-
- """
-
- def remove(self, target, value, initiator):
- """Receive a collection remove event.
-
- :param target: the object instance receiving the event.
- If the listener is registered with ``raw=True``, this will
- be the :class:`.InstanceState` object.
- :param value: the value being removed.
- :param initiator: the attribute implementation object
- which initiated this event.
- :return: No return value is defined for this event.
- """
-
- def set(self, target, value, oldvalue, initiator):
- """Receive a scalar set event.
-
- :param target: the object instance receiving the event.
- If the listener is registered with ``raw=True``, this will
- be the :class:`.InstanceState` object.
- :param value: the value being set. If this listener
- is registered with ``retval=True``, the listener
- function must return this value, or a new value which
- replaces it.
- :param oldvalue: the previous value being replaced. This
- may also be the symbol ``NEVER_SET`` or ``NO_VALUE``.
- If the listener is registered with ``active_history=True``,
- the previous value of the attribute will be loaded from
- the database if the existing value is currently unloaded
- or expired.
- :param initiator: the attribute implementation object
- which initiated this event.
- :return: if the event was registered with ``retval=True``,
- the given value, or a new effective value, should be returned.
-
- """
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/exc.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/exc.py
deleted file mode 100755
index 3bfb2708..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/exc.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# orm/exc.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""SQLAlchemy ORM exceptions."""
-
-import sqlalchemy as sa
-
-
-NO_STATE = (AttributeError, KeyError)
-"""Exception types that may be raised by instrumentation implementations."""
-
-class StaleDataError(sa.exc.SQLAlchemyError):
- """An operation encountered database state that is unaccounted for.
-
- Two conditions cause this to happen:
-
- * A flush may have attempted to update or delete rows
- and an unexpected number of rows were matched during
- the UPDATE or DELETE statement. Note that when
- version_id_col is used, rows in UPDATE or DELETE statements
- are also matched against the current known version
- identifier.
-
- * A mapped object with version_id_col was refreshed,
- and the version number coming back from the database does
- not match that of the object itself.
-
- """
-
-ConcurrentModificationError = StaleDataError
-
-
-class FlushError(sa.exc.SQLAlchemyError):
- """A invalid condition was detected during flush()."""
-
-
-class UnmappedError(sa.exc.InvalidRequestError):
- """Base for exceptions that involve expected mappings not present."""
-
-class ObjectDereferencedError(sa.exc.SQLAlchemyError):
- """An operation cannot complete due to an object being garbage collected."""
-
-class DetachedInstanceError(sa.exc.SQLAlchemyError):
- """An attempt to access unloaded attributes on a
- mapped instance that is detached."""
-
-class UnmappedInstanceError(UnmappedError):
- """An mapping operation was requested for an unknown instance."""
-
- def __init__(self, obj, msg=None):
- if not msg:
- try:
- mapper = sa.orm.class_mapper(type(obj))
- name = _safe_cls_name(type(obj))
- msg = ("Class %r is mapped, but this instance lacks "
- "instrumentation. This occurs when the instance is created "
- "before sqlalchemy.orm.mapper(%s) was called." % (name, name))
- except UnmappedClassError:
- msg = _default_unmapped(type(obj))
- if isinstance(obj, type):
- msg += (
- '; was a class (%s) supplied where an instance was '
- 'required?' % _safe_cls_name(obj))
- UnmappedError.__init__(self, msg)
-
-
-class UnmappedClassError(UnmappedError):
- """An mapping operation was requested for an unknown class."""
-
- def __init__(self, cls, msg=None):
- if not msg:
- msg = _default_unmapped(cls)
- UnmappedError.__init__(self, msg)
-
-
-class ObjectDeletedError(sa.exc.InvalidRequestError):
- """An refresh() operation failed to re-retrieve an object's row."""
-
-
-class UnmappedColumnError(sa.exc.InvalidRequestError):
- """Mapping operation was requested on an unknown column."""
-
-
-class NoResultFound(sa.exc.InvalidRequestError):
- """A database result was required but none was found."""
-
-
-class MultipleResultsFound(sa.exc.InvalidRequestError):
- """A single database result was required but more than one were found."""
-
-
-# Legacy compat until 0.6.
-sa.exc.ConcurrentModificationError = ConcurrentModificationError
-sa.exc.FlushError = FlushError
-sa.exc.UnmappedColumnError = UnmappedColumnError
-
-def _safe_cls_name(cls):
- try:
- cls_name = '.'.join((cls.__module__, cls.__name__))
- except AttributeError:
- cls_name = getattr(cls, '__name__', None)
- if cls_name is None:
- cls_name = repr(cls)
- return cls_name
-
-def _default_unmapped(cls):
- try:
- mappers = sa.orm.attributes.manager_of_class(cls).mappers
- except NO_STATE:
- mappers = {}
- except TypeError:
- mappers = {}
- name = _safe_cls_name(cls)
-
- if not mappers:
- return "Class '%s' is not mapped" % name
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/identity.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/identity.py
deleted file mode 100755
index 8f000e41..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/identity.py
+++ /dev/null
@@ -1,254 +0,0 @@
-# orm/identity.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-import weakref
-from sqlalchemy.orm import attributes
-
-
-class IdentityMap(dict):
- def __init__(self):
- self._mutable_attrs = set()
- self._modified = set()
- self._wr = weakref.ref(self)
-
- def replace(self, state):
- raise NotImplementedError()
-
- def add(self, state):
- raise NotImplementedError()
-
- def update(self, dict):
- raise NotImplementedError("IdentityMap uses add() to insert data")
-
- def clear(self):
- raise NotImplementedError("IdentityMap uses remove() to remove data")
-
- def _manage_incoming_state(self, state):
- state._instance_dict = self._wr
-
- if state.modified:
- self._modified.add(state)
- if state.manager.mutable_attributes:
- self._mutable_attrs.add(state)
-
- def _manage_removed_state(self, state):
- del state._instance_dict
- self._mutable_attrs.discard(state)
- self._modified.discard(state)
-
- def _dirty_states(self):
- return self._modified.union(s for s in self._mutable_attrs.copy()
- if s.modified)
-
- def check_modified(self):
- """return True if any InstanceStates present have been marked as 'modified'."""
-
- if self._modified:
- return True
- else:
- for state in self._mutable_attrs.copy():
- if state.modified:
- return True
- return False
-
- def has_key(self, key):
- return key in self
-
- def popitem(self):
- raise NotImplementedError("IdentityMap uses remove() to remove data")
-
- def pop(self, key, *args):
- raise NotImplementedError("IdentityMap uses remove() to remove data")
-
- def setdefault(self, key, default=None):
- raise NotImplementedError("IdentityMap uses add() to insert data")
-
- def copy(self):
- raise NotImplementedError()
-
- def __setitem__(self, key, value):
- raise NotImplementedError("IdentityMap uses add() to insert data")
-
- def __delitem__(self, key):
- raise NotImplementedError("IdentityMap uses remove() to remove data")
-
-class WeakInstanceDict(IdentityMap):
- def __init__(self):
- IdentityMap.__init__(self)
-
- def __getitem__(self, key):
- state = dict.__getitem__(self, key)
- o = state.obj()
- if o is None:
- o = state._is_really_none()
- if o is None:
- raise KeyError(key)
- return o
-
- def __contains__(self, key):
- try:
- if dict.__contains__(self, key):
- state = dict.__getitem__(self, key)
- o = state.obj()
- if o is None:
- o = state._is_really_none()
- else:
- return False
- except KeyError:
- return False
- else:
- return o is not None
-
- def contains_state(self, state):
- return dict.get(self, state.key) is state
-
- def replace(self, state):
- if dict.__contains__(self, state.key):
- existing = dict.__getitem__(self, state.key)
- if existing is not state:
- self._manage_removed_state(existing)
- else:
- return
-
- dict.__setitem__(self, state.key, state)
- self._manage_incoming_state(state)
-
- def add(self, state):
- key = state.key
- # inline of self.__contains__
- if dict.__contains__(self, key):
- try:
- existing_state = dict.__getitem__(self, key)
- if existing_state is not state:
- o = existing_state.obj()
- if o is None:
- o = existing_state._is_really_none()
- if o is not None:
- raise AssertionError("A conflicting state is already "
- "present in the identity map for key %r"
- % (key, ))
- else:
- return
- except KeyError:
- pass
- dict.__setitem__(self, key, state)
- self._manage_incoming_state(state)
-
- def get(self, key, default=None):
- state = dict.get(self, key, default)
- if state is default:
- return default
- o = state.obj()
- if o is None:
- o = state._is_really_none()
- if o is None:
- return default
- return o
-
- def _items(self):
- values = self.all_states()
- result = []
- for state in values:
- value = state.obj()
- if value is not None:
- result.append((state.key, value))
- return result
-
- def _values(self):
- values = self.all_states()
- result = []
- for state in values:
- value = state.obj()
- if value is not None:
- result.append(value)
-
- return result
-
- # Py3K
- #def items(self):
- # return iter(self._items())
- #
- #def values(self):
- # return iter(self._values())
- # Py2K
- items = _items
- def iteritems(self):
- return iter(self.items())
-
- values = _values
- def itervalues(self):
- return iter(self.values())
- # end Py2K
-
- def all_states(self):
- # Py3K
- # return list(dict.values(self))
- # Py2K
- return dict.values(self)
- # end Py2K
-
- def discard(self, state):
- st = dict.get(self, state.key, None)
- if st is state:
- dict.__delitem__(self, state.key)
- self._manage_removed_state(state)
-
- def prune(self):
- return 0
-
-class StrongInstanceDict(IdentityMap):
- def all_states(self):
- return [attributes.instance_state(o) for o in self.itervalues()]
-
- def contains_state(self, state):
- return state.key in self and attributes.instance_state(self[state.key]) is state
-
- def replace(self, state):
- if dict.__contains__(self, state.key):
- existing = dict.__getitem__(self, state.key)
- existing = attributes.instance_state(existing)
- if existing is not state:
- self._manage_removed_state(existing)
- else:
- return
-
- dict.__setitem__(self, state.key, state.obj())
- self._manage_incoming_state(state)
-
- def add(self, state):
- if state.key in self:
- if attributes.instance_state(dict.__getitem__(self,
- state.key)) is not state:
- raise AssertionError('A conflicting state is already '
- 'present in the identity map for key %r'
- % (state.key, ))
- else:
- dict.__setitem__(self, state.key, state.obj())
- self._manage_incoming_state(state)
-
- def discard(self, state):
- obj = dict.get(self, state.key, None)
- if obj is not None:
- st = attributes.instance_state(obj)
- if st is state:
- dict.__delitem__(self, state.key)
- self._manage_removed_state(state)
-
- def prune(self):
- """prune unreferenced, non-dirty states."""
-
- ref_count = len(self)
- dirty = [s.obj() for s in self.all_states() if s.modified]
-
- # work around http://bugs.python.org/issue6149
- keepers = weakref.WeakValueDictionary()
- keepers.update(self)
-
- dict.clear(self)
- dict.update(self, keepers)
- self.modified = bool(dirty)
- return ref_count - len(self)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/instrumentation.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/instrumentation.py
deleted file mode 100755
index aa051490..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/instrumentation.py
+++ /dev/null
@@ -1,691 +0,0 @@
-# orm/instrumentation.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Defines SQLAlchemy's system of class instrumentation.
-
-This module is usually not directly visible to user applications, but
-defines a large part of the ORM's interactivity.
-
-instrumentation.py deals with registration of end-user classes
-for state tracking. It interacts closely with state.py
-and attributes.py which establish per-instance and per-class-attribute
-instrumentation, respectively.
-
-SQLA's instrumentation system is completely customizable; when it is
-customized, an understanding of the general mechanics of this module is helpful.
-An example of full customization is in /examples/custom_attributes.
-
-"""
-
-
-from sqlalchemy.orm import exc, collections, events
-from operator import attrgetter, itemgetter
-from sqlalchemy import event, util
-import weakref
-from sqlalchemy.orm import state, attributes
-
-
-INSTRUMENTATION_MANAGER = '__sa_instrumentation_manager__'
-"""Attribute, elects custom instrumentation when present on a mapped class.
-
-Allows a class to specify a slightly or wildly different technique for
-tracking changes made to mapped attributes and collections.
-
-Only one instrumentation implementation is allowed in a given object
-inheritance hierarchy.
-
-The value of this attribute must be a callable and will be passed a class
-object. The callable must return one of:
-
- - An instance of an interfaces.InstrumentationManager or subclass
- - An object implementing all or some of InstrumentationManager (TODO)
- - A dictionary of callables, implementing all or some of the above (TODO)
- - An instance of a ClassManager or subclass
-
-interfaces.InstrumentationManager is public API and will remain stable
-between releases. ClassManager is not public and no guarantees are made
-about stability. Caveat emptor.
-
-This attribute is consulted by the default SQLAlchemy instrumentation
-resolution code. If custom finders are installed in the global
-instrumentation_finders list, they may or may not choose to honor this
-attribute.
-
-"""
-
-instrumentation_finders = []
-"""An extensible sequence of instrumentation implementation finding callables.
-
-Finder callables will be passed a class object. If None is returned, the
-next finder in the sequence is consulted. Otherwise the return must be an
-instrumentation factory that follows the same guidelines as
-INSTRUMENTATION_MANAGER.
-
-By default, the only finder is find_native_user_instrumentation_hook, which
-searches for INSTRUMENTATION_MANAGER. If all finders return None, standard
-ClassManager instrumentation is used.
-
-"""
-
-
-class ClassManager(dict):
- """tracks state information at the class level."""
-
- MANAGER_ATTR = '_sa_class_manager'
- STATE_ATTR = '_sa_instance_state'
-
- deferred_scalar_loader = None
-
- original_init = object.__init__
-
- def __init__(self, class_):
- self.class_ = class_
- self.factory = None # where we came from, for inheritance bookkeeping
- self.info = {}
- self.new_init = None
- self.mutable_attributes = set()
- self.local_attrs = {}
- self.originals = {}
-
- self._bases = [mgr for mgr in [
- manager_of_class(base)
- for base in self.class_.__bases__
- if isinstance(base, type)
- ] if mgr is not None]
-
- for base in self._bases:
- self.update(base)
-
- self.manage()
- self._instrument_init()
-
- dispatch = event.dispatcher(events.InstanceEvents)
-
- @property
- def is_mapped(self):
- return 'mapper' in self.__dict__
-
- @util.memoized_property
- def mapper(self):
- raise exc.UnmappedClassError(self.class_)
-
- def _attr_has_impl(self, key):
- """Return True if the given attribute is fully initialized.
-
- i.e. has an impl.
- """
-
- return key in self and self[key].impl is not None
-
- def _configure_create_arguments(self,
- _source=None,
- deferred_scalar_loader=None):
- """Accept extra **kw arguments passed to create_manager_for_cls.
-
- The current contract of ClassManager and other managers is that they
- take a single "cls" argument in their constructor (as per
- test/orm/instrumentation.py InstrumentationCollisionTest). This
- is to provide consistency with the current API of "class manager"
- callables and such which may return various ClassManager and
- ClassManager-like instances. So create_manager_for_cls sends
- in ClassManager-specific arguments via this method once the
- non-proxied ClassManager is available.
-
- """
- if _source:
- deferred_scalar_loader = _source.deferred_scalar_loader
-
- if deferred_scalar_loader:
- self.deferred_scalar_loader = deferred_scalar_loader
-
- def _subclass_manager(self, cls):
- """Create a new ClassManager for a subclass of this ClassManager's
- class.
-
- This is called automatically when attributes are instrumented so that
- the attributes can be propagated to subclasses against their own
- class-local manager, without the need for mappers etc. to have already
- pre-configured managers for the full class hierarchy. Mappers
- can post-configure the auto-generated ClassManager when needed.
-
- """
- manager = manager_of_class(cls)
- if manager is None:
- manager = _create_manager_for_cls(cls, _source=self)
- return manager
-
- def _instrument_init(self):
- # TODO: self.class_.__init__ is often the already-instrumented
- # __init__ from an instrumented superclass. We still need to make
- # our own wrapper, but it would
- # be nice to wrap the original __init__ and not our existing wrapper
- # of such, since this adds method overhead.
- self.original_init = self.class_.__init__
- self.new_init = _generate_init(self.class_, self)
- self.install_member('__init__', self.new_init)
-
- def _uninstrument_init(self):
- if self.new_init:
- self.uninstall_member('__init__')
- self.new_init = None
-
- @util.memoized_property
- def _state_constructor(self):
- self.dispatch.first_init(self, self.class_)
- if self.mutable_attributes:
- return state.MutableAttrInstanceState
- else:
- return state.InstanceState
-
- def manage(self):
- """Mark this instance as the manager for its class."""
-
- setattr(self.class_, self.MANAGER_ATTR, self)
-
- def dispose(self):
- """Dissasociate this manager from its class."""
-
- delattr(self.class_, self.MANAGER_ATTR)
-
- def manager_getter(self):
- return attrgetter(self.MANAGER_ATTR)
-
- def instrument_attribute(self, key, inst, propagated=False):
- if propagated:
- if key in self.local_attrs:
- return # don't override local attr with inherited attr
- else:
- self.local_attrs[key] = inst
- self.install_descriptor(key, inst)
- self[key] = inst
-
- for cls in self.class_.__subclasses__():
- manager = self._subclass_manager(cls)
- manager.instrument_attribute(key, inst, True)
-
- def subclass_managers(self, recursive):
- for cls in self.class_.__subclasses__():
- mgr = manager_of_class(cls)
- if mgr is not None and mgr is not self:
- yield mgr
- if recursive:
- for m in mgr.subclass_managers(True):
- yield m
-
- def post_configure_attribute(self, key):
- instrumentation_registry.dispatch.\
- attribute_instrument(self.class_, key, self[key])
-
- def uninstrument_attribute(self, key, propagated=False):
- if key not in self:
- return
- if propagated:
- if key in self.local_attrs:
- return # don't get rid of local attr
- else:
- del self.local_attrs[key]
- self.uninstall_descriptor(key)
- del self[key]
- if key in self.mutable_attributes:
- self.mutable_attributes.remove(key)
- for cls in self.class_.__subclasses__():
- manager = manager_of_class(cls)
- if manager:
- manager.uninstrument_attribute(key, True)
-
- def unregister(self):
- """remove all instrumentation established by this ClassManager."""
-
- self._uninstrument_init()
-
- self.mapper = self.dispatch = None
- self.info.clear()
-
- for key in list(self):
- if key in self.local_attrs:
- self.uninstrument_attribute(key)
-
- def install_descriptor(self, key, inst):
- if key in (self.STATE_ATTR, self.MANAGER_ATTR):
- raise KeyError("%r: requested attribute name conflicts with "
- "instrumentation attribute of the same name." %
- key)
- setattr(self.class_, key, inst)
-
- def uninstall_descriptor(self, key):
- delattr(self.class_, key)
-
- def install_member(self, key, implementation):
- if key in (self.STATE_ATTR, self.MANAGER_ATTR):
- raise KeyError("%r: requested attribute name conflicts with "
- "instrumentation attribute of the same name." %
- key)
- self.originals.setdefault(key, getattr(self.class_, key, None))
- setattr(self.class_, key, implementation)
-
- def uninstall_member(self, key):
- original = self.originals.pop(key, None)
- if original is not None:
- setattr(self.class_, key, original)
-
- def instrument_collection_class(self, key, collection_class):
- return collections.prepare_instrumentation(collection_class)
-
- def initialize_collection(self, key, state, factory):
- user_data = factory()
- adapter = collections.CollectionAdapter(
- self.get_impl(key), state, user_data)
- return adapter, user_data
-
- def is_instrumented(self, key, search=False):
- if search:
- return key in self
- else:
- return key in self.local_attrs
-
- def get_impl(self, key):
- return self[key].impl
-
- @property
- def attributes(self):
- return self.itervalues()
-
- ## InstanceState management
-
- def new_instance(self, state=None):
- instance = self.class_.__new__(self.class_)
- setattr(instance, self.STATE_ATTR,
- state or self._state_constructor(instance, self))
- return instance
-
- def setup_instance(self, instance, state=None):
- setattr(instance, self.STATE_ATTR,
- state or self._state_constructor(instance, self))
-
- def teardown_instance(self, instance):
- delattr(instance, self.STATE_ATTR)
-
- def _new_state_if_none(self, instance):
- """Install a default InstanceState if none is present.
-
- A private convenience method used by the __init__ decorator.
-
- """
- if hasattr(instance, self.STATE_ATTR):
- return False
- elif self.class_ is not instance.__class__ and \
- self.is_mapped:
- # this will create a new ClassManager for the
- # subclass, without a mapper. This is likely a
- # user error situation but allow the object
- # to be constructed, so that it is usable
- # in a non-ORM context at least.
- return self._subclass_manager(instance.__class__).\
- _new_state_if_none(instance)
- else:
- state = self._state_constructor(instance, self)
- setattr(instance, self.STATE_ATTR, state)
- return state
-
- def state_getter(self):
- """Return a (instance) -> InstanceState callable.
-
- "state getter" callables should raise either KeyError or
- AttributeError if no InstanceState could be found for the
- instance.
- """
-
- return attrgetter(self.STATE_ATTR)
-
- def dict_getter(self):
- return attrgetter('__dict__')
-
- def has_state(self, instance):
- return hasattr(instance, self.STATE_ATTR)
-
- def has_parent(self, state, key, optimistic=False):
- """TODO"""
- return self.get_impl(key).hasparent(state, optimistic=optimistic)
-
- def __nonzero__(self):
- """All ClassManagers are non-zero regardless of attribute state."""
- return True
-
- def __repr__(self):
- return '<%s of %r at %x>' % (
- self.__class__.__name__, self.class_, id(self))
-
-class _ClassInstrumentationAdapter(ClassManager):
- """Adapts a user-defined InstrumentationManager to a ClassManager."""
-
- def __init__(self, class_, override, **kw):
- self._adapted = override
- self._get_state = self._adapted.state_getter(class_)
- self._get_dict = self._adapted.dict_getter(class_)
-
- ClassManager.__init__(self, class_, **kw)
-
- def manage(self):
- self._adapted.manage(self.class_, self)
-
- def dispose(self):
- self._adapted.dispose(self.class_)
-
- def manager_getter(self):
- return self._adapted.manager_getter(self.class_)
-
- def instrument_attribute(self, key, inst, propagated=False):
- ClassManager.instrument_attribute(self, key, inst, propagated)
- if not propagated:
- self._adapted.instrument_attribute(self.class_, key, inst)
-
- def post_configure_attribute(self, key):
- super(_ClassInstrumentationAdapter, self).post_configure_attribute(key)
- self._adapted.post_configure_attribute(self.class_, key, self[key])
-
- def install_descriptor(self, key, inst):
- self._adapted.install_descriptor(self.class_, key, inst)
-
- def uninstall_descriptor(self, key):
- self._adapted.uninstall_descriptor(self.class_, key)
-
- def install_member(self, key, implementation):
- self._adapted.install_member(self.class_, key, implementation)
-
- def uninstall_member(self, key):
- self._adapted.uninstall_member(self.class_, key)
-
- def instrument_collection_class(self, key, collection_class):
- return self._adapted.instrument_collection_class(
- self.class_, key, collection_class)
-
- def initialize_collection(self, key, state, factory):
- delegate = getattr(self._adapted, 'initialize_collection', None)
- if delegate:
- return delegate(key, state, factory)
- else:
- return ClassManager.initialize_collection(self, key,
- state, factory)
-
- def new_instance(self, state=None):
- instance = self.class_.__new__(self.class_)
- self.setup_instance(instance, state)
- return instance
-
- def _new_state_if_none(self, instance):
- """Install a default InstanceState if none is present.
-
- A private convenience method used by the __init__ decorator.
- """
- if self.has_state(instance):
- return False
- else:
- return self.setup_instance(instance)
-
- def setup_instance(self, instance, state=None):
- self._adapted.initialize_instance_dict(self.class_, instance)
-
- if state is None:
- state = self._state_constructor(instance, self)
-
- # the given instance is assumed to have no state
- self._adapted.install_state(self.class_, instance, state)
- return state
-
- def teardown_instance(self, instance):
- self._adapted.remove_state(self.class_, instance)
-
- def has_state(self, instance):
- try:
- state = self._get_state(instance)
- except exc.NO_STATE:
- return False
- else:
- return True
-
- def state_getter(self):
- return self._get_state
-
- def dict_getter(self):
- return self._get_dict
-
-def register_class(class_, **kw):
- """Register class instrumentation.
-
- Returns the existing or newly created class manager.
- """
-
- manager = manager_of_class(class_)
- if manager is None:
- manager = _create_manager_for_cls(class_, **kw)
- return manager
-
-def unregister_class(class_):
- """Unregister class instrumentation."""
-
- instrumentation_registry.unregister(class_)
-
-
-def is_instrumented(instance, key):
- """Return True if the given attribute on the given instance is
- instrumented by the attributes package.
-
- This function may be used regardless of instrumentation
- applied directly to the class, i.e. no descriptors are required.
-
- """
- return manager_of_class(instance.__class__).\
- is_instrumented(key, search=True)
-
-class InstrumentationRegistry(object):
- """Private instrumentation registration singleton.
-
- All classes are routed through this registry
- when first instrumented; however, the InstrumentationRegistry
- is not actually needed unless custom ClassManagers are in use.
-
- """
-
- _manager_finders = weakref.WeakKeyDictionary()
- _state_finders = util.WeakIdentityMapping()
- _dict_finders = util.WeakIdentityMapping()
- _extended = False
-
- dispatch = event.dispatcher(events.InstrumentationEvents)
-
- def create_manager_for_cls(self, class_, **kw):
- assert class_ is not None
- assert manager_of_class(class_) is None
-
- for finder in instrumentation_finders:
- factory = finder(class_)
- if factory is not None:
- break
- else:
- factory = ClassManager
-
- existing_factories = self._collect_management_factories_for(class_).\
- difference([factory])
- if existing_factories:
- raise TypeError(
- "multiple instrumentation implementations specified "
- "in %s inheritance hierarchy: %r" % (
- class_.__name__, list(existing_factories)))
-
- manager = factory(class_)
- if not isinstance(manager, ClassManager):
- manager = _ClassInstrumentationAdapter(class_, manager)
-
- if factory != ClassManager and not self._extended:
- # somebody invoked a custom ClassManager.
- # reinstall global "getter" functions with the more
- # expensive ones.
- self._extended = True
- _install_lookup_strategy(self)
-
- manager._configure_create_arguments(**kw)
-
- manager.factory = factory
- self._manager_finders[class_] = manager.manager_getter()
- self._state_finders[class_] = manager.state_getter()
- self._dict_finders[class_] = manager.dict_getter()
-
- self.dispatch.class_instrument(class_)
-
- return manager
-
- def _collect_management_factories_for(self, cls):
- """Return a collection of factories in play or specified for a
- hierarchy.
-
- Traverses the entire inheritance graph of a cls and returns a
- collection of instrumentation factories for those classes. Factories
- are extracted from active ClassManagers, if available, otherwise
- instrumentation_finders is consulted.
-
- """
- hierarchy = util.class_hierarchy(cls)
- factories = set()
- for member in hierarchy:
- manager = manager_of_class(member)
- if manager is not None:
- factories.add(manager.factory)
- else:
- for finder in instrumentation_finders:
- factory = finder(member)
- if factory is not None:
- break
- else:
- factory = None
- factories.add(factory)
- factories.discard(None)
- return factories
-
- def manager_of_class(self, cls):
- # this is only called when alternate instrumentation
- # has been established
- if cls is None:
- return None
- try:
- finder = self._manager_finders[cls]
- except KeyError:
- return None
- else:
- return finder(cls)
-
- def state_of(self, instance):
- # this is only called when alternate instrumentation
- # has been established
- if instance is None:
- raise AttributeError("None has no persistent state.")
- try:
- return self._state_finders[instance.__class__](instance)
- except KeyError:
- raise AttributeError("%r is not instrumented" %
- instance.__class__)
-
- def dict_of(self, instance):
- # this is only called when alternate instrumentation
- # has been established
- if instance is None:
- raise AttributeError("None has no persistent state.")
- try:
- return self._dict_finders[instance.__class__](instance)
- except KeyError:
- raise AttributeError("%r is not instrumented" %
- instance.__class__)
-
- def unregister(self, class_):
- if class_ in self._manager_finders:
- manager = self.manager_of_class(class_)
- self.dispatch.class_uninstrument(class_)
- manager.unregister()
- manager.dispose()
- del self._manager_finders[class_]
- del self._state_finders[class_]
- del self._dict_finders[class_]
- if ClassManager.MANAGER_ATTR in class_.__dict__:
- delattr(class_, ClassManager.MANAGER_ATTR)
-
-instrumentation_registry = InstrumentationRegistry()
-
-
-def _install_lookup_strategy(implementation):
- """Replace global class/object management functions
- with either faster or more comprehensive implementations,
- based on whether or not extended class instrumentation
- has been detected.
-
- This function is called only by InstrumentationRegistry()
- and unit tests specific to this behavior.
-
- """
- global instance_state, instance_dict, manager_of_class
- if implementation is util.symbol('native'):
- instance_state = attrgetter(ClassManager.STATE_ATTR)
- instance_dict = attrgetter("__dict__")
- def manager_of_class(cls):
- return cls.__dict__.get(ClassManager.MANAGER_ATTR, None)
- else:
- instance_state = instrumentation_registry.state_of
- instance_dict = instrumentation_registry.dict_of
- manager_of_class = instrumentation_registry.manager_of_class
- attributes.instance_state = instance_state
- attributes.instance_dict = instance_dict
- attributes.manager_of_class = manager_of_class
-
-_create_manager_for_cls = instrumentation_registry.create_manager_for_cls
-
-# Install default "lookup" strategies. These are basically
-# very fast attrgetters for key attributes.
-# When a custom ClassManager is installed, more expensive per-class
-# strategies are copied over these.
-_install_lookup_strategy(util.symbol('native'))
-
-
-def find_native_user_instrumentation_hook(cls):
- """Find user-specified instrumentation management for a class."""
- return getattr(cls, INSTRUMENTATION_MANAGER, None)
-instrumentation_finders.append(find_native_user_instrumentation_hook)
-
-def _generate_init(class_, class_manager):
- """Build an __init__ decorator that triggers ClassManager events."""
-
- # TODO: we should use the ClassManager's notion of the
- # original '__init__' method, once ClassManager is fixed
- # to always reference that.
- original__init__ = class_.__init__
- assert original__init__
-
- # Go through some effort here and don't change the user's __init__
- # calling signature.
- # FIXME: need to juggle local names to avoid constructor argument
- # clashes.
- func_body = """\
-def __init__(%(apply_pos)s):
- new_state = class_manager._new_state_if_none(%(self_arg)s)
- if new_state:
- return new_state.initialize_instance(%(apply_kw)s)
- else:
- return original__init__(%(apply_kw)s)
-"""
- func_vars = util.format_argspec_init(original__init__, grouped=False)
- func_text = func_body % func_vars
-
- # Py3K
- #func_defaults = getattr(original__init__, '__defaults__', None)
- # Py2K
- func = getattr(original__init__, 'im_func', original__init__)
- func_defaults = getattr(func, 'func_defaults', None)
- # end Py2K
-
- env = locals().copy()
- exec func_text in env
- __init__ = env['__init__']
- __init__.__doc__ = original__init__.__doc__
- if func_defaults:
- __init__.func_defaults = func_defaults
- return __init__
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/interfaces.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/interfaces.py
deleted file mode 100755
index 7a874855..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/interfaces.py
+++ /dev/null
@@ -1,754 +0,0 @@
-# orm/interfaces.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""
-
-Contains various base classes used throughout the ORM.
-
-Defines the now deprecated ORM extension classes as well
-as ORM internals.
-
-Other than the deprecated extensions, this module and the
-classes within should be considered mostly private.
-
-"""
-
-from itertools import chain
-
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import util
-from sqlalchemy.sql import expression
-deque = util.importlater('collections').deque
-
-mapperutil = util.importlater('sqlalchemy.orm', 'util')
-
-collections = None
-
-__all__ = (
- 'AttributeExtension',
- 'EXT_CONTINUE',
- 'EXT_STOP',
- 'ExtensionOption',
- 'InstrumentationManager',
- 'LoaderStrategy',
- 'MapperExtension',
- 'MapperOption',
- 'MapperProperty',
- 'PropComparator',
- 'PropertyOption',
- 'SessionExtension',
- 'StrategizedOption',
- 'StrategizedProperty',
- 'build_path',
- )
-
-EXT_CONTINUE = util.symbol('EXT_CONTINUE')
-EXT_STOP = util.symbol('EXT_STOP')
-
-ONETOMANY = util.symbol('ONETOMANY')
-MANYTOONE = util.symbol('MANYTOONE')
-MANYTOMANY = util.symbol('MANYTOMANY')
-
-from deprecated_interfaces import AttributeExtension, SessionExtension, \
- MapperExtension
-
-
-class MapperProperty(object):
- """Manage the relationship of a ``Mapper`` to a single class
- attribute, as well as that attribute as it appears on individual
- instances of the class, including attribute instrumentation,
- attribute access, loading behavior, and dependency calculations.
- """
-
- cascade = ()
- """The set of 'cascade' attribute names.
-
- This collection is checked before the 'cascade_iterator' method is called.
-
- """
-
- def setup(self, context, entity, path, reduced_path, adapter, **kwargs):
- """Called by Query for the purposes of constructing a SQL statement.
-
- Each MapperProperty associated with the target mapper processes the
- statement referenced by the query context, adding columns and/or
- criterion as appropriate.
- """
-
- pass
-
- def create_row_processor(self, selectcontext, path, reduced_path,
- mapper, row, adapter):
- """Return a 3-tuple consisting of three row processing functions.
-
- """
- return None, None, None
-
- def cascade_iterator(self, type_, state, visited_instances=None,
- halt_on=None):
- """Iterate through instances related to the given instance for
- a particular 'cascade', starting with this MapperProperty.
-
- Return an iterator of 3-tuples (instance, mapper, state).
-
- Note that the 'cascade' collection on this MapperProperty is
- checked first for the given type before cascade_iterator is called.
-
- See PropertyLoader for the related instance implementation.
- """
-
- return iter(())
-
- def set_parent(self, parent, init):
- self.parent = parent
-
- def instrument_class(self, mapper):
- raise NotImplementedError()
-
- _compile_started = False
- _compile_finished = False
-
- def init(self):
- """Called after all mappers are created to assemble
- relationships between mappers and perform other post-mapper-creation
- initialization steps.
-
- """
- self._compile_started = True
- self.do_init()
- self._compile_finished = True
-
- @property
- def class_attribute(self):
- """Return the class-bound descriptor corresponding to this
- MapperProperty."""
-
- return getattr(self.parent.class_, self.key)
-
- def do_init(self):
- """Perform subclass-specific initialization post-mapper-creation
- steps.
-
- This is a template method called by the ``MapperProperty``
- object's init() method.
-
- """
-
- pass
-
- def post_instrument_class(self, mapper):
- """Perform instrumentation adjustments that need to occur
- after init() has completed.
-
- """
- pass
-
- def per_property_preprocessors(self, uow):
- pass
-
- def is_primary(self):
- """Return True if this ``MapperProperty``'s mapper is the
- primary mapper for its class.
-
- This flag is used to indicate that the ``MapperProperty`` can
- define attribute instrumentation for the class at the class
- level (as opposed to the individual instance level).
- """
-
- return not self.parent.non_primary
-
- def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
- """Merge the attribute represented by this ``MapperProperty``
- from source to destination object"""
-
- pass
-
- def compare(self, operator, value, **kw):
- """Return a compare operation for the columns represented by
- this ``MapperProperty`` to the given value, which may be a
- column value or an instance. 'operator' is an operator from
- the operators module, or from sql.Comparator.
-
- By default uses the PropComparator attached to this MapperProperty
- under the attribute name "comparator".
- """
-
- return operator(self.comparator, value)
-
-class PropComparator(expression.ColumnOperators):
- """Defines comparison operations for MapperProperty objects.
-
- User-defined subclasses of :class:`.PropComparator` may be created. The
- built-in Python comparison and math operator methods, such as
- ``__eq__()``, ``__lt__()``, ``__add__()``, can be overridden to provide
- new operator behavior. The custom :class:`.PropComparator` is passed to
- the mapper property via the ``comparator_factory`` argument. In each case,
- the appropriate subclass of :class:`.PropComparator` should be used::
-
- from sqlalchemy.orm.properties import \\
- ColumnProperty,\\
- CompositeProperty,\\
- RelationshipProperty
-
- class MyColumnComparator(ColumnProperty.Comparator):
- pass
-
- class MyCompositeComparator(CompositeProperty.Comparator):
- pass
-
- class MyRelationshipComparator(RelationshipProperty.Comparator):
- pass
-
- """
-
- def __init__(self, prop, mapper, adapter=None):
- self.prop = self.property = prop
- self.mapper = mapper
- self.adapter = adapter
-
- def __clause_element__(self):
- raise NotImplementedError("%r" % self)
-
- def adapted(self, adapter):
- """Return a copy of this PropComparator which will use the given
- adaption function on the local side of generated expressions.
-
- """
-
- return self.__class__(self.prop, self.mapper, adapter)
-
- @staticmethod
- def any_op(a, b, **kwargs):
- return a.any(b, **kwargs)
-
- @staticmethod
- def has_op(a, b, **kwargs):
- return a.has(b, **kwargs)
-
- @staticmethod
- def of_type_op(a, class_):
- return a.of_type(class_)
-
- def of_type(self, class_):
- """Redefine this object in terms of a polymorphic subclass.
-
- Returns a new PropComparator from which further criterion can be
- evaluated.
-
- e.g.::
-
- query.join(Company.employees.of_type(Engineer)).\\
- filter(Engineer.name=='foo')
-
- \class_
- a class or mapper indicating that criterion will be against
- this specific subclass.
-
-
- """
-
- return self.operate(PropComparator.of_type_op, class_)
-
- def any(self, criterion=None, **kwargs):
- """Return true if this collection contains any member that meets the
- given criterion.
-
- criterion
- an optional ClauseElement formulated against the member class' table
- or attributes.
-
- \**kwargs
- key/value pairs corresponding to member class attribute names which
- will be compared via equality to the corresponding values.
- """
-
- return self.operate(PropComparator.any_op, criterion, **kwargs)
-
- def has(self, criterion=None, **kwargs):
- """Return true if this element references a member which meets the
- given criterion.
-
- criterion
- an optional ClauseElement formulated against the member class' table
- or attributes.
-
- \**kwargs
- key/value pairs corresponding to member class attribute names which
- will be compared via equality to the corresponding values.
- """
-
- return self.operate(PropComparator.has_op, criterion, **kwargs)
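-
- # For illustration (hypothetical Parent/Child mapping):
- #
- # session.query(Parent).filter(Parent.children.any(Child.flag == True))
- # session.query(Child).filter(Child.parent.has(name='foo'))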
-
-
-class StrategizedProperty(MapperProperty):
- """A MapperProperty which uses selectable strategies to affect
- loading behavior.
-
- There is a single strategy selected by default. Alternate
- strategies can be selected at Query time through the usage of
- ``StrategizedOption`` objects via the Query.options() method.
-
- """
-
- def _get_context_strategy(self, context, reduced_path):
- key = ('loaderstrategy', reduced_path)
- if key in context.attributes:
- cls = context.attributes[key]
- try:
- return self._strategies[cls]
- except KeyError:
- return self.__init_strategy(cls)
- else:
- return self.strategy
-
- def _get_strategy(self, cls):
- try:
- return self._strategies[cls]
- except KeyError:
- return self.__init_strategy(cls)
-
- def __init_strategy(self, cls):
- self._strategies[cls] = strategy = cls(self)
- strategy.init()
- return strategy
-
- def setup(self, context, entity, path, reduced_path, adapter, **kwargs):
- self._get_context_strategy(context, reduced_path + (self.key,)).\
- setup_query(context, entity, path,
- reduced_path, adapter, **kwargs)
-
- def create_row_processor(self, context, path, reduced_path, mapper, row, adapter):
- return self._get_context_strategy(context, reduced_path + (self.key,)).\
- create_row_processor(context, path,
- reduced_path, mapper, row, adapter)
-
- def do_init(self):
- self._strategies = {}
- self.strategy = self.__init_strategy(self.strategy_class)
-
- def post_instrument_class(self, mapper):
- if self.is_primary() and \
- not mapper.class_manager._attr_has_impl(self.key):
- self.strategy.init_class_attribute(mapper)
-
-def build_path(entity, key, prev=None):
- if prev:
- return prev + (entity, key)
- else:
- return (entity, key)
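-
-# e.g. a "path" alternates entity and attribute-name elements:
-#
-# build_path(mapper_a, 'children')
-# # -> (mapper_a, 'children')
-# build_path(mapper_b, 'items', prev=(mapper_a, 'children'))
-# # -> (mapper_a, 'children', mapper_b, 'items')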
-
-def serialize_path(path):
- if path is None:
- return None
-
- return zip(
- [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
- [path[i] for i in range(1, len(path), 2)] + [None]
- )
-
-def deserialize_path(path):
- if path is None:
- return None
-
- p = tuple(chain(*[(mapperutil.class_mapper(cls), key) for cls, key in path]))
- if p and p[-1] is None:
- p = p[0:-1]
- return p
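-
-# Illustrative round trip for the path helpers above, assuming a
-# hypothetical mapped User class; serialize_path() reduces mapper/key
-# pairs to picklable class/key pairs, deserialize_path() reverses it:
-#
-#     path = build_path(user_mapper, 'addresses')  # (user_mapper, 'addresses')
-#     tokens = serialize_path(path)                # [(User, 'addresses')]
-#     deserialize_path(tokens) == path             # True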
-
-class MapperOption(object):
- """Describe a modification to a Query."""
-
- propagate_to_loaders = False
- """if True, indicate this option should be carried along
- Query object generated by scalar or object lazy loaders.
- """
-
- def process_query(self, query):
- pass
-
- def process_query_conditionally(self, query):
- """same as process_query(), except that this option may not
- apply to the given query.
-
- Used when secondary loaders resend existing options to a new
- Query."""
-
- self.process_query(query)
-
-class PropertyOption(MapperOption):
- """A MapperOption that is applied to a property off the mapper or
- one of its child mappers, identified by a dot-separated key. """
-
- def __init__(self, key, mapper=None):
- self.key = key
- self.mapper = mapper
-
- def process_query(self, query):
- self._process(query, True)
-
- def process_query_conditionally(self, query):
- self._process(query, False)
-
- def _process(self, query, raiseerr):
- paths, mappers = self._get_paths(query, raiseerr)
- if paths:
- self.process_query_property(query, paths, mappers)
-
- def process_query_property(self, query, paths, mappers):
- pass
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d['key'] = ret = []
- for token in util.to_list(self.key):
- if isinstance(token, PropComparator):
- ret.append((token.mapper.class_, token.key))
- else:
- ret.append(token)
- return d
-
- def __setstate__(self, state):
- ret = []
- for key in state['key']:
- if isinstance(key, tuple):
- cls, propkey = key
- ret.append(getattr(cls, propkey))
- else:
- ret.append(key)
- state['key'] = tuple(ret)
- self.__dict__ = state
-
- def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
- if mapperutil._is_aliased_class(mapper):
- searchfor = mapper
- isa = False
- else:
- searchfor = mapperutil._class_to_mapper(mapper)
- isa = True
- for ent in query._mapper_entities:
- if searchfor is ent.path_entity or isa \
- and searchfor.common_parent(ent.path_entity):
- return ent
- else:
- if raiseerr:
- if not list(query._mapper_entities):
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- raise sa_exc.ArgumentError(
- "Can't find property '%s' on any entity "
- "specified in this Query. Note the full path "
- "from root (%s) to target entity must be specified."
- % (token, ",".join(str(x) for
- x in query._mapper_entities))
- )
- else:
- return None
-
- def _find_entity_basestring(self, query, token, raiseerr):
- for ent in query._mapper_entities:
- # return only the first _MapperEntity when searching
- # based on a string prop name.  Ideally, object
- # attributes should be used to specify the entity more exactly.
- return ent
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Query has only expression-based entities - "
- "can't find property named '%s'."
- % (token, )
- )
- else:
- return None
-
- def _get_paths(self, query, raiseerr):
- path = None
- entity = None
- l = []
- mappers = []
-
- # _current_path implies we're in a
- # secondary load with an existing path
- current_path = list(query._current_path)
-
- tokens = deque(self.key)
- while tokens:
- token = tokens.popleft()
- if isinstance(token, basestring):
- sub_tokens = token.split(".", 1)
- token = sub_tokens[0]
- tokens.extendleft(sub_tokens[1:])
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[1] == token:
- current_path = current_path[2:]
- continue
- else:
- return [], []
-
- if not entity:
- entity = self._find_entity_basestring(
- query,
- token,
- raiseerr)
- if entity is None:
- return [], []
- path_element = entity.path_entity
- mapper = entity.mapper
- mappers.append(mapper)
- if mapper.has_property(token):
- prop = mapper.get_property(token)
- else:
- if raiseerr:
- raise sa_exc.ArgumentError(
- "Can't find property named '%s' on the "
- "mapped entity %s in this Query. " % (
- token, mapper)
- )
- else:
- return [], []
- elif isinstance(token, PropComparator):
- prop = token.property
-
- # exhaust current_path before
- # matching tokens to entities
- if current_path:
- if current_path[0:2] == \
- [token.parententity, prop.key]:
- current_path = current_path[2:]
- continue
- else:
- return [], []
-
- if not entity:
- entity = self._find_entity_prop_comparator(
- query,
- prop.key,
- token.parententity,
- raiseerr)
- if not entity:
- return [], []
- path_element = entity.path_entity
- mapper = entity.mapper
- mappers.append(prop.parent)
- else:
- raise sa_exc.ArgumentError(
- "mapper option expects "
- "string key or list of attributes")
- assert prop is not None
- path = build_path(path_element, prop.key, path)
- l.append(path)
- if getattr(token, '_of_type', None):
- path_element = mapper = token._of_type
- else:
- path_element = mapper = getattr(prop, 'mapper', None)
- if mapper is None and tokens:
- raise sa_exc.ArgumentError(
- "Attribute '%s' of entity '%s' does not "
- "refer to a mapped entity" %
- (token, entity)
- )
-
- if current_path:
- # ran out of tokens before
- # current_path was exhausted.
- assert not tokens
- return [], []
-
- return l, mappers
-
-class StrategizedOption(PropertyOption):
- """A MapperOption that affects which LoaderStrategy will be used
- for an operation by a StrategizedProperty.
- """
-
- chained = False
-
- def process_query_property(self, query, paths, mappers):
-
- # _get_context_strategy may receive the path in terms of a base
- # mapper - e.g. options(eagerload_all(Company.employees,
- # Engineer.machines)) in the polymorphic tests leads to
- # "(Person, 'machines')" in the path due to the mechanics of how
- # the eager strategy builds up the path
-
- if self.chained:
- for path in paths:
- query._attributes[('loaderstrategy',
- _reduce_path(path))] = \
- self.get_strategy_class()
- else:
- query._attributes[('loaderstrategy',
- _reduce_path(paths[-1]))] = \
- self.get_strategy_class()
-
- def get_strategy_class(self):
- raise NotImplementedError()
-
-def _reduce_path(path):
- """Convert a (mapper, path) path to use base mappers.
-
- This is used to allow more open ended selection of loader strategies, i.e.
- Mapper -> prop1 -> Subclass -> prop2, where Subclass is a sub-mapper
- of the mapper referenced by Mapper.prop1.
-
- """
- return tuple([i % 2 != 0 and
- element or
- getattr(element, 'base_mapper', element)
- for i, element in enumerate(path)])
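-
-# Illustrative sketch of _reduce_path(), assuming a hypothetical
-# Engineer mapper inheriting from a Person mapper: keys at odd
-# positions pass through, mappers at even positions are replaced by
-# their base_mapper:
-#
-#     _reduce_path((engineer_mapper, 'machines'))
-#     # -> (person_mapper, 'machines')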
-
-class LoaderStrategy(object):
- """Describe the loading behavior of a StrategizedProperty object.
-
- The ``LoaderStrategy`` interacts with the querying process in three
- ways:
-
- * it controls the configuration of the ``InstrumentedAttribute``
- placed on a class to handle the behavior of the attribute. this
- may involve setting up class-level callable functions to fire
- off a select operation when the attribute is first accessed
- (i.e. a lazy load)
-
- * it processes the ``QueryContext`` at statement construction time,
- where it can modify the SQL statement that is being produced.
- simple column attributes may add their represented column to the
- list of selected columns, *eager loading* properties may add
- ``LEFT OUTER JOIN`` clauses to the statement.
-
- * it processes the ``SelectionContext`` at row-processing time. This
- includes straight population of attributes corresponding to rows,
- setting instance-level lazyloader callables on newly
- constructed instances, and appending child items to scalar/collection
- attributes in response to eagerly-loaded relations.
- """
-
- def __init__(self, parent):
- self.parent_property = parent
- self.is_class_level = False
- self.parent = self.parent_property.parent
- self.key = self.parent_property.key
-
- def init(self):
- raise NotImplementedError("LoaderStrategy")
-
- def init_class_attribute(self, mapper):
- pass
-
- def setup_query(self, context, entity, path, reduced_path, adapter, **kwargs):
- pass
-
- def create_row_processor(self, selectcontext, path, reduced_path, mapper,
- row, adapter):
- """Return row processing functions which fulfill the contract
- specified by MapperProperty.create_row_processor.
-
- StrategizedProperty delegates its create_row_processor method
- directly to this method. """
-
- return None, None, None
-
- def __str__(self):
- return str(self.parent_property)
-
- def debug_callable(self, fn, logger, announcement, logfn):
- if announcement:
- logger.debug(announcement)
- if logfn:
- def call(*args, **kwargs):
- logger.debug(logfn(*args, **kwargs))
- return fn(*args, **kwargs)
- return call
- else:
- return fn
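-
-# Minimal LoaderStrategy subclass sketch (illustrative only; the real
-# strategies live in sqlalchemy.orm.strategies):
-#
-#     class NoopLoader(LoaderStrategy):
-#         def init(self):
-#             pass
-#         def create_row_processor(self, selectcontext, path,
-#                                  reduced_path, mapper, row, adapter):
-#             return None, None, None  # no instance/existing/new handlers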
-
-class InstrumentationManager(object):
- """User-defined class instrumentation extension.
-
- :class:`.InstrumentationManager` can be subclassed in order
- to change
- how class instrumentation proceeds. This class exists for
- the purposes of integration with other object management
- frameworks which would like to entirely modify the
- instrumentation methodology of the ORM, and is not intended
- for regular usage. For interception of class instrumentation
- events, see :class:`.InstrumentationEvents`.
-
- For an example of :class:`.InstrumentationManager`, see the
- example :ref:`examples_instrumentation`.
-
- The API for this class should be considered as semi-stable,
- and may change slightly with new releases.
-
- """
-
- # r4361 added a mandatory (cls) constructor to this interface.
- # given that, perhaps class_ should be dropped from all of these
- # signatures.
-
- def __init__(self, class_):
- pass
-
- def manage(self, class_, manager):
- setattr(class_, '_default_class_manager', manager)
-
- def dispose(self, class_, manager):
- delattr(class_, '_default_class_manager')
-
- def manager_getter(self, class_):
- def get(cls):
- return cls._default_class_manager
- return get
-
- def instrument_attribute(self, class_, key, inst):
- pass
-
- def post_configure_attribute(self, class_, key, inst):
- pass
-
- def install_descriptor(self, class_, key, inst):
- setattr(class_, key, inst)
-
- def uninstall_descriptor(self, class_, key):
- delattr(class_, key)
-
- def install_member(self, class_, key, implementation):
- setattr(class_, key, implementation)
-
- def uninstall_member(self, class_, key):
- delattr(class_, key)
-
- def instrument_collection_class(self, class_, key, collection_class):
- global collections
- if collections is None:
- from sqlalchemy.orm import collections
- return collections.prepare_instrumentation(collection_class)
-
- def get_instance_dict(self, class_, instance):
- return instance.__dict__
-
- def initialize_instance_dict(self, class_, instance):
- pass
-
- def install_state(self, class_, instance, state):
- setattr(instance, '_default_state', state)
-
- def remove_state(self, class_, instance):
- delattr(instance, '_default_state')
-
- def state_getter(self, class_):
- return lambda instance: getattr(instance, '_default_state')
-
- def dict_getter(self, class_):
- return lambda inst: self.get_instance_dict(class_, inst)
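-
-# Illustrative hook-up of a custom manager, assuming the
-# '__sa_instrumentation_manager__' class attribute hook used by the
-# examples_instrumentation example referenced in the docstring above:
-#
-#     class MyClass(object):
-#         __sa_instrumentation_manager__ = InstrumentationManager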
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/mapper.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/mapper.py
deleted file mode 100755
index a426e28a..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/mapper.py
+++ /dev/null
@@ -1,2825 +0,0 @@
-# orm/mapper.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Logic to map Python classes to and from selectables.
-
-Defines the :class:`~sqlalchemy.orm.mapper.Mapper` class, the central
-configurational unit which associates a class with a database table.
-
-This is a semi-private module; the main configurational API of the ORM is
-available in :mod:`~sqlalchemy.orm`.
-
-"""
-
-import types
-import weakref
-import operator
-from itertools import chain, groupby
-deque = __import__('collections').deque
-
-from sqlalchemy import sql, util, log, exc as sa_exc, event, schema
-from sqlalchemy.sql import expression, visitors, operators, util as sqlutil
-from sqlalchemy.orm import instrumentation, attributes, sync, \
- exc as orm_exc, unitofwork, events
-from sqlalchemy.orm.interfaces import MapperProperty, EXT_CONTINUE, \
- PropComparator
-
-from sqlalchemy.orm.util import _INSTRUMENTOR, _class_to_mapper, \
- _state_mapper, class_mapper, instance_str, state_str
-
-import sys
-sessionlib = util.importlater("sqlalchemy.orm", "session")
-properties = util.importlater("sqlalchemy.orm", "properties")
-
-__all__ = (
- 'Mapper',
- '_mapper_registry',
- 'class_mapper',
- 'object_mapper',
- )
-
-_mapper_registry = weakref.WeakKeyDictionary()
-_new_mappers = False
-_already_compiling = False
-_none_set = frozenset([None])
-
-_memoized_configured_property = util.group_expirable_memoized_property()
-
-# a constant returned by _get_attr_by_column to indicate
-# this mapper is not handling an attribute for a particular
-# column
-NO_ATTRIBUTE = util.symbol('NO_ATTRIBUTE')
-
-# lock used to synchronize the "mapper compile" step
-_COMPILE_MUTEX = util.threading.RLock()
-
-class Mapper(object):
- """Define the correlation of class attributes to database table
- columns.
-
- Instances of this class should be constructed via the
- :func:`~sqlalchemy.orm.mapper` function.
-
- """
- def __init__(self,
- class_,
- local_table,
- properties = None,
- primary_key = None,
- non_primary = False,
- inherits = None,
- inherit_condition = None,
- inherit_foreign_keys = None,
- extension = None,
- order_by = False,
- always_refresh = False,
- version_id_col = None,
- version_id_generator = None,
- polymorphic_on=None,
- _polymorphic_map=None,
- polymorphic_identity=None,
- concrete=False,
- with_polymorphic=None,
- allow_null_pks=None,
- allow_partial_pks=True,
- batch=True,
- column_prefix=None,
- include_properties=None,
- exclude_properties=None,
- passive_updates=True,
- eager_defaults=False,
- _compiled_cache_size=100,
- ):
- """Construct a new mapper.
-
- Mappers are normally constructed via the
- :func:`~sqlalchemy.orm.mapper` function; see that function for details.
-
- """
-
- self.class_ = util.assert_arg_type(class_, type, 'class_')
-
- self.class_manager = None
-
- self._primary_key_argument = util.to_list(primary_key)
- self.non_primary = non_primary
-
- if order_by is not False:
- self.order_by = util.to_list(order_by)
- else:
- self.order_by = order_by
-
- self.always_refresh = always_refresh
- self.version_id_col = version_id_col
- self.version_id_generator = version_id_generator or \
- (lambda x:(x or 0) + 1)
- self.concrete = concrete
- self.single = False
- self.inherits = inherits
- self.local_table = local_table
- self.inherit_condition = inherit_condition
- self.inherit_foreign_keys = inherit_foreign_keys
- self._init_properties = properties or {}
- self.delete_orphans = []
- self.batch = batch
- self.eager_defaults = eager_defaults
- self.column_prefix = column_prefix
- self.polymorphic_on = polymorphic_on
- self._dependency_processors = []
- self._validators = {}
- self.passive_updates = passive_updates
- self._clause_adapter = None
- self._requires_row_aliasing = False
- self._inherits_equated_pairs = None
- self._memoized_values = {}
- self._compiled_cache_size = _compiled_cache_size
- self._reconstructor = None
- self._deprecated_extensions = util.to_list(extension or [])
-
- if allow_null_pks:
- util.warn_deprecated(
- "the allow_null_pks option to Mapper() is "
- "deprecated. It is now allow_partial_pks=False|True, "
- "defaults to True.")
- allow_partial_pks = allow_null_pks
-
- self.allow_partial_pks = allow_partial_pks
-
- if with_polymorphic == '*':
- self.with_polymorphic = ('*', None)
- elif isinstance(with_polymorphic, (tuple, list)):
- if isinstance(with_polymorphic[0], (basestring, tuple, list)):
- self.with_polymorphic = with_polymorphic
- else:
- self.with_polymorphic = (with_polymorphic, None)
- elif with_polymorphic is not None:
- raise sa_exc.ArgumentError("Invalid setting for with_polymorphic")
- else:
- self.with_polymorphic = None
-
- if isinstance(self.local_table, expression._SelectBase):
- raise sa_exc.InvalidRequestError(
- "When mapping against a select() construct, map against "
- "an alias() of the construct instead."
- "This because several databases don't allow a "
- "SELECT from a subquery that does not have an alias."
- )
-
- if self.with_polymorphic and \
- isinstance(self.with_polymorphic[1],
- expression._SelectBase):
- self.with_polymorphic = (self.with_polymorphic[0],
- self.with_polymorphic[1].alias())
-
- # our 'polymorphic identity', a string name that when located in a
- # result set row indicates this Mapper should be used to construct
- # the object instance for that row.
- self.polymorphic_identity = polymorphic_identity
-
- # a dictionary of 'polymorphic identity' names, associating those
- # names with Mappers that will be used to construct object instances
- # upon a select operation.
- if _polymorphic_map is None:
- self.polymorphic_map = {}
- else:
- self.polymorphic_map = _polymorphic_map
-
- if include_properties is not None:
- self.include_properties = util.to_set(include_properties)
- else:
- self.include_properties = None
- if exclude_properties:
- self.exclude_properties = util.to_set(exclude_properties)
- else:
- self.exclude_properties = None
-
- self.configured = False
-
- # prevent this mapper from being constructed
- # while a configure_mappers() is occurring (and defer a configure_mappers()
- # until construction succeeds)
- _COMPILE_MUTEX.acquire()
- try:
- self._configure_inheritance()
- self._configure_legacy_instrument_class()
- self._configure_class_instrumentation()
- self._configure_listeners()
- self._configure_properties()
- self._configure_polymorphic_setter()
- self._configure_pks()
- global _new_mappers
- _new_mappers = True
- self._log("constructed")
- self._expire_memoizations()
- finally:
- _COMPILE_MUTEX.release()
-
- # major attributes initialized at the class level so that
- # they can be Sphinx-documented.
-
- local_table = None
- """The :class:`.Selectable` which this :class:`.Mapper` manages.
-
- Typically is an instance of :class:`.Table` or :class:`.Alias`.
- May also be ``None``.
-
- The "local" table is the
- selectable that the :class:`.Mapper` is directly responsible for
- managing from an attribute access and flush perspective. For
- non-inheriting mappers, the local table is the same as the
- "mapped" table. For joined-table inheritance mappers, local_table
- will be the particular sub-table of the overall "join" which
- this :class:`.Mapper` represents. If this mapper is a
- single-table inheriting mapper, local_table will be ``None``.
-
- See also :attr:`~.Mapper.mapped_table`.
-
- """
-
- mapped_table = None
- """The :class:`.Selectable` to which this :class:`.Mapper` is mapped.
-
- Typically an instance of :class:`.Table`, :class:`.Join`, or
- :class:`.Alias`.
-
- The "mapped" table is the selectable that
- the mapper selects from during queries. For non-inheriting
- mappers, the mapped table is the same as the "local" table.
- For joined-table inheritance mappers, mapped_table references the
- full :class:`.Join` representing full rows for this particular
- subclass. For single-table inheritance mappers, mapped_table
- references the base table.
-
- See also :attr:`~.Mapper.local_table`.
-
- """
-
- inherits = None
- """References the :class:`.Mapper` which this :class:`.Mapper`
- inherits from, if any.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- configured = None
- """Represent ``True`` if this :class:`.Mapper` has been configured.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- See also :func:`.configure_mappers`.
-
- """
-
- concrete = None
- """Represent ``True`` if this :class:`.Mapper` is a concrete
- inheritance mapper.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- tables = None
- """An iterable containing the collection of :class:`.Table` objects
- which this :class:`.Mapper` is aware of.
-
- If the mapper is mapped to a :class:`.Join`, or an :class:`.Alias`
- representing a :class:`.Select`, the individual :class:`.Table`
- objects that comprise the full construct will be represented here.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- primary_key = None
- """An iterable containing the collection of :class:`.Column` objects
- which comprise the 'primary key' of the mapped table, from the
- perspective of this :class:`.Mapper`.
-
- This list is against the selectable in :attr:`~.Mapper.mapped_table`. In the
- case of inheriting mappers, some columns may be managed by a superclass
- mapper. For example, in the case of a :class:`.Join`, the primary
- key is determined by all of the primary key columns across all tables
- referenced by the :class:`.Join`.
-
- The list is also not necessarily the same as the primary key column
- collection associated with the underlying tables; the :class:`.Mapper`
- features a ``primary_key`` argument that can override what the
- :class:`.Mapper` considers as primary key columns.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- class_ = None
- """The Python class which this :class:`.Mapper` maps.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- class_manager = None
- """The :class:`.ClassManager` which maintains event listeners
- and class-bound descriptors for this :class:`.Mapper`.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- single = None
- """Represent ``True`` if this :class:`.Mapper` is a single table
- inheritance mapper.
-
- :attr:`~.Mapper.local_table` will be ``None`` if this flag is set.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- non_primary = None
- """Represent ``True`` if this :class:`.Mapper` is a "non-primary"
- mapper, e.g. a mapper that is used only to select rows but not for
- persistence management.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- polymorphic_on = None
- """The :class:`.Column` specified as the ``polymorphic_on`` column
- for this :class:`.Mapper`, within an inheritance scenario.
-
- This attribute may also be of other types besides :class:`.Column`
- in a future SQLAlchemy release.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- polymorphic_map = None
- """A mapping of "polymorphic identity" identifiers mapped to :class:`.Mapper`
- instances, within an inheritance scenario.
-
- The identifiers can be of any type which is comparable to the
- type of column represented by :attr:`~.Mapper.polymorphic_on`.
-
- An inheritance chain of mappers will all reference the same
- polymorphic map object. The object is used to correlate incoming
- result rows to target mappers.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- polymorphic_identity = None
- """Represent an identifier which is matched against the :attr:`~.Mapper.polymorphic_on`
- column during result row loading.
-
- Used only with inheritance, this object can be of any type which is
- comparable to the type of column represented by :attr:`~.Mapper.polymorphic_on`.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- base_mapper = None
- """The base-most :class:`.Mapper` in an inheritance chain.
-
- In a non-inheriting scenario, this attribute will always be this
- :class:`.Mapper`. In an inheritance scenario, it references
- the :class:`.Mapper` which is parent to all other :class:`.Mapper`
- objects in the inheritance chain.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- columns = None
- """A collection of :class:`.Column` or other scalar expression
- objects maintained by this :class:`.Mapper`.
-
- The collection behaves the same as that of the ``c`` attribute on
- any :class:`.Table` object, except that only those columns included in
- this mapping are present, and are keyed based on the attribute name
- defined in the mapping, not necessarily the ``key`` attribute of the
- :class:`.Column` itself. Additionally, scalar expressions mapped
- by :func:`.column_property` are also present here.
-
- This is a *read only* attribute determined during mapper construction.
- Behavior is undefined if directly modified.
-
- """
-
- c = None
- """A synonym for :attr:`~.Mapper.columns`."""
-
- dispatch = event.dispatcher(events.MapperEvents)
-
- def _configure_inheritance(self):
- """Configure settings related to inherting and/or inherited mappers
- being present."""
-
- # a set of all mappers which inherit from this one.
- self._inheriting_mappers = set()
-
- if self.inherits:
- if isinstance(self.inherits, type):
- self.inherits = class_mapper(self.inherits, compile=False)
- if not issubclass(self.class_, self.inherits.class_):
- raise sa_exc.ArgumentError(
- "Class '%s' does not inherit from '%s'" %
- (self.class_.__name__, self.inherits.class_.__name__))
- if self.non_primary != self.inherits.non_primary:
- np = not self.non_primary and "primary" or "non-primary"
- raise sa_exc.ArgumentError(
- "Inheritance of %s mapper for class '%s' is "
- "only allowed from a %s mapper" %
- (np, self.class_.__name__, np))
- # inherit_condition is optional.
- if self.local_table is None:
- self.local_table = self.inherits.local_table
- self.mapped_table = self.inherits.mapped_table
- self.single = True
- elif not self.local_table is self.inherits.local_table:
- if self.concrete:
- self.mapped_table = self.local_table
- for mapper in self.iterate_to_root():
- if mapper.polymorphic_on is not None:
- mapper._requires_row_aliasing = True
- else:
- if self.inherit_condition is None:
- # figure out inherit condition from our table to the
- # immediate table of the inherited mapper, not its
- # full table which could pull in other stuff we don't
- # want (allows test/inheritance.InheritTest4 to pass)
- self.inherit_condition = sqlutil.join_condition(
- self.inherits.local_table,
- self.local_table)
- self.mapped_table = sql.join(
- self.inherits.mapped_table,
- self.local_table,
- self.inherit_condition)
-
- fks = util.to_set(self.inherit_foreign_keys)
- self._inherits_equated_pairs = sqlutil.criterion_as_pairs(
- self.mapped_table.onclause,
- consider_as_foreign_keys=fks)
- else:
- self.mapped_table = self.local_table
-
- if self.polymorphic_identity is not None and not self.concrete:
- self._identity_class = self.inherits._identity_class
- else:
- self._identity_class = self.class_
-
- if self.version_id_col is None:
- self.version_id_col = self.inherits.version_id_col
- self.version_id_generator = self.inherits.version_id_generator
- elif self.inherits.version_id_col is not None and \
- self.version_id_col is not self.inherits.version_id_col:
- util.warn(
- "Inheriting version_id_col '%s' does not match inherited "
- "version_id_col '%s' and will not automatically populate "
- "the inherited versioning column. "
- "version_id_col should only be specified on "
- "the base-most mapper that includes versioning." %
- (self.version_id_col.description,
- self.inherits.version_id_col.description)
- )
-
- if self.order_by is False and \
- not self.concrete and \
- self.inherits.order_by is not False:
- self.order_by = self.inherits.order_by
-
- self.polymorphic_map = self.inherits.polymorphic_map
- self.batch = self.inherits.batch
- self.inherits._inheriting_mappers.add(self)
- self.base_mapper = self.inherits.base_mapper
- self.passive_updates = self.inherits.passive_updates
- self._all_tables = self.inherits._all_tables
-
- if self.polymorphic_identity is not None:
- self.polymorphic_map[self.polymorphic_identity] = self
-
- if self.polymorphic_on is None:
- for mapper in self.iterate_to_root():
- # try to set up polymorphic on using
- # corresponding_column(); else leave
- # as None
- if mapper.polymorphic_on is not None:
- self.polymorphic_on = \
- self.mapped_table.corresponding_column(
- mapper.polymorphic_on)
- break
- else:
- self._all_tables = set()
- self.base_mapper = self
- self.mapped_table = self.local_table
- if self.polymorphic_identity is not None:
- self.polymorphic_map[self.polymorphic_identity] = self
- self._identity_class = self.class_
-
- if self.mapped_table is None:
- raise sa_exc.ArgumentError(
- "Mapper '%s' does not have a mapped_table specified."
- % self)
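-
- # Illustrative inheritance configuration consumed by the method
- # above, assuming hypothetical Person/Engineer classes and tables:
- #
- #     mapper(Person, person_table,
- #            polymorphic_on=person_table.c.type,
- #            polymorphic_identity='person')
- #     mapper(Engineer, engineer_table, inherits=Person,
- #            polymorphic_identity='engineer')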
-
- def _configure_legacy_instrument_class(self):
-
- if self.inherits:
- self.dispatch._update(self.inherits.dispatch)
- super_extensions = set(chain(*[m._deprecated_extensions
- for m in self.inherits.iterate_to_root()]))
- else:
- super_extensions = set()
-
- for ext in self._deprecated_extensions:
- if ext not in super_extensions:
- ext._adapt_instrument_class(self, ext)
-
- def _configure_listeners(self):
- if self.inherits:
- super_extensions = set(chain(*[m._deprecated_extensions
- for m in self.inherits.iterate_to_root()]))
- else:
- super_extensions = set()
-
- for ext in self._deprecated_extensions:
- if ext not in super_extensions:
- ext._adapt_listener(self, ext)
-
- if self.inherits:
- self.class_manager.dispatch._update(
- self.inherits.class_manager.dispatch)
-
- def _configure_class_instrumentation(self):
- """If this mapper is to be a primary mapper (i.e. the
- non_primary flag is not set), associate this Mapper with the
- given class_ and entity name.
-
- Subsequent calls to ``class_mapper()`` for the class_/entity
- name combination will return this mapper. Also decorate the
- `__init__` method on the mapped class to include optional
- auto-session attachment logic.
-
- """
- manager = attributes.manager_of_class(self.class_)
-
- if self.non_primary:
- if not manager or not manager.is_mapped:
- raise sa_exc.InvalidRequestError(
- "Class %s has no primary mapper configured. Configure "
- "a primary mapper first before setting up a non primary "
- "Mapper." % self.class_)
- self.class_manager = manager
- self._identity_class = manager.mapper._identity_class
- _mapper_registry[self] = True
- return
-
- if manager is not None:
- assert manager.class_ is self.class_
- if manager.is_mapped:
- raise sa_exc.ArgumentError(
- "Class '%s' already has a primary mapper defined. "
- "Use non_primary=True to "
- "create a non primary Mapper. clear_mappers() will "
- "remove *all* current mappers from all classes." %
- self.class_)
- #else:
- # a ClassManager may already exist as
- # ClassManager.instrument_attribute() creates
- # new managers for each subclass if they don't yet exist.
-
- _mapper_registry[self] = True
-
- self.dispatch.instrument_class(self, self.class_)
-
- if manager is None:
- manager = instrumentation.register_class(self.class_,
- deferred_scalar_loader = _load_scalar_attributes
- )
-
- self.class_manager = manager
-
- manager.mapper = self
-
- # The remaining members can be added by any mapper.
- if manager.info.get(_INSTRUMENTOR, False):
- return
-
- event.listen(manager, 'first_init', _event_on_first_init, raw=True)
- event.listen(manager, 'init', _event_on_init, raw=True)
- event.listen(manager, 'resurrect', _event_on_resurrect, raw=True)
-
- for key, method in util.iterate_attributes(self.class_):
- if isinstance(method, types.FunctionType):
- if hasattr(method, '__sa_reconstructor__'):
- self._reconstructor = method
- event.listen(manager, 'load', _event_on_load, raw=True)
- elif hasattr(method, '__sa_validators__'):
- for name in method.__sa_validators__:
- self._validators[name] = method
-
- manager.info[_INSTRUMENTOR] = self
-
- @util.deprecated("0.7", message=":meth:`.Mapper.compile` "
- "is replaced by :func:`.configure_mappers`")
- def compile(self):
- """Initialize the inter-mapper relationships of all mappers that
- have been constructed thus far.
-
- """
- configure_mappers()
- return self
-
-
- @property
- @util.deprecated("0.7", message=":attr:`.Mapper.compiled` "
- "is replaced by :attr:`.Mapper.configured`")
- def compiled(self):
- return self.configured
-
- def dispose(self):
- # Disable any attribute-based compilation.
- self.configured = True
-
- if hasattr(self, '_configure_failed'):
- del self._configure_failed
-
- if not self.non_primary and \
- self.class_manager.is_mapped and \
- self.class_manager.mapper is self:
- instrumentation.unregister_class(self.class_)
-
- def _configure_pks(self):
-
- self.tables = sqlutil.find_tables(self.mapped_table)
-
- self._pks_by_table = {}
- self._cols_by_table = {}
-
- all_cols = util.column_set(chain(*[
- col.proxy_set for col in
- self._columntoproperty]))
-
- pk_cols = util.column_set(c for c in all_cols if c.primary_key)
-
- # identify primary key columns which are also mapped by this mapper.
- tables = set(self.tables + [self.mapped_table])
- self._all_tables.update(tables)
- for t in tables:
- if t.primary_key and pk_cols.issuperset(t.primary_key):
- # ordering is important since it determines the ordering of
- # mapper.primary_key (and therefore query.get())
- self._pks_by_table[t] =\
- util.ordered_column_set(t.primary_key).\
- intersection(pk_cols)
- self._cols_by_table[t] = \
- util.ordered_column_set(t.c).\
- intersection(all_cols)
-
- # determine cols that aren't expressed within our tables; mark these
- # as "read only" properties which are refreshed upon INSERT/UPDATE
- self._readonly_props = set(
- self._columntoproperty[col]
- for col in self._columntoproperty
- if not hasattr(col, 'table') or
- col.table not in self._cols_by_table)
-
- # if explicit PK argument sent, add those columns to the
- # primary key mappings
- if self._primary_key_argument:
- for k in self._primary_key_argument:
- if k.table not in self._pks_by_table:
- self._pks_by_table[k.table] = util.OrderedSet()
- self._pks_by_table[k.table].add(k)
-
- # otherwise, see that we got a full PK for the mapped table
- elif self.mapped_table not in self._pks_by_table or \
- len(self._pks_by_table[self.mapped_table]) == 0:
- raise sa_exc.ArgumentError(
- "Mapper %s could not assemble any primary "
- "key columns for mapped table '%s'" %
- (self, self.mapped_table.description))
- elif self.local_table not in self._pks_by_table and \
- isinstance(self.local_table, schema.Table):
- util.warn("Could not assemble any primary "
- "keys for locally mapped table '%s' - "
- "no rows will be persisted in this Table."
- % self.local_table.description)
-
- if self.inherits and \
- not self.concrete and \
- not self._primary_key_argument:
- # if inheriting, the "primary key" for this mapper is
- # that of the inheriting (unless concrete or explicit)
- self.primary_key = self.inherits.primary_key
- else:
- # determine primary key from argument or mapped_table pks -
- # reduce to the minimal set of columns
- if self._primary_key_argument:
- primary_key = sqlutil.reduce_columns(
- [self.mapped_table.corresponding_column(c) for c in
- self._primary_key_argument],
- ignore_nonexistent_tables=True)
- else:
- primary_key = sqlutil.reduce_columns(
- self._pks_by_table[self.mapped_table],
- ignore_nonexistent_tables=True)
-
- if len(primary_key) == 0:
- raise sa_exc.ArgumentError(
- "Mapper %s could not assemble any primary "
- "key columns for mapped table '%s'" %
- (self, self.mapped_table.description))
-
- self.primary_key = tuple(primary_key)
- self._log("Identified primary key columns: %s", primary_key)
-
- def _configure_properties(self):
-
- # Column and other ClauseElement objects which are mapped
- self.columns = self.c = util.OrderedProperties()
-
- # object attribute names mapped to MapperProperty objects
- self._props = util.OrderedDict()
-
- # table columns mapped to lists of MapperProperty objects
- # using a list allows a single column to be defined as
- # populating multiple object attributes
- self._columntoproperty = _ColumnMapping(self)
-
- # load custom properties
- if self._init_properties:
- for key, prop in self._init_properties.iteritems():
- self._configure_property(key, prop, False)
-
- # pull properties from the inherited mapper if any.
- if self.inherits:
- for key, prop in self.inherits._props.iteritems():
- if key not in self._props and \
- not self._should_exclude(key, key, local=False, column=None):
- self._adapt_inherited_property(key, prop, False)
-
- # create properties for each column in the mapped table,
- # for those columns which don't already map to a property
- for column in self.mapped_table.columns:
- if column in self._columntoproperty:
- continue
-
- column_key = (self.column_prefix or '') + column.key
-
- if self._should_exclude(
- column.key, column_key,
- local=self.local_table.c.contains_column(column),
- column=column
- ):
- continue
-
- # adjust the "key" used for this column to that
- # of the inheriting mapper
- for mapper in self.iterate_to_root():
- if column in mapper._columntoproperty:
- column_key = mapper._columntoproperty[column].key
-
- self._configure_property(column_key,
- column,
- init=False,
- setparent=True)
-
- def _configure_polymorphic_setter(self):
- """Configure an attribute on the mapper representing the
- 'polymorphic_on' column, if applicable, and not
- already generated by _configure_properties (which is typical).
-
- Also create a setter function which will assign this
- attribute to the value of the 'polymorphic_identity'
- upon instance construction, also if applicable. This
- routine will run when an instance is created.
-
- """
- # do a special check for the "discriminiator" column, as it
- # may only be present in the 'with_polymorphic' selectable
- # but we need it for the base mapper
- setter = False
-
- if self.polymorphic_on is not None:
- setter = True
-
- if self.polymorphic_on not in self._columntoproperty:
- col = self.mapped_table.corresponding_column(self.polymorphic_on)
- if col is None:
- setter = False
- instrument = False
- col = self.polymorphic_on
- if self.with_polymorphic is None \
- or self.with_polymorphic[1].corresponding_column(col) \
- is None:
- raise sa_exc.InvalidRequestError("Could not map polymorphic_on column "
- "'%s' to the mapped table - polymorphic "
- "loads will not function properly"
- % col.description)
- else:
- instrument = True
-
- if self._should_exclude(col.key, col.key, False, col):
- raise sa_exc.InvalidRequestError(
- "Cannot exclude or override the discriminator column %r" %
- col.key)
-
- self._configure_property(
- col.key,
- properties.ColumnProperty(col, _instrument=instrument),
- init=False, setparent=True)
- polymorphic_key = col.key
- else:
- polymorphic_key = self._columntoproperty[self.polymorphic_on].key
-
- if setter:
- def _set_polymorphic_identity(state):
- dict_ = state.dict
- state.get_impl(polymorphic_key).set(state, dict_,
- self.polymorphic_identity, None)
-
- self._set_polymorphic_identity = _set_polymorphic_identity
- else:
- self._set_polymorphic_identity = None
-
- def _adapt_inherited_property(self, key, prop, init):
- if not self.concrete:
- self._configure_property(key, prop, init=False, setparent=False)
- elif key not in self._props:
- self._configure_property(
- key,
- properties.ConcreteInheritedProperty(),
- init=init, setparent=True)
-
- def _configure_property(self, key, prop, init=True, setparent=True):
- self._log("_configure_property(%s, %s)", key, prop.__class__.__name__)
-
- if not isinstance(prop, MapperProperty):
- # we were passed a Column or a list of Columns;
- # generate a properties.ColumnProperty
- columns = util.to_list(prop)
- column = columns[0]
- if not expression.is_column(column):
- raise sa_exc.ArgumentError(
- "%s=%r is not an instance of MapperProperty or Column"
- % (key, prop))
-
- prop = self._props.get(key, None)
-
- if isinstance(prop, properties.ColumnProperty):
- if prop.parent is self:
- raise sa_exc.InvalidRequestError(
- "Implicitly combining column %s with column "
- "%s under attribute '%s'. Please configure one "
- "or more attributes for these same-named columns "
- "explicitly."
- % (prop.columns[-1], column, key))
-
- # existing properties.ColumnProperty from an inheriting
- # mapper. make a copy and append our column to it
- prop = prop.copy()
- prop.columns.insert(0, column)
- self._log("inserting column to existing list "
- "in properties.ColumnProperty %s" % (key))
-
- elif prop is None or isinstance(prop, properties.ConcreteInheritedProperty):
- mapped_column = []
- for c in columns:
- mc = self.mapped_table.corresponding_column(c)
- if mc is None:
- mc = self.local_table.corresponding_column(c)
- if mc is not None:
- # if the column is in the local table but not the
- # mapped table, this corresponds to adding a
- # column after the fact to the local table.
- # [ticket:1523]
- self.mapped_table._reset_exported()
- mc = self.mapped_table.corresponding_column(c)
- if mc is None:
- raise sa_exc.ArgumentError(
- "When configuring property '%s' on %s, "
- "column '%s' is not represented in the mapper's "
- "table. Use the `column_property()` function to "
- "force this column to be mapped as a read-only "
- "attribute." % (key, self, c))
- mapped_column.append(mc)
- prop = properties.ColumnProperty(*mapped_column)
- else:
- raise sa_exc.ArgumentError(
- "WARNING: when configuring property '%s' on %s, "
- "column '%s' conflicts with property '%r'. "
- "To resolve this, map the column to the class under a "
- "different name in the 'properties' dictionary. Or, "
- "to remove all awareness of the column entirely "
- "(including its availability as a foreign key), "
- "use the 'include_properties' or 'exclude_properties' "
- "mapper arguments to control specifically which table "
- "columns get mapped." %
- (key, self, column.key, prop))
-
- if isinstance(prop, properties.ColumnProperty):
- col = self.mapped_table.corresponding_column(prop.columns[0])
-
- # if the column is not present in the mapped table,
- # test if a column has been added after the fact to the
- # parent table (or their parent, etc.) [ticket:1570]
- if col is None and self.inherits:
- path = [self]
- for m in self.inherits.iterate_to_root():
- col = m.local_table.corresponding_column(prop.columns[0])
- if col is not None:
- for m2 in path:
- m2.mapped_table._reset_exported()
- col = self.mapped_table.corresponding_column(
- prop.columns[0])
- break
- path.append(m)
-
- # subquery expression, column not present in the mapped
- # selectable.
- if col is None:
- col = prop.columns[0]
-
- # column is coming in after _readonly_props was
- # initialized; check for 'readonly'
- if hasattr(self, '_readonly_props') and \
- (not hasattr(col, 'table') or
- col.table not in self._cols_by_table):
- self._readonly_props.add(prop)
-
- else:
- # if column is coming in after _cols_by_table was
- # initialized, ensure the col is in the right set
- if hasattr(self, '_cols_by_table') and \
- col.table in self._cols_by_table and \
- col not in self._cols_by_table[col.table]:
- self._cols_by_table[col.table].add(col)
-
- # if this properties.ColumnProperty represents the "polymorphic
- # discriminator" column, mark it. We'll need this when rendering
- # columns in SELECT statements.
- if not hasattr(prop, '_is_polymorphic_discriminator'):
- prop._is_polymorphic_discriminator = \
- (col is self.polymorphic_on or
- prop.columns[0] is self.polymorphic_on)
-
- self.columns[key] = col
- for col in prop.columns:
- for col in col.proxy_set:
- self._columntoproperty[col] = prop
-
- prop.key = key
-
- if setparent:
- prop.set_parent(self, init)
-
- if key in self._props and \
- getattr(self._props[key], '_mapped_by_synonym', False):
- syn = self._props[key]._mapped_by_synonym
- raise sa_exc.ArgumentError(
- "Can't call map_column=True for synonym %r=%r, "
- "a ColumnProperty already exists keyed to the name "
- "%r for column %r" % (syn, key, key, syn)
- )
-
- self._props[key] = prop
-
- if not self.non_primary:
- prop.instrument_class(self)
-
- for mapper in self._inheriting_mappers:
- mapper._adapt_inherited_property(key, prop, init)
-
- if init:
- prop.init()
- prop.post_instrument_class(self)
-
-
- def _post_configure_properties(self):
- """Call the ``init()`` method on all ``MapperProperties``
- attached to this mapper.
-
- This is a deferred configuration step which is intended
- to execute once all mappers have been constructed.
-
- """
-
- self._log("_post_configure_properties() started")
- l = [(key, prop) for key, prop in self._props.iteritems()]
- for key, prop in l:
- self._log("initialize prop %s", key)
-
- if prop.parent is self and not prop._compile_started:
- prop.init()
-
- if prop._compile_finished:
- prop.post_instrument_class(self)
-
- self._log("_post_configure_properties() complete")
- self.configured = True
-
- def add_properties(self, dict_of_properties):
- """Add the given dictionary of properties to this mapper,
- using `add_property`.
-
- """
- for key, value in dict_of_properties.iteritems():
- self.add_property(key, value)
-
- def add_property(self, key, prop):
- """Add an individual MapperProperty to this mapper.
-
- If the mapper has not been configured yet, just adds the
- property to the initial properties dictionary sent to the
- constructor. If this Mapper has already been configured, then
- the given MapperProperty is configured immediately.
-
- """
- self._init_properties[key] = prop
- self._configure_property(key, prop, init=self.configured)
- self._expire_memoizations()
-
- def _expire_memoizations(self):
- for mapper in self.iterate_to_root():
- _memoized_configured_property.expire_instance(mapper)
-
- @property
- def _log_desc(self):
- return "(" + self.class_.__name__ + \
- "|" + \
- (self.local_table is not None and
- self.local_table.description or
- str(self.local_table)) +\
- (self.non_primary and
- "|non-primary" or "") + ")"
-
- def _log(self, msg, *args):
-
- self.logger.info(
- "%s " + msg, *((self._log_desc,) + args)
- )
-
- def _log_debug(self, msg, *args):
- self.logger.debug(
- "%s " + msg, *((self._log_desc,) + args)
- )
-
- def __repr__(self):
- return '<Mapper at 0x%x; %s>' % (
- id(self), self.class_.__name__)
-
- def __str__(self):
- return "Mapper|%s|%s%s" % (
- self.class_.__name__,
- self.local_table is not None and
- self.local_table.description or None,
- self.non_primary and "|non-primary" or ""
- )
-
- def _is_orphan(self, state):
- o = False
- for mapper in self.iterate_to_root():
- for (key, cls) in mapper.delete_orphans:
- if attributes.manager_of_class(cls).has_parent(
- state, key, optimistic=bool(state.key)):
- return False
- o = o or bool(mapper.delete_orphans)
- return o
-
- def has_property(self, key):
- return key in self._props
-
- def get_property(self, key, _compile_mappers=True):
- """return a MapperProperty associated with the given key.
- """
-
- if _compile_mappers and _new_mappers:
- configure_mappers()
-
- try:
- return self._props[key]
- except KeyError:
- raise sa_exc.InvalidRequestError(
- "Mapper '%s' has no property '%s'" % (self, key))
-
- @util.deprecated('0.6.4',
- 'Call to deprecated function mapper._get_col_to_prop(). '
- 'Use mapper.get_property_by_column()')
- def _get_col_to_prop(self, col):
- return self._columntoproperty[col]
-
- def get_property_by_column(self, column):
- """Given a :class:`.Column` object, return the
- :class:`.MapperProperty` which maps this column."""
-
- return self._columntoproperty[column]
-
- @property
- def iterate_properties(self):
- """return an iterator of all MapperProperty objects."""
- if _new_mappers:
- configure_mappers()
- return self._props.itervalues()
-
- def _mappers_from_spec(self, spec, selectable):
- """given a with_polymorphic() argument, return the set of mappers it
- represents.
-
- Trims the list of mappers to just those represented within the given
- selectable, if present. This helps some more legacy-ish mappings.
-
- """
- if spec == '*':
- mappers = list(self.self_and_descendants)
- elif spec:
- mappers = [_class_to_mapper(m) for m in util.to_list(spec)]
- for m in mappers:
- if not m.isa(self):
- raise sa_exc.InvalidRequestError(
- "%r does not inherit from %r" %
- (m, self))
- else:
- mappers = []
-
- if selectable is not None:
- tables = set(sqlutil.find_tables(selectable,
- include_aliases=True))
- mappers = [m for m in mappers if m.local_table in tables]
-
- return mappers
-
- def _selectable_from_mappers(self, mappers):
- """given a list of mappers (assumed to be within this mapper's
- inheritance hierarchy), construct an outerjoin amongst those mapper's
- mapped tables.
-
- """
-
- from_obj = self.mapped_table
- for m in mappers:
- if m is self:
- continue
- if m.concrete:
- raise sa_exc.InvalidRequestError(
- "'with_polymorphic()' requires 'selectable' argument "
- "when concrete-inheriting mappers are used.")
- elif not m.single:
- from_obj = from_obj.outerjoin(m.local_table,
- m.inherit_condition)
-
- return from_obj
-
- @_memoized_configured_property
- def _single_table_criterion(self):
- if self.single and \
- self.inherits and \
- self.polymorphic_on is not None:
- return self.polymorphic_on.in_(
- m.polymorphic_identity
- for m in self.self_and_descendants)
- else:
- return None
-
- @_memoized_configured_property
- def _with_polymorphic_mappers(self):
- if not self.with_polymorphic:
- return [self]
- return self._mappers_from_spec(*self.with_polymorphic)
-
- @_memoized_configured_property
- def _with_polymorphic_selectable(self):
- if not self.with_polymorphic:
- return self.mapped_table
-
- spec, selectable = self.with_polymorphic
- if selectable is not None:
- return selectable
- else:
- return self._selectable_from_mappers(
- self._mappers_from_spec(spec, selectable))
-
- def _with_polymorphic_args(self, spec=None, selectable=False):
- if self.with_polymorphic:
- if not spec:
- spec = self.with_polymorphic[0]
- if selectable is False:
- selectable = self.with_polymorphic[1]
-
- mappers = self._mappers_from_spec(spec, selectable)
- if selectable is not None:
- return mappers, selectable
- else:
- return mappers, self._selectable_from_mappers(mappers)
-
- @_memoized_configured_property
- def _polymorphic_properties(self):
- return tuple(self._iterate_polymorphic_properties(
- self._with_polymorphic_mappers))
-
- def _iterate_polymorphic_properties(self, mappers=None):
- """Return an iterator of MapperProperty objects which will render into
- a SELECT."""
-
- if mappers is None:
- mappers = self._with_polymorphic_mappers
-
- if not mappers:
- for c in self.iterate_properties:
- yield c
- else:
- # in the polymorphic case, filter out discriminator columns
- # from other mappers, as these are sometimes dependent on that
- # mapper's polymorphic selectable (which we don't want rendered)
- for c in util.unique_list(
- chain(*[list(mapper.iterate_properties) for mapper in [self] +
- mappers])
- ):
- if getattr(c, '_is_polymorphic_discriminator', False) and \
- (self.polymorphic_on is None or
- c.columns[0] is not self.polymorphic_on):
- continue
- yield c
-
- @property
- def properties(self):
- raise NotImplementedError(
- "Public collection of MapperProperty objects is "
- "provided by the get_property() and iterate_properties "
- "accessors.")
-
- @_memoized_configured_property
- def _get_clause(self):
- """create a "get clause" based on the primary key. this is used
- by query.get() and many-to-one lazyloads to load this item
- by primary key.
-
- """
- params = [(primary_key, sql.bindparam(None, type_=primary_key.type))
- for primary_key in self.primary_key]
- return sql.and_(*[k==v for (k, v) in params]), \
- util.column_dict(params)
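-
- # For a single-column primary key such as a hypothetical users.c.id,
- # the clause above renders roughly as "users.id = :param_1", with the
- # bind parameter keyed to the column for population by query.get().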
-
- @_memoized_configured_property
- def _equivalent_columns(self):
- """Create a map of all *equivalent* columns, based on
- the determination of column pairs that are equated to
- one another based on inherit condition. This is designed
- to work with the queries that util.polymorphic_union
- comes up with, which often don't include the columns from
- the base table directly (including the subclass table columns
- only).
-
- The resulting structure is a dictionary of columns mapped
- to lists of equivalent columns, i.e.
-
- {
- tablea.col1:
- set([tableb.col1, tablec.col1]),
- tablea.col2:
- set([tabled.col2])
- }
-
- """
- result = util.column_dict()
- def visit_binary(binary):
- if binary.operator == operators.eq:
- if binary.left in result:
- result[binary.left].add(binary.right)
- else:
- result[binary.left] = util.column_set((binary.right,))
- if binary.right in result:
- result[binary.right].add(binary.left)
- else:
- result[binary.right] = util.column_set((binary.left,))
- for mapper in self.base_mapper.self_and_descendants:
- if mapper.inherit_condition is not None:
- visitors.traverse(
- mapper.inherit_condition, {},
- {'binary':visit_binary})
-
- return result
-
- def _is_userland_descriptor(self, obj):
- return not isinstance(obj,
- (MapperProperty, attributes.QueryableAttribute)) and \
- hasattr(obj, '__get__') and not \
- isinstance(obj.__get__(None, obj),
- attributes.QueryableAttribute)
-
-
- def _should_exclude(self, name, assigned_name, local, column):
- """determine whether a particular property should be implicitly
- present on the class.
-
- This occurs when properties are propagated from an inherited class, or
- are applied from the columns present in the mapped table.
-
- """
-
- # check for descriptors, either local or from
- # an inherited class
- if local:
- if self.class_.__dict__.get(assigned_name, None) is not None \
- and self._is_userland_descriptor(
- self.class_.__dict__[assigned_name]):
- return True
- else:
- if getattr(self.class_, assigned_name, None) is not None \
- and self._is_userland_descriptor(
- getattr(self.class_, assigned_name)):
- return True
-
- if self.include_properties is not None and \
- name not in self.include_properties and \
- (column is None or column not in self.include_properties):
- self._log("not including property %s" % (name))
- return True
-
- if self.exclude_properties is not None and \
- (
- name in self.exclude_properties or \
- (column is not None and column in self.exclude_properties)
- ):
- self._log("excluding property %s" % (name))
- return True
-
- return False
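-
- # Illustrative effect of include_properties/exclude_properties,
- # using hypothetical names:
- #
- #     mapper(User, users_table, exclude_properties=['password'])
- #     # users_table.c.password is then not mapped; _should_exclude()
- #     # returns True for it.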
-
- def common_parent(self, other):
- """Return true if the given mapper shares a common inherited parent as
- this mapper."""
-
- return self.base_mapper is other.base_mapper
-
- def _canload(self, state, allow_subtypes):
- s = self.primary_mapper()
- if self.polymorphic_on is not None or allow_subtypes:
- return _state_mapper(state).isa(s)
- else:
- return _state_mapper(state) is s
-
- def isa(self, other):
- """Return True if the this mapper inherits from the given mapper."""
-
- m = self
- while m and m is not other:
- m = m.inherits
- return bool(m)
-
- def iterate_to_root(self):
- m = self
- while m:
- yield m
- m = m.inherits
-
- @_memoized_configured_property
- def self_and_descendants(self):
- """The collection including this mapper and all descendant mappers.
-
- This includes not just the immediately inheriting mappers but
- all their inheriting mappers as well.
-
- """
- descendants = []
- stack = deque([self])
- while stack:
- item = stack.popleft()
- descendants.append(item)
- stack.extend(item._inheriting_mappers)
- return tuple(descendants)
-
- def polymorphic_iterator(self):
- """Iterate through the collection including this mapper and
- all descendant mappers.
-
- This includes not just the immediately inheriting mappers but
- all their inheriting mappers as well.
-
- To iterate through an entire hierarchy, use
- ``mapper.base_mapper.polymorphic_iterator()``.
-
- """
- return iter(self.self_and_descendants)
-
- def primary_mapper(self):
- """Return the primary mapper corresponding to this mapper's class key
- (class)."""
-
- return self.class_manager.mapper
-
- @property
- def primary_base_mapper(self):
- return self.class_manager.mapper.base_mapper
-
- def identity_key_from_row(self, row, adapter=None):
- """Return an identity-map key for use in storing/retrieving an
- item from the identity map.
-
- row
- A ``sqlalchemy.engine.base.RowProxy`` instance or a
- dictionary corresponding result-set ``ColumnElement``
- instances to their values within a row.
-
- """
- pk_cols = self.primary_key
- if adapter:
- pk_cols = [adapter.columns[c] for c in pk_cols]
-
- return self._identity_class, \
- tuple(row[column] for column in pk_cols)
-
- def identity_key_from_primary_key(self, primary_key):
- """Return an identity-map key for use in storing/retrieving an
- item from an identity map.
-
- primary_key
- A list of values indicating the identifier.
-
- """
- return self._identity_class, tuple(primary_key)
-
- def identity_key_from_instance(self, instance):
- """Return the identity key for the given instance, based on
- its primary key attributes.
-
- This value is typically also found on the instance state under the
- attribute name `key`.
-
- """
- return self.identity_key_from_primary_key(
- self.primary_key_from_instance(instance))
-
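-    # Illustrative sketch (not part of the original source; ``User`` is a
-    # hypothetical mapped class with a single integer primary key column):
-    # the identity-key methods above all produce a tuple of the form
-    # ``(class, (pk_value,))``:
-    #
-    #     key = some_mapper.identity_key_from_primary_key([5])
-    #     assert key == (User, (5,))
-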
- def _identity_key_from_state(self, state):
- dict_ = state.dict
- manager = state.manager
- return self._identity_class, tuple([
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_, attributes.PASSIVE_OFF)
- for col in self.primary_key
- ])
-
- def primary_key_from_instance(self, instance):
- """Return the list of primary key values for the given
- instance.
-
- """
- state = attributes.instance_state(instance)
- return self._primary_key_from_state(state)
-
- def _primary_key_from_state(self, state):
- dict_ = state.dict
- manager = state.manager
- return [
- manager[self._columntoproperty[col].key].\
- impl.get(state, dict_, attributes.PASSIVE_OFF)
- for col in self.primary_key
- ]
-
- def _get_state_attr_by_column(self, state, dict_, column,
- passive=attributes.PASSIVE_OFF):
- prop = self._columntoproperty[column]
- return state.manager[prop.key].impl.get(state, dict_, passive=passive)
-
- def _set_state_attr_by_column(self, state, dict_, column, value):
- prop = self._columntoproperty[column]
- state.manager[prop.key].impl.set(state, dict_, value, None)
-
- def _get_committed_attr_by_column(self, obj, column):
- state = attributes.instance_state(obj)
- dict_ = attributes.instance_dict(obj)
- return self._get_committed_state_attr_by_column(state, dict_, column)
-
- def _get_committed_state_attr_by_column(self, state, dict_,
- column, passive=attributes.PASSIVE_OFF):
-
- prop = self._columntoproperty[column]
- return state.manager[prop.key].impl.\
- get_committed_value(state, dict_, passive=passive)
-
- def _optimized_get_statement(self, state, attribute_names):
-        """assemble a SELECT statement which retrieves a given state by
-        primary key, using a minimized set of tables.
-
- Applies to a joined-table inheritance mapper where the
- requested attribute names are only present on joined tables,
- not the base table. The WHERE clause attempts to include
- only those tables to minimize joins.
-
- """
- props = self._props
-
- tables = set(chain(
- *[sqlutil.find_tables(c, check_columns=True)
- for key in attribute_names
- for c in props[key].columns]
- ))
-
- if self.base_mapper.local_table in tables:
- return None
-
- class ColumnsNotAvailable(Exception):
- pass
-
- def visit_binary(binary):
- leftcol = binary.left
- rightcol = binary.right
- if leftcol is None or rightcol is None:
- return
-
- if leftcol.table not in tables:
- leftval = self._get_committed_state_attr_by_column(
- state, state.dict,
- leftcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
- if leftval is attributes.PASSIVE_NO_RESULT or leftval is None:
- raise ColumnsNotAvailable()
- binary.left = sql.bindparam(None, leftval,
- type_=binary.right.type)
- elif rightcol.table not in tables:
- rightval = self._get_committed_state_attr_by_column(
- state, state.dict,
- rightcol,
- passive=attributes.PASSIVE_NO_INITIALIZE)
- if rightval is attributes.PASSIVE_NO_RESULT or rightval is None:
- raise ColumnsNotAvailable()
- binary.right = sql.bindparam(None, rightval,
- type_=binary.right.type)
-
- allconds = []
-
- try:
- start = False
- for mapper in reversed(list(self.iterate_to_root())):
- if mapper.local_table in tables:
- start = True
- if start and not mapper.single:
- allconds.append(visitors.cloned_traverse(
- mapper.inherit_condition,
- {},
- {'binary':visit_binary}
- )
- )
- except ColumnsNotAvailable:
- return None
-
- cond = sql.and_(*allconds)
-
- cols = []
- for key in attribute_names:
- cols.extend(props[key].columns)
- return sql.select(cols, cond, use_labels=True)
-
- def cascade_iterator(self, type_, state, halt_on=None):
- """Iterate each element and its mapper in an object graph,
- for all relationships that meet the given cascade rule.
-
- :param type_:
-          The name of the cascade rule (e.g. save-update, delete,
-          etc.)
-
-        :param state:
-          The lead InstanceState.  Child items will be processed per
-          the relationships defined for this object's mapper.
-
-        The return values are object instances; this provides a strong
-        reference so that they don't fall out of scope immediately.
-
- """
- visited_states = set()
- prp, mpp = object(), object()
-
- visitables = deque([(deque(self._props.values()), prp,
- state, state.dict)])
-
- while visitables:
- iterator, item_type, parent_state, parent_dict = visitables[-1]
- if not iterator:
- visitables.pop()
- continue
-
- if item_type is prp:
- prop = iterator.popleft()
- if type_ not in prop.cascade:
- continue
- queue = deque(prop.cascade_iterator(type_, parent_state,
- parent_dict, visited_states, halt_on))
- if queue:
-                    visitables.append((queue, mpp, None, None))
- elif item_type is mpp:
- instance, instance_mapper, corresponding_state, \
- corresponding_dict = iterator.popleft()
- yield instance, instance_mapper, \
- corresponding_state, corresponding_dict
- visitables.append((deque(instance_mapper._props.values()),
- prp, corresponding_state,
- corresponding_dict))
-
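-    # Usage sketch (``state`` is a hypothetical InstanceState of a mapped
-    # object; not part of the original source): walk every related object
-    # reachable from ``state`` via relationships whose cascade includes
-    # "save-update":
-    #
-    #     for obj, obj_mapper, obj_state, obj_dict in \
-    #             some_mapper.cascade_iterator('save-update', state):
-    #         print obj, obj_mapper
-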
- @_memoized_configured_property
- def _compiled_cache(self):
- return util.LRUCache(self._compiled_cache_size)
-
- @_memoized_configured_property
- def _sorted_tables(self):
- table_to_mapper = {}
- for mapper in self.base_mapper.self_and_descendants:
- for t in mapper.tables:
- table_to_mapper[t] = mapper
-
- sorted_ = sqlutil.sort_tables(table_to_mapper.iterkeys())
- ret = util.OrderedDict()
- for t in sorted_:
- ret[t] = table_to_mapper[t]
- return ret
-
- def _per_mapper_flush_actions(self, uow):
- saves = unitofwork.SaveUpdateAll(uow, self.base_mapper)
- deletes = unitofwork.DeleteAll(uow, self.base_mapper)
- uow.dependencies.add((saves, deletes))
-
- for dep in self._dependency_processors:
- dep.per_property_preprocessors(uow)
-
- for prop in self._props.values():
- prop.per_property_preprocessors(uow)
-
- def _per_state_flush_actions(self, uow, states, isdelete):
-
- base_mapper = self.base_mapper
- save_all = unitofwork.SaveUpdateAll(uow, base_mapper)
- delete_all = unitofwork.DeleteAll(uow, base_mapper)
- for state in states:
- # keep saves before deletes -
- # this ensures 'row switch' operations work
- if isdelete:
- action = unitofwork.DeleteState(uow, state, base_mapper)
- uow.dependencies.add((save_all, action))
- else:
- action = unitofwork.SaveUpdateState(uow, state, base_mapper)
- uow.dependencies.add((action, delete_all))
-
- yield action
-
- def _memo(self, key, callable_):
- if key in self._memoized_values:
- return self._memoized_values[key]
- else:
- self._memoized_values[key] = value = callable_()
- return value
-
- def _post_update(self, states, uowtransaction, post_update_cols):
- """Issue UPDATE statements on behalf of a relationship() which
- specifies post_update.
-
- """
- cached_connections = util.PopulateDict(
-            lambda conn: conn.execution_options(
- compiled_cache=self._compiled_cache
- ))
-
- # if session has a connection callable,
- # organize individual states with the connection
- # to use for update
- if uowtransaction.session.connection_callable:
- connection_callable = \
- uowtransaction.session.connection_callable
- else:
- connection = uowtransaction.transaction.connection(self)
- connection_callable = None
-
- tups = []
- for state in _sort_states(states):
- if connection_callable:
- conn = connection_callable(self, state.obj())
- else:
- conn = connection
-
- mapper = _state_mapper(state)
-
- tups.append((state, state.dict, mapper, conn))
-
- table_to_mapper = self._sorted_tables
-
- for table in table_to_mapper:
- update = []
-
- for state, state_dict, mapper, connection in tups:
- if table not in mapper._pks_by_table:
- continue
-
- pks = mapper._pks_by_table[table]
- params = {}
- hasdata = False
-
- for col in mapper._cols_by_table[table]:
- if col in pks:
- params[col._label] = \
- mapper._get_state_attr_by_column(
- state,
- state_dict, col)
- elif col in post_update_cols:
- prop = mapper._columntoproperty[col]
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- value = history.added[0]
- params[col.key] = value
- hasdata = True
- if hasdata:
- update.append((state, state_dict, params, mapper,
- connection))
-
- if update:
- mapper = table_to_mapper[table]
-
- def update_stmt():
- clause = sql.and_()
-
- for col in mapper._pks_by_table[table]:
- clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
-
- return table.update(clause)
-
- statement = self._memo(('post_update', table), update_stmt)
-
- # execute each UPDATE in the order according to the original
- # list of states to guarantee row access order, but
- # also group them into common (connection, cols) sets
- # to support executemany().
- for key, grouper in groupby(
- update, lambda rec: (rec[4], rec[2].keys())
- ):
- multiparams = [params for state, state_dict,
- params, mapper, conn in grouper]
-                    cached_connections[key[0]].\
-                                        execute(statement, multiparams)
-
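-    # Configuration sketch (hypothetical Widget/Entry mapping, not part of
-    # the original source): post_update is enabled on relationship() to
-    # break a circular dependency between two tables; the foreign key is
-    # then populated by this method as a second UPDATE after the INSERTs:
-    #
-    #     mapper(Widget, widgets, properties={
-    #         'favorite_entry': relationship(Entry, post_update=True)
-    #     })
-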
- def _save_obj(self, states, uowtransaction, single=False):
- """Issue ``INSERT`` and/or ``UPDATE`` statements for a list
- of objects.
-
- This is called within the context of a UOWTransaction during a
- flush operation, given a list of states to be flushed. The
- base mapper in an inheritance hierarchy handles the inserts/
- updates for all descendant mappers.
-
- """
-
- # if batch=false, call _save_obj separately for each object
- if not single and not self.batch:
- for state in _sort_states(states):
- self._save_obj([state],
- uowtransaction,
- single=True)
- return
-
- # if session has a connection callable,
- # organize individual states with the connection
- # to use for insert/update
- if uowtransaction.session.connection_callable:
- connection_callable = \
- uowtransaction.session.connection_callable
- else:
- connection = uowtransaction.transaction.connection(self)
- connection_callable = None
-
- tups = []
-
- for state in _sort_states(states):
- if connection_callable:
- conn = connection_callable(self, state.obj())
- else:
- conn = connection
-
- has_identity = bool(state.key)
- mapper = _state_mapper(state)
- instance_key = state.key or mapper._identity_key_from_state(state)
-
- row_switch = None
-
- # call before_XXX extensions
- if not has_identity:
- mapper.dispatch.before_insert(mapper, conn, state)
- else:
- mapper.dispatch.before_update(mapper, conn, state)
-
- # detect if we have a "pending" instance (i.e. has
- # no instance_key attached to it), and another instance
- # with the same identity key already exists as persistent.
- # convert to an UPDATE if so.
- if not has_identity and \
- instance_key in uowtransaction.session.identity_map:
- instance = \
- uowtransaction.session.identity_map[instance_key]
- existing = attributes.instance_state(instance)
- if not uowtransaction.is_deleted(existing):
- raise orm_exc.FlushError(
- "New instance %s with identity key %s conflicts "
- "with persistent instance %s" %
- (state_str(state), instance_key,
- state_str(existing)))
-
- self._log_debug(
- "detected row switch for identity %s. "
- "will update %s, remove %s from "
- "transaction", instance_key,
- state_str(state), state_str(existing))
-
- # remove the "delete" flag from the existing element
- uowtransaction.remove_state_actions(existing)
- row_switch = existing
-
- tups.append(
- (state, state.dict, mapper, conn,
- has_identity, instance_key, row_switch)
- )
-
- # dictionary of connection->connection_with_cache_options.
- cached_connections = util.PopulateDict(
-            lambda conn: conn.execution_options(
- compiled_cache=self._compiled_cache
- ))
-
- table_to_mapper = self._sorted_tables
-
- for table in table_to_mapper:
- insert = []
- update = []
-
- for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in tups:
- if table not in mapper._pks_by_table:
- continue
-
- pks = mapper._pks_by_table[table]
-
- isinsert = not has_identity and not row_switch
-
- params = {}
- value_params = {}
-
- if isinsert:
- has_all_pks = True
- for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col.key] = \
- mapper.version_id_generator(None)
- else:
- # pull straight from the dict for
- # pending objects
- prop = mapper._columntoproperty[col]
- value = state_dict.get(prop.key, None)
-
- if value is None:
- if col in pks:
- has_all_pks = False
- elif col.default is None and \
- col.server_default is None:
- params[col.key] = value
-
- elif isinstance(value, sql.ClauseElement):
- value_params[col] = value
- else:
- params[col.key] = value
-
- insert.append((state, state_dict, params, mapper,
- connection, value_params, has_all_pks))
- else:
- hasdata = False
- for col in mapper._cols_by_table[table]:
- if col is mapper.version_id_col:
- params[col._label] = \
- mapper._get_committed_state_attr_by_column(
- row_switch or state,
- row_switch and row_switch.dict
- or state_dict,
- col)
-
- prop = mapper._columntoproperty[col]
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE
- )
- if history.added:
- params[col.key] = history.added[0]
- hasdata = True
- else:
- params[col.key] = \
- mapper.version_id_generator(
- params[col._label])
-
- # HACK: check for history, in case the
- # history is only
- # in a different table than the one
- # where the version_id_col is.
- for prop in mapper._columntoproperty.\
- itervalues():
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- hasdata = True
- else:
- prop = mapper._columntoproperty[col]
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- if history.added:
- if isinstance(history.added[0],
- sql.ClauseElement):
- value_params[col] = history.added[0]
- else:
- value = history.added[0]
- params[col.key] = value
-
- if col in pks:
- if history.deleted and \
- not row_switch:
- # if passive_updates and sync detected
- # this was a pk->pk sync, use the new
- # value to locate the row, since the
- # DB would already have set this
- if ("pk_cascaded", state, col) in \
- uowtransaction.\
- attributes:
- value = history.added[0]
- params[col._label] = value
- else:
- # use the old value to
- # locate the row
- value = history.deleted[0]
- params[col._label] = value
- hasdata = True
- else:
- # row switch logic can reach us here
- # remove the pk from the update params
- # so the update doesn't
- # attempt to include the pk in the
- # update statement
- del params[col.key]
- value = history.added[0]
- params[col._label] = value
- if value is None and hasdata:
-                                        raise orm_exc.FlushError(
- "Can't update table "
- "using NULL for primary key "
- "value")
- else:
- hasdata = True
- elif col in pks:
- value = state.manager[prop.key].\
- impl.get(state, state_dict)
- if value is None:
-                                raise orm_exc.FlushError(
- "Can't update table "
- "using NULL for primary "
- "key value")
- params[col._label] = value
- if hasdata:
- update.append((state, state_dict, params, mapper,
- connection, value_params))
-
- if update:
- mapper = table_to_mapper[table]
-
- needs_version_id = mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col)
-
- def update_stmt():
- clause = sql.and_()
-
- for col in mapper._pks_by_table[table]:
- clause.clauses.append(col == sql.bindparam(col._label,
- type_=col.type))
-
- if needs_version_id:
- clause.clauses.append(mapper.version_id_col ==\
- sql.bindparam(mapper.version_id_col._label,
-                                                type_=mapper.version_id_col.type))
-
- return table.update(clause)
-
- statement = self._memo(('update', table), update_stmt)
-
- rows = 0
- for state, state_dict, params, mapper, \
- connection, value_params in update:
-
- if value_params:
- c = connection.execute(
- statement.values(value_params),
- params)
- else:
- c = cached_connections[connection].\
- execute(statement, params)
-
- mapper._postfetch(
- uowtransaction,
- table,
- state,
- state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
- c.context.compiled_parameters[0],
- value_params)
- rows += c.rowcount
-
- if connection.dialect.supports_sane_rowcount:
- if rows != len(update):
- raise orm_exc.StaleDataError(
- "UPDATE statement on table '%s' expected to update %d row(s); "
- "%d were matched." %
- (table.description, len(update), rows))
-
- elif needs_version_id:
- util.warn("Dialect %s does not support updated rowcount "
- "- versioning cannot be verified." %
- c.dialect.dialect_description,
- stacklevel=12)
-
- if insert:
- statement = self._memo(('insert', table), table.insert)
-
- for (connection, pkeys, hasvalue, has_all_pks), \
- records in groupby(insert,
- lambda rec: (rec[4],
- rec[2].keys(),
- bool(rec[5]),
- rec[6])
- ):
- if has_all_pks and not hasvalue:
- records = list(records)
- multiparams = [rec[2] for rec in records]
- c = cached_connections[connection].\
- execute(statement, multiparams)
-
- for (state, state_dict, params, mapper,
- conn, value_params, has_all_pks), \
- last_inserted_params in \
- zip(records, c.context.compiled_parameters):
- mapper._postfetch(
- uowtransaction,
- table,
- state,
- state_dict,
- c.context.prefetch_cols,
- c.context.postfetch_cols,
- last_inserted_params,
- value_params)
-
- else:
- for state, state_dict, params, mapper, \
- connection, value_params, \
- has_all_pks in records:
-
- if value_params:
- result = connection.execute(
- statement.values(value_params),
- params)
- else:
- result = cached_connections[connection].\
- execute(statement, params)
-
- primary_key = result.context.inserted_primary_key
-
- if primary_key is not None:
- # set primary key attributes
- for pk, col in zip(primary_key,
- mapper._pks_by_table[table]):
- prop = mapper._columntoproperty[col]
- if state_dict.get(prop.key) is None:
- # TODO: would rather say:
- #state_dict[prop.key] = pk
- mapper._set_state_attr_by_column(
- state,
- state_dict,
- col, pk)
-
- mapper._postfetch(
- uowtransaction,
- table,
- state,
- state_dict,
- result.context.prefetch_cols,
- result.context.postfetch_cols,
- result.context.compiled_parameters[0],
- value_params)
-
-
- for state, state_dict, mapper, connection, has_identity, \
- instance_key, row_switch in tups:
-
- if mapper._readonly_props:
- readonly = state.unmodified_intersection(
- [p.key for p in mapper._readonly_props]
- )
- if readonly:
- state.expire_attributes(state.dict, readonly)
-
- # if eager_defaults option is enabled,
- # refresh whatever has been expired.
- if self.eager_defaults and state.unloaded:
- state.key = self._identity_key_from_state(state)
- uowtransaction.session.query(self)._load_on_ident(
- state.key, refresh_state=state,
- only_load_props=state.unloaded)
-
- # call after_XXX extensions
- if not has_identity:
- mapper.dispatch.after_insert(mapper, connection, state)
- else:
- mapper.dispatch.after_update(mapper, connection, state)
-
- def _postfetch(self, uowtransaction, table,
- state, dict_, prefetch_cols, postfetch_cols,
- params, value_params):
- """During a flush, expire attributes in need of newly
- persisted database state."""
-
- if self.version_id_col is not None:
- prefetch_cols = list(prefetch_cols) + [self.version_id_col]
-
- for c in prefetch_cols:
- if c.key in params and c in self._columntoproperty:
- self._set_state_attr_by_column(state, dict_, c, params[c.key])
-
- if postfetch_cols:
- state.expire_attributes(state.dict,
- [self._columntoproperty[c].key
- for c in postfetch_cols if c in
- self._columntoproperty]
- )
-
- # synchronize newly inserted ids from one table to the next
- # TODO: this still goes a little too often. would be nice to
- # have definitive list of "columns that changed" here
- for m, equated_pairs in self._table_to_equated[table]:
- sync.populate(state, m, state, m,
- equated_pairs,
- uowtransaction,
- self.passive_updates)
-
- @util.memoized_property
- def _table_to_equated(self):
- """memoized map of tables to collections of columns to be
- synchronized upwards to the base mapper."""
-
- result = util.defaultdict(list)
-
- for table in self._sorted_tables:
- cols = set(table.c)
- for m in self.iterate_to_root():
- if m._inherits_equated_pairs and \
- cols.intersection(
- [l for l, r in m._inherits_equated_pairs]):
- result[table].append((m, m._inherits_equated_pairs))
-
- return result
-
- def _delete_obj(self, states, uowtransaction):
- """Issue ``DELETE`` statements for a list of objects.
-
- This is called within the context of a UOWTransaction during a
- flush operation.
-
- """
- if uowtransaction.session.connection_callable:
- connection_callable = \
- uowtransaction.session.connection_callable
- else:
- connection = uowtransaction.transaction.connection(self)
- connection_callable = None
-
- tups = []
- cached_connections = util.PopulateDict(
-            lambda conn: conn.execution_options(
- compiled_cache=self._compiled_cache
- ))
-
- for state in _sort_states(states):
- mapper = _state_mapper(state)
-
- if connection_callable:
- conn = connection_callable(self, state.obj())
- else:
- conn = connection
-
- mapper.dispatch.before_delete(mapper, conn, state)
-
- tups.append((state,
- state.dict,
- _state_mapper(state),
- bool(state.key),
- conn))
-
- table_to_mapper = self._sorted_tables
-
- for table in reversed(table_to_mapper.keys()):
- delete = util.defaultdict(list)
- for state, state_dict, mapper, has_identity, connection in tups:
- if not has_identity or table not in mapper._pks_by_table:
- continue
-
- params = {}
- delete[connection].append(params)
- for col in mapper._pks_by_table[table]:
- params[col.key] = \
- value = \
- mapper._get_state_attr_by_column(
- state, state_dict, col)
- if value is None:
-                            raise orm_exc.FlushError(
- "Can't delete from table "
- "using NULL for primary "
- "key value")
-
- if mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col):
- params[mapper.version_id_col.key] = \
- mapper._get_committed_state_attr_by_column(
- state, state_dict,
- mapper.version_id_col)
-
- mapper = table_to_mapper[table]
- need_version_id = mapper.version_id_col is not None and \
- table.c.contains_column(mapper.version_id_col)
-
- def delete_stmt():
- clause = sql.and_()
- for col in mapper._pks_by_table[table]:
- clause.clauses.append(
- col == sql.bindparam(col.key, type_=col.type))
-
- if need_version_id:
- clause.clauses.append(
- mapper.version_id_col ==
- sql.bindparam(
- mapper.version_id_col.key,
- type_=mapper.version_id_col.type
- )
- )
-
- return table.delete(clause)
-
- for connection, del_objects in delete.iteritems():
- statement = self._memo(('delete', table), delete_stmt)
- rows = -1
-
- connection = cached_connections[connection]
-
- if need_version_id and \
- not connection.dialect.supports_sane_multi_rowcount:
- # TODO: need test coverage for this [ticket:1761]
- if connection.dialect.supports_sane_rowcount:
- rows = 0
- # execute deletes individually so that versioned
- # rows can be verified
- for params in del_objects:
- c = connection.execute(statement, params)
- rows += c.rowcount
- else:
- util.warn(
- "Dialect %s does not support deleted rowcount "
- "- versioning cannot be verified." %
- connection.dialect.dialect_description,
- stacklevel=12)
- connection.execute(statement, del_objects)
- else:
- c = connection.execute(statement, del_objects)
- if connection.dialect.supports_sane_multi_rowcount:
- rows = c.rowcount
-
- if rows != -1 and rows != len(del_objects):
- raise orm_exc.StaleDataError(
- "DELETE statement on table '%s' expected to delete %d row(s); "
- "%d were matched." %
-                        (table.description, len(del_objects), rows)
- )
-
- for state, state_dict, mapper, has_identity, connection in tups:
- mapper.dispatch.after_delete(mapper, connection, state)
-
- def _instance_processor(self, context, path, reduced_path, adapter,
- polymorphic_from=None,
- only_load_props=None, refresh_state=None,
- polymorphic_discriminator=None):
-
- """Produce a mapper level row processor callable
- which processes rows into mapped instances."""
-
- pk_cols = self.primary_key
-
- if polymorphic_from or refresh_state:
- polymorphic_on = None
- else:
- if polymorphic_discriminator is not None:
- polymorphic_on = polymorphic_discriminator
- else:
- polymorphic_on = self.polymorphic_on
- polymorphic_instances = util.PopulateDict(
- self._configure_subclass_mapper(
- context, path, reduced_path, adapter)
- )
-
- version_id_col = self.version_id_col
-
- if adapter:
- pk_cols = [adapter.columns[c] for c in pk_cols]
- if polymorphic_on is not None:
- polymorphic_on = adapter.columns[polymorphic_on]
- if version_id_col is not None:
- version_id_col = adapter.columns[version_id_col]
-
- identity_class = self._identity_class
-
- new_populators = []
- existing_populators = []
- load_path = context.query._current_path + path
-
- def populate_state(state, dict_, row, isnew, only_load_props):
- if isnew:
- if context.propagate_options:
- state.load_options = context.propagate_options
- if state.load_options:
- state.load_path = load_path
-
- if not new_populators:
- self._populators(context, path, reduced_path, row, adapter,
- new_populators,
- existing_populators
- )
-
- if isnew:
- populators = new_populators
- else:
- populators = existing_populators
-
- if only_load_props:
- for key, populator in populators:
- if key in only_load_props:
- populator(state, dict_, row)
- else:
- for key, populator in populators:
- populator(state, dict_, row)
-
- session_identity_map = context.session.identity_map
-
- listeners = self.dispatch
-
- translate_row = listeners.translate_row or None
- create_instance = listeners.create_instance or None
- populate_instance = listeners.populate_instance or None
- append_result = listeners.append_result or None
- populate_existing = context.populate_existing or self.always_refresh
- if self.allow_partial_pks:
- is_not_primary_key = _none_set.issuperset
- else:
- is_not_primary_key = _none_set.issubset
-
- def _instance(row, result):
- if translate_row:
- for fn in translate_row:
- ret = fn(self, context, row)
- if ret is not EXT_CONTINUE:
- row = ret
- break
-
- if polymorphic_on is not None:
- discriminator = row[polymorphic_on]
- if discriminator is not None:
- _instance = polymorphic_instances[discriminator]
- if _instance:
- return _instance(row, result)
-
- # determine identity key
- if refresh_state:
- identitykey = refresh_state.key
- if identitykey is None:
- # super-rare condition; a refresh is being called
- # on a non-instance-key instance; this is meant to only
- # occur within a flush()
- identitykey = self._identity_key_from_state(refresh_state)
- else:
- identitykey = identity_class, tuple([row[column] for column in pk_cols])
-
- instance = session_identity_map.get(identitykey)
- if instance is not None:
- state = attributes.instance_state(instance)
- dict_ = attributes.instance_dict(instance)
-
- isnew = state.runid != context.runid
- currentload = not isnew
- loaded_instance = False
-
- if not currentload and \
- version_id_col is not None and \
- context.version_check and \
- self._get_state_attr_by_column(
- state,
- dict_,
- self.version_id_col) != \
- row[version_id_col]:
-
- raise orm_exc.StaleDataError(
- "Instance '%s' has version id '%s' which "
- "does not match database-loaded version id '%s'."
- % (state_str(state),
- self._get_state_attr_by_column(
- state, dict_,
- self.version_id_col),
- row[version_id_col]))
- elif refresh_state:
-                # out-of-band refresh_state detected (i.e. it's not in the
-                # session.identity_map); honor it anyway.  this can happen
- # if a _get() occurs within save_obj(), such as
- # when eager_defaults is True.
- state = refresh_state
- instance = state.obj()
- dict_ = attributes.instance_dict(instance)
- isnew = state.runid != context.runid
- currentload = True
- loaded_instance = False
- else:
- # check for non-NULL values in the primary key columns,
- # else no entity is returned for the row
- if is_not_primary_key(identitykey[1]):
- return None
-
- isnew = True
- currentload = True
- loaded_instance = True
-
- if create_instance:
- for fn in create_instance:
- instance = fn(self,
- context,
- row, self.class_)
- if instance is not EXT_CONTINUE:
- manager = attributes.manager_of_class(
- instance.__class__)
- # TODO: if manager is None, raise a friendly error
- # about returning instances of unmapped types
- manager.setup_instance(instance)
- break
- else:
- instance = self.class_manager.new_instance()
- else:
- instance = self.class_manager.new_instance()
-
- dict_ = attributes.instance_dict(instance)
- state = attributes.instance_state(instance)
- state.key = identitykey
-
- # manually adding instance to session. for a complete add,
- # session._finalize_loaded() must be called.
- state.session_id = context.session.hash_key
- session_identity_map.add(state)
-
- if currentload or populate_existing:
- if isnew:
- state.runid = context.runid
- context.progress[state] = dict_
-
- if populate_instance:
- for fn in populate_instance:
- ret = fn(self, context, row, state,
- only_load_props=only_load_props,
- instancekey=identitykey, isnew=isnew)
- if ret is not EXT_CONTINUE:
- break
- else:
- populate_state(state, dict_, row, isnew, only_load_props)
- else:
- populate_state(state, dict_, row, isnew, only_load_props)
-
- if loaded_instance:
- state.manager.dispatch.load(state, context)
- elif isnew:
- state.manager.dispatch.refresh(state, context, only_load_props)
-
- elif state in context.partials or state.unloaded:
- # populate attributes on non-loading instances which have
- # been expired
- # TODO: apply eager loads to un-lazy loaded collections ?
-
- if state in context.partials:
- isnew = False
- (d_, attrs) = context.partials[state]
- else:
- isnew = True
- attrs = state.unloaded
- # allow query.instances to commit the subset of attrs
- context.partials[state] = (dict_, attrs)
-
- if populate_instance:
- for fn in populate_instance:
- ret = fn(self, context, row, state,
- only_load_props=attrs,
- instancekey=identitykey, isnew=isnew)
- if ret is not EXT_CONTINUE:
- break
- else:
- populate_state(state, dict_, row, isnew, attrs)
- else:
- populate_state(state, dict_, row, isnew, attrs)
-
- if isnew:
- state.manager.dispatch.refresh(state, context, attrs)
-
-
- if result is not None:
- if append_result:
- for fn in append_result:
- if fn(self, context, row, state,
- result, instancekey=identitykey,
- isnew=isnew) is not EXT_CONTINUE:
- break
- else:
- result.append(instance)
- else:
- result.append(instance)
-
- return instance
- return _instance
-
- def _populators(self, context, path, reduced_path, row, adapter,
- new_populators, existing_populators):
- """Produce a collection of attribute level row processor callables."""
-
- delayed_populators = []
- for prop in self._props.itervalues():
- newpop, existingpop, delayedpop = prop.create_row_processor(
- context, path,
- reduced_path,
- self, row, adapter)
- if newpop:
- new_populators.append((prop.key, newpop))
- if existingpop:
- existing_populators.append((prop.key, existingpop))
- if delayedpop:
- delayed_populators.append((prop.key, delayedpop))
- if delayed_populators:
- new_populators.extend(delayed_populators)
-
- def _configure_subclass_mapper(self, context, path, reduced_path, adapter):
- """Produce a mapper level row processor callable factory for mappers
- inheriting this one."""
-
- def configure_subclass_mapper(discriminator):
- try:
- mapper = self.polymorphic_map[discriminator]
- except KeyError:
- raise AssertionError(
- "No such polymorphic_identity %r is defined" %
- discriminator)
- if mapper is self:
- return None
-
- # replace the tip of the path info with the subclass mapper
- # being used. that way accurate "load_path" info is available
- # for options invoked during deferred loads.
- # we lose AliasedClass path elements this way, but currently,
- # those are not needed at this stage.
-
-            # this assertion would hold true:
- #assert mapper.isa(_class_to_mapper(path[-1]))
-
- return mapper._instance_processor(context, path[0:-1] + (mapper,),
- reduced_path[0:-1] + (mapper.base_mapper,),
- adapter,
- polymorphic_from=self)
- return configure_subclass_mapper
-
-log.class_logger(Mapper)
-
-def configure_mappers():
- """Initialize the inter-mapper relationships of all mappers that
- have been constructed thus far.
-
- This function can be called any number of times, but in
- most cases is handled internally.
-
- """
-
- global _new_mappers
- if not _new_mappers:
- return
-
- _COMPILE_MUTEX.acquire()
- try:
- global _already_compiling
- if _already_compiling:
- return
- _already_compiling = True
- try:
-
- # double-check inside mutex
- if not _new_mappers:
- return
-
- # initialize properties on all mappers
- # note that _mapper_registry is unordered, which
- # may randomly conceal/reveal issues related to
- # the order of mapper compilation
- for mapper in list(_mapper_registry):
- if getattr(mapper, '_configure_failed', False):
- e = sa_exc.InvalidRequestError(
- "One or more mappers failed to initialize - "
- "can't proceed with initialization of other "
- "mappers. Original exception was: %s"
- % mapper._configure_failed)
- e._configure_failed = mapper._configure_failed
- raise e
- if not mapper.configured:
- try:
- mapper._post_configure_properties()
- mapper._expire_memoizations()
- mapper.dispatch.mapper_configured(mapper, mapper.class_)
- except:
- exc = sys.exc_info()[1]
- if not hasattr(exc, '_configure_failed'):
- mapper._configure_failed = exc
- raise
-
- _new_mappers = False
- finally:
- _already_compiling = False
- finally:
- _COMPILE_MUTEX.release()
-
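-# Usage sketch (not part of the original source): calling the function
-# explicitly after all classes have been mapped surfaces configuration
-# errors eagerly, rather than at the first instance construction or query:
-#
-#     from sqlalchemy.orm import configure_mappers
-#     configure_mappers()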
-
-def reconstructor(fn):
- """Decorate a method as the 'reconstructor' hook.
-
- Designates a method as the "reconstructor", an ``__init__``-like
- method that will be called by the ORM after the instance has been
- loaded from the database or otherwise reconstituted.
-
- The reconstructor will be invoked with no arguments. Scalar
- (non-collection) database-mapped attributes of the instance will
- be available for use within the function. Eagerly-loaded
- collections are generally not yet available and will usually only
- contain the first element. ORM state changes made to objects at
- this stage will not be recorded for the next flush() operation, so
- the activity within a reconstructor should be conservative.
-
- """
- fn.__sa_reconstructor__ = True
- return fn
-
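-# Usage sketch (hypothetical mapped class, not part of the original
-# source): the decorated method runs with no arguments after each
-# database load, so it can rebuild transient, non-mapped state:
-#
-#     class MyEntity(object):
-#         def __init__(self, data):
-#             self.data = data
-#             self._cache = {}
-#
-#         @reconstructor
-#         def init_on_load(self):
-#             # mapped scalar attributes such as self.data are available
-#             self._cache = {}
-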
-def validates(*names):
- """Decorate a method as a 'validator' for one or more named properties.
-
- Designates a method as a validator, a method which receives the
- name of the attribute as well as a value to be assigned, or in the
- case of a collection, the value to be added to the collection. The function
- can then raise validation exceptions to halt the process from continuing
- (where Python's built-in ``ValueError`` and ``AssertionError`` exceptions are
- reasonable choices), or can modify or replace the value before proceeding.
- The function should otherwise return the given value.
-
- Note that a validator for a collection **cannot** issue a load of that
- collection within the validation routine - this usage raises
- an assertion to avoid recursion overflows. This is a reentrant
- condition which is not supported.
-
- """
- def wrap(fn):
- fn.__sa_validators__ = names
- return fn
- return wrap
-
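-# Usage sketch (hypothetical mapped class, not part of the original
-# source): the validator receives the attribute name and the incoming
-# value, and must return the (possibly modified) value:
-#
-#     class EmailAddress(object):
-#         @validates('email')
-#         def validate_email(self, key, address):
-#             assert '@' in address, "invalid email address"
-#             return address
-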
-def _event_on_load(state, ctx):
- instrumenting_mapper = state.manager.info[_INSTRUMENTOR]
- if instrumenting_mapper._reconstructor:
- instrumenting_mapper._reconstructor(state.obj())
-
-def _event_on_first_init(manager, cls):
- """Trigger mapper compilation."""
-
- instrumenting_mapper = manager.info.get(_INSTRUMENTOR)
- if instrumenting_mapper:
- if _new_mappers:
- configure_mappers()
-
-def _event_on_init(state, args, kwargs):
- """Run init_instance hooks."""
-
- instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
- if instrumenting_mapper and \
- instrumenting_mapper._set_polymorphic_identity:
- instrumenting_mapper._set_polymorphic_identity(state)
-
-def _event_on_resurrect(state):
- # re-populate the primary key elements
- # of the dict based on the mapping.
- instrumenting_mapper = state.manager.info.get(_INSTRUMENTOR)
- if instrumenting_mapper:
- for col, val in zip(instrumenting_mapper.primary_key, state.key[1]):
- instrumenting_mapper._set_state_attr_by_column(
- state, state.dict, col, val)
-
-
-def _sort_states(states):
- return sorted(states, key=operator.attrgetter('sort_key'))
-
-def _load_scalar_attributes(state, attribute_names):
- """initiate a column-based attribute refresh operation."""
-
- mapper = _state_mapper(state)
- session = sessionlib._state_session(state)
- if not session:
- raise orm_exc.DetachedInstanceError(
- "Instance %s is not bound to a Session; "
- "attribute refresh operation cannot proceed" %
- (state_str(state)))
-
- has_key = bool(state.key)
-
- result = False
-
- if mapper.inherits and not mapper.concrete:
- statement = mapper._optimized_get_statement(state, attribute_names)
- if statement is not None:
- result = session.query(mapper).from_statement(statement).\
- _load_on_ident(None,
- only_load_props=attribute_names,
- refresh_state=state)
-
- if result is False:
- if has_key:
- identity_key = state.key
- else:
- # this codepath is rare - only valid when inside a flush, and the
- # object is becoming persistent but hasn't yet been assigned an identity_key.
- # check here to ensure we have the attrs we need.
- pk_attrs = [mapper._columntoproperty[col].key
- for col in mapper.primary_key]
- if state.expired_attributes.intersection(pk_attrs):
-                raise sa_exc.InvalidRequestError(
-                            "Instance %s cannot be refreshed - it's not "
-                            "persistent and does not contain a full "
-                            "primary key." % state_str(state))
- identity_key = mapper._identity_key_from_state(state)
-
- if (_none_set.issubset(identity_key) and \
- not mapper.allow_partial_pks) or \
- _none_set.issuperset(identity_key):
- util.warn("Instance %s to be refreshed doesn't "
- "contain a full primary key - can't be refreshed "
- "(and shouldn't be expired, either)."
- % state_str(state))
- return
-
- result = session.query(mapper)._load_on_ident(
- identity_key,
- refresh_state=state,
- only_load_props=attribute_names)
-
- # if instance is pending, a refresh operation
- # may not complete (even if PK attributes are assigned)
- if has_key and result is None:
- raise orm_exc.ObjectDeletedError(
- "Instance '%s' has been deleted." %
- state_str(state))
-
-
-class _ColumnMapping(util.py25_dict):
- """Error reporting helper for mapper._columntoproperty."""
-
- def __init__(self, mapper):
- self.mapper = mapper
-
- def __missing__(self, column):
- prop = self.mapper._props.get(column)
- if prop:
- raise orm_exc.UnmappedColumnError(
- "Column '%s.%s' is not available, due to "
- "conflicting property '%s':%r" % (
- column.table.name, column.name, column.key, prop))
- raise orm_exc.UnmappedColumnError(
- "No column %s is configured on mapper %s..." %
- (column, self.mapper))
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/properties.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/properties.py
deleted file mode 100755
index cf059513..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/properties.py
+++ /dev/null
@@ -1,1250 +0,0 @@
-# orm/properties.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""MapperProperty implementations.
-
-This is a private module which defines the behavior of individual
-ORM-mapped attributes.
-
-"""
-
-from sqlalchemy import sql, util, log, exc as sa_exc
-from sqlalchemy.sql.util import ClauseAdapter, criterion_as_pairs, \
- join_condition
-from sqlalchemy.sql import operators, expression
-from sqlalchemy.orm import attributes, dependency, mapper, \
- object_mapper, strategies, configure_mappers
-from sqlalchemy.orm.util import CascadeOptions, _class_to_mapper, \
- _orm_annotate, _orm_deannotate
-from sqlalchemy.orm.interfaces import MANYTOMANY, MANYTOONE, \
- MapperProperty, ONETOMANY, PropComparator, StrategizedProperty
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-NoneType = type(None)
-
-__all__ = ('ColumnProperty', 'CompositeProperty', 'SynonymProperty',
- 'ComparableProperty', 'RelationshipProperty', 'RelationProperty')
-
-from descriptor_props import CompositeProperty, SynonymProperty, \
- ComparableProperty,ConcreteInheritedProperty
-
-class ColumnProperty(StrategizedProperty):
- """Describes an object attribute that corresponds to a table column."""
-
- def __init__(self, *columns, **kwargs):
- """Construct a ColumnProperty.
-
- :param \*columns: The list of `columns` describes a single
- object property. If there are multiple tables joined
- together for the mapper, this list represents the equivalent
- column as it appears across each table.
-
-        :param group: a group name for this property when marked as
-          deferred.
-
-        :param deferred: when True, the column property is "deferred",
-          meaning that it does not load immediately, and is instead
-          loaded when the attribute is first accessed on an instance.
-
-        :param comparator_factory: a class which extends
-          ``ColumnProperty.Comparator`` which provides custom SQL clause
-          generation for comparison operations.
-
-        :param descriptor: an optional Python descriptor used in place of
-          the default instrumented attribute on the class.
-
-        :param extension: an ``AttributeExtension`` instance, or list of
-          extensions, which will be prepended to the list of attribute
-          listeners for the resulting descriptor placed on the class
-          (deprecated in favor of the event system).
-
- """
- self.columns = [expression._labeled(c) for c in columns]
- self.group = kwargs.pop('group', None)
- self.deferred = kwargs.pop('deferred', False)
- self.instrument = kwargs.pop('_instrument', True)
- self.comparator_factory = kwargs.pop('comparator_factory',
- self.__class__.Comparator)
- self.descriptor = kwargs.pop('descriptor', None)
- self.extension = kwargs.pop('extension', None)
- self.active_history = kwargs.pop('active_history', False)
-
- if 'doc' in kwargs:
- self.doc = kwargs.pop('doc')
- else:
- for col in reversed(self.columns):
- doc = getattr(col, 'doc', None)
- if doc is not None:
- self.doc = doc
- break
- else:
- self.doc = None
-
- if kwargs:
- raise TypeError(
- "%s received unexpected keyword argument(s): %s" % (
- self.__class__.__name__,
- ', '.join(sorted(kwargs.keys()))))
-
- util.set_creation_order(self)
- if not self.instrument:
- self.strategy_class = strategies.UninstrumentedColumnLoader
- elif self.deferred:
- self.strategy_class = strategies.DeferredColumnLoader
- else:
- self.strategy_class = strategies.ColumnLoader
-
- def instrument_class(self, mapper):
- if not self.instrument:
- return
-
- attributes.register_descriptor(
- mapper.class_,
- self.key,
- comparator=self.comparator_factory(self, mapper),
- parententity=mapper,
- property_=self,
- doc=self.doc
- )
-
- def do_init(self):
- super(ColumnProperty, self).do_init()
- if len(self.columns) > 1 and \
- set(self.parent.primary_key).issuperset(self.columns):
- util.warn(
- ("On mapper %s, primary key column '%s' is being combined "
- "with distinct primary key column '%s' in attribute '%s'. "
- "Use explicit properties to give each column its own mapped "
- "attribute name.") % (self.parent, self.columns[1],
- self.columns[0], self.key))
-
- def copy(self):
- return ColumnProperty(
- deferred=self.deferred,
- group=self.group,
- active_history=self.active_history,
- *self.columns)
-
- def _getcommitted(self, state, dict_, column,
- passive=attributes.PASSIVE_OFF):
- return state.get_impl(self.key).\
- get_committed_value(state, dict_, passive=passive)
-
- def merge(self, session, source_state, source_dict, dest_state,
- dest_dict, load, _recursive):
- if self.key in source_dict:
- value = source_dict[self.key]
-
- if not load:
- dest_dict[self.key] = value
- else:
- impl = dest_state.get_impl(self.key)
- impl.set(dest_state, dest_dict, value, None)
- else:
- if dest_state.has_identity and self.key not in dest_dict:
- dest_state.expire_attributes(dest_dict, [self.key])
-
- class Comparator(PropComparator):
- @util.memoized_instancemethod
- def __clause_element__(self):
- if self.adapter:
- return self.adapter(self.prop.columns[0])
- else:
- return self.prop.columns[0]._annotate({
- "parententity": self.mapper,
- "parentmapper":self.mapper})
-
- def operate(self, op, *other, **kwargs):
- return op(self.__clause_element__(), *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- col = self.__clause_element__()
- return op(col._bind_param(op, other), col, **kwargs)
-
- # TODO: legacy..do we need this ? (0.5)
- ColumnComparator = Comparator
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
-log.class_logger(ColumnProperty)
-
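-# Usage sketch (hypothetical ``users``/``addresses`` tables joined in one
-# mapping; not part of the original source): ColumnProperty instances are
-# normally created via orm.column_property(), e.g. to map one attribute
-# over the equivalent column in each of two joined tables:
-#
-#     mapper(User, users.join(addresses), properties={
-#         'id': column_property(users.c.id, addresses.c.user_id)
-#     })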
-
-
-
-class RelationshipProperty(StrategizedProperty):
- """Describes an object property that holds a single item or list
- of items that correspond to a related database table.
- """
-
- def __init__(self, argument,
- secondary=None, primaryjoin=None,
- secondaryjoin=None,
- foreign_keys=None,
- uselist=None,
- order_by=False,
- backref=None,
- back_populates=None,
- post_update=False,
- cascade=False, extension=None,
- viewonly=False, lazy=True,
- collection_class=None, passive_deletes=False,
- passive_updates=True, remote_side=None,
- enable_typechecks=True, join_depth=None,
- comparator_factory=None,
- single_parent=False, innerjoin=False,
- doc=None,
- active_history=False,
- cascade_backrefs=True,
- load_on_pending=False,
- strategy_class=None, _local_remote_pairs=None,
- query_class=None):
-
- self.uselist = uselist
- self.argument = argument
- self.secondary = secondary
- self.primaryjoin = primaryjoin
- self.secondaryjoin = secondaryjoin
- self.post_update = post_update
- self.direction = None
- self.viewonly = viewonly
- self.lazy = lazy
- self.single_parent = single_parent
- self._user_defined_foreign_keys = foreign_keys
- self.collection_class = collection_class
- self.passive_deletes = passive_deletes
- self.cascade_backrefs = cascade_backrefs
- self.passive_updates = passive_updates
- self.remote_side = remote_side
- self.enable_typechecks = enable_typechecks
- self.query_class = query_class
- self.innerjoin = innerjoin
- self.doc = doc
- self.active_history = active_history
- self.join_depth = join_depth
- self.local_remote_pairs = _local_remote_pairs
- self.extension = extension
- self.load_on_pending = load_on_pending
- self.comparator_factory = comparator_factory or \
- RelationshipProperty.Comparator
- self.comparator = self.comparator_factory(self, None)
- util.set_creation_order(self)
-
- if strategy_class:
- self.strategy_class = strategy_class
-        elif self.lazy == 'dynamic':
- from sqlalchemy.orm import dynamic
- self.strategy_class = dynamic.DynaLoader
- else:
- self.strategy_class = strategies.factory(self.lazy)
-
- self._reverse_property = set()
-
- if cascade is not False:
- self.cascade = CascadeOptions(cascade)
- else:
- self.cascade = CascadeOptions("save-update, merge")
-
- if self.passive_deletes == 'all' and \
- ("delete" in self.cascade or
- "delete-orphan" in self.cascade):
- raise sa_exc.ArgumentError(
- "Can't set passive_deletes='all' in conjunction "
- "with 'delete' or 'delete-orphan' cascade")
-
- self.order_by = order_by
-
- self.back_populates = back_populates
-
- if self.back_populates:
- if backref:
- raise sa_exc.ArgumentError(
- "backref and back_populates keyword arguments "
- "are mutually exclusive")
- self.backref = None
- else:
- self.backref = backref
-
- def instrument_class(self, mapper):
- attributes.register_descriptor(
- mapper.class_,
- self.key,
- comparator=self.comparator_factory(self, mapper),
- parententity=mapper,
- property_=self,
- doc=self.doc,
- )
-
- class Comparator(PropComparator):
- def __init__(self, prop, mapper, of_type=None, adapter=None):
- self.prop = prop
- self.mapper = mapper
- self.adapter = adapter
- if of_type:
- self._of_type = _class_to_mapper(of_type)
-
- def adapted(self, adapter):
- """Return a copy of this PropComparator which will use the
- given adaption function on the local side of generated
- expressions.
-
- """
-
- return self.__class__(self.property, self.mapper,
- getattr(self, '_of_type', None),
- adapter)
-
- @property
- def parententity(self):
- return self.property.parent
-
- def __clause_element__(self):
- elem = self.property.parent._with_polymorphic_selectable
- if self.adapter:
- return self.adapter(elem)
- else:
- return elem
-
- def operate(self, op, *other, **kwargs):
- return op(self, *other, **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- return op(self, *other, **kwargs)
-
- def of_type(self, cls):
- return RelationshipProperty.Comparator(
- self.property,
- self.mapper,
- cls, adapter=self.adapter)
-
- def in_(self, other):
- raise NotImplementedError('in_() not yet supported for '
- 'relationships. For a simple many-to-one, use '
- 'in_() against the set of foreign key values.')
-
- __hash__ = None
-
- def __eq__(self, other):
- if isinstance(other, (NoneType, expression._Null)):
- if self.property.direction in [ONETOMANY, MANYTOMANY]:
- return ~self._criterion_exists()
- else:
- return _orm_annotate(self.property._optimized_compare(
- None, adapt_source=self.adapter))
- elif self.property.uselist:
-                raise sa_exc.InvalidRequestError(
-                    "Can't compare a collection to an object or "
-                    "collection; use contains() to test for membership.")
- else:
- return _orm_annotate(self.property._optimized_compare(other,
- adapt_source=self.adapter))
-
- def _criterion_exists(self, criterion=None, **kwargs):
- if getattr(self, '_of_type', None):
- target_mapper = self._of_type
- to_selectable = target_mapper._with_polymorphic_selectable
- if self.property._is_self_referential():
- to_selectable = to_selectable.alias()
-
- single_crit = target_mapper._single_table_criterion
- if single_crit is not None:
- if criterion is not None:
- criterion = single_crit & criterion
- else:
- criterion = single_crit
- else:
- to_selectable = None
-
- if self.adapter:
- source_selectable = self.__clause_element__()
- else:
- source_selectable = None
-
- pj, sj, source, dest, secondary, target_adapter = \
- self.property._create_joins(dest_polymorphic=True,
- dest_selectable=to_selectable,
- source_selectable=source_selectable)
-
- for k in kwargs:
- crit = self.property.mapper.class_manager[k] == kwargs[k]
- if criterion is None:
- criterion = crit
- else:
- criterion = criterion & crit
-
- # annotate the *local* side of the join condition, in the case
- # of pj + sj this is the full primaryjoin, in the case of just
-            # pj it's the local side of the primaryjoin.
- if sj is not None:
- j = _orm_annotate(pj) & sj
- else:
- j = _orm_annotate(pj, exclude=self.property.remote_side)
-
- if criterion is not None and target_adapter:
- # limit this adapter to annotated only?
- criterion = target_adapter.traverse(criterion)
-
- # only have the "joined left side" of what we
- # return be subject to Query adaption. The right
- # side of it is used for an exists() subquery and
- # should not correlate or otherwise reach out
- # to anything in the enclosing query.
- if criterion is not None:
- criterion = criterion._annotate({'_halt_adapt': True})
-
- crit = j & criterion
-
- return sql.exists([1], crit, from_obj=dest).correlate(source)
-
- def any(self, criterion=None, **kwargs):
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'any()' not implemented for scalar "
- "attributes. Use has()."
- )
-
- return self._criterion_exists(criterion, **kwargs)
-
- def has(self, criterion=None, **kwargs):
- if self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'has()' not implemented for collections. "
- "Use any().")
- return self._criterion_exists(criterion, **kwargs)
-
- def contains(self, other, **kwargs):
- if not self.property.uselist:
- raise sa_exc.InvalidRequestError(
- "'contains' not implemented for scalar "
- "attributes. Use ==")
- clause = self.property._optimized_compare(other,
- adapt_source=self.adapter)
-
- if self.property.secondaryjoin is not None:
- clause.negation_clause = \
- self.__negated_contains_or_equals(other)
-
- return clause
-
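-        # Usage sketch (hypothetical User/Address mapping, not part of the
-        # original source): any() applies to collections, has() to scalar
-        # (many-to-one) references, and contains() tests membership of a
-        # particular instance:
-        #
-        #     session.query(User).filter(
-        #         User.addresses.any(Address.email == 'foo@bar.com'))
-        #     session.query(Address).filter(Address.user.has(name='ed'))
-        #     session.query(User).filter(
-        #         User.addresses.contains(some_address))
-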
- def __negated_contains_or_equals(self, other):
- if self.property.direction == MANYTOONE:
- state = attributes.instance_state(other)
-
- def state_bindparam(x, state, col):
- o = state.obj() # strong ref
- return sql.bindparam(x, unique=True, callable_=lambda : \
- self.property.mapper._get_committed_attr_by_column(o,
- col))
-
- def adapt(col):
- if self.adapter:
- return self.adapter(col)
- else:
- return col
-
- if self.property._use_get:
- return sql.and_(*[
- sql.or_(
- adapt(x) != state_bindparam(adapt(x), state, y),
- adapt(x) == None)
- for (x, y) in self.property.local_remote_pairs])
-
- criterion = sql.and_(*[x==y for (x, y) in
- zip(
- self.property.mapper.primary_key,
- self.property.\
- mapper.\
- primary_key_from_instance(other))
- ])
- return ~self._criterion_exists(criterion)
-
- def __ne__(self, other):
- if isinstance(other, (NoneType, expression._Null)):
- if self.property.direction == MANYTOONE:
- return sql.or_(*[x != None for x in
- self.property._calculated_foreign_keys])
- else:
- return self._criterion_exists()
- elif self.property.uselist:
- raise sa_exc.InvalidRequestError("Can't compare a collection"
- " to an object or collection; use "
- "contains() to test for membership.")
- else:
- return self.__negated_contains_or_equals(other)
-
- @util.memoized_property
- def property(self):
- if mapperlib.module._new_mappers:
- configure_mappers()
- return self.prop
-
- def compare(self, op, value,
- value_is_parent=False,
- alias_secondary=True):
- if op == operators.eq:
- if value is None:
- if self.uselist:
- return ~sql.exists([1], self.primaryjoin)
- else:
- return self._optimized_compare(None,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return self._optimized_compare(value,
- value_is_parent=value_is_parent,
- alias_secondary=alias_secondary)
- else:
- return op(self.comparator, value)
-
- def _optimized_compare(self, value, value_is_parent=False,
- adapt_source=None,
- alias_secondary=True):
- if value is not None:
- value = attributes.instance_state(value)
- return self._get_strategy(strategies.LazyLoader).lazy_clause(value,
- reverse_direction=not value_is_parent,
- alias_secondary=alias_secondary,
- adapt_source=adapt_source)
-
- def __str__(self):
- return str(self.parent.class_.__name__) + "." + self.key
-
- def merge(self,
- session,
- source_state,
- source_dict,
- dest_state,
- dest_dict,
- load, _recursive):
- if load:
- # TODO: no test coverage for recursive check
- for r in self._reverse_property:
- if (source_state, r) in _recursive:
- return
-
-        if "merge" not in self.cascade:
- return
-
- if self.key not in source_dict:
- return
-
- if self.uselist:
- instances = source_state.get_impl(self.key).\
- get(source_state, source_dict)
- if hasattr(instances, '_sa_adapter'):
- # convert collections to adapters to get a true iterator
- instances = instances._sa_adapter
-
- if load:
- # for a full merge, pre-load the destination collection,
- # so that individual _merge of each item pulls from identity
- # map for those already present.
-                # also assumes CollectionAttributeImpl behavior of loading
- # "old" list in any case
- dest_state.get_impl(self.key).get(dest_state, dest_dict)
-
- dest_list = []
- for current in instances:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- if obj is not None:
- dest_list.append(obj)
-
- if not load:
- coll = attributes.init_state_collection(dest_state,
- dest_dict, self.key)
- for c in dest_list:
- coll.append_without_event(c)
- else:
- dest_state.get_impl(self.key)._set_iterable(dest_state,
- dest_dict, dest_list)
- else:
- current = source_dict[self.key]
- if current is not None:
- current_state = attributes.instance_state(current)
- current_dict = attributes.instance_dict(current)
- _recursive[(current_state, self)] = True
- obj = session._merge(current_state, current_dict,
- load=load, _recursive=_recursive)
- else:
- obj = None
- if not load:
- dest_dict[self.key] = obj
- else:
- dest_state.get_impl(self.key).set(dest_state,
- dest_dict, obj, None)
-
- def cascade_iterator(self, type_, state, dict_, visited_states, halt_on=None):
- #assert type_ in self.cascade
-
- # only actively lazy load on the 'delete' cascade
- if type_ != 'delete' or self.passive_deletes:
- passive = attributes.PASSIVE_NO_INITIALIZE
- else:
- passive = attributes.PASSIVE_OFF
-
- if type_ == 'save-update':
- tuples = state.manager[self.key].impl.\
- get_all_pending(state, dict_)
-
- else:
- tuples = state.value_as_iterable(dict_, self.key,
- passive=passive)
-
- skip_pending = type_ == 'refresh-expire' and 'delete-orphan' \
- not in self.cascade
-
- for instance_state, c in tuples:
- if instance_state in visited_states:
- continue
-
- instance_dict = attributes.instance_dict(c)
-
- if halt_on and halt_on(instance_state):
- continue
-
- if skip_pending and not instance_state.key:
- continue
-
- instance_mapper = instance_state.manager.mapper
-
- if not instance_mapper.isa(self.mapper.class_manager.mapper):
- raise AssertionError("Attribute '%s' on class '%s' "
- "doesn't handle objects "
- "of type '%s'" % (
- self.key,
- self.parent.class_,
- c.__class__
- ))
-
- visited_states.add(instance_state)
-
- yield c, instance_mapper, instance_state, instance_dict
-
-
- def _add_reverse_property(self, key):
- other = self.mapper.get_property(key, _compile_mappers=False)
- self._reverse_property.add(other)
- other._reverse_property.add(self)
-
- if not other._get_target().common_parent(self.parent):
- raise sa_exc.ArgumentError('reverse_property %r on '
- 'relationship %s references relationship %s, which '
- 'does not reference mapper %s' % (key, self, other,
- self.parent))
- if self.direction in (ONETOMANY, MANYTOONE) and self.direction \
- == other.direction:
- raise sa_exc.ArgumentError('%s and back-reference %s are '
- 'both of the same direction %r. Did you mean to '
-                'set remote_side on the many-to-one side?'
- % (other, self, self.direction))
-
- def do_init(self):
- self._get_target()
- self._assert_is_primary()
- self._process_dependent_arguments()
- self._determine_joins()
- self._determine_synchronize_pairs()
- self._determine_direction()
- self._determine_local_remote_pairs()
- self._post_init()
- self._generate_backref()
- super(RelationshipProperty, self).do_init()
-
- def _get_target(self):
- if not hasattr(self, 'mapper'):
- if isinstance(self.argument, type):
- self.mapper = mapper.class_mapper(self.argument,
- compile=False)
- elif isinstance(self.argument, mapper.Mapper):
- self.mapper = self.argument
- elif util.callable(self.argument):
-
- # accept a callable to suit various deferred-
- # configurational schemes
-
- self.mapper = mapper.class_mapper(self.argument(),
- compile=False)
- else:
- raise sa_exc.ArgumentError("relationship '%s' expects "
- "a class or a mapper argument (received: %s)"
- % (self.key, type(self.argument)))
- assert isinstance(self.mapper, mapper.Mapper), self.mapper
- return self.mapper
-
- def _process_dependent_arguments(self):
-
- # accept callables for other attributes which may require
- # deferred initialization
-
- for attr in (
- 'order_by',
- 'primaryjoin',
- 'secondaryjoin',
- 'secondary',
- '_user_defined_foreign_keys',
- 'remote_side',
- ):
- if util.callable(getattr(self, attr)):
- setattr(self, attr, getattr(self, attr)())
-
- # in the case that InstrumentedAttributes were used to construct
- # primaryjoin or secondaryjoin, remove the "_orm_adapt"
- # annotation so these interact with Query in the same way as the
- # original Table-bound Column objects
-
- for attr in 'primaryjoin', 'secondaryjoin':
- val = getattr(self, attr)
- if val is not None:
- setattr(self, attr, _orm_deannotate(
- expression._only_column_elements(val, attr))
- )
- if self.order_by is not False and self.order_by is not None:
- self.order_by = [expression._only_column_elements(x, "order_by") for x in
- util.to_list(self.order_by)]
- self._user_defined_foreign_keys = \
- util.column_set(expression._only_column_elements(x, "foreign_keys") for x in
- util.to_column_set(self._user_defined_foreign_keys))
- self.remote_side = \
- util.column_set(expression._only_column_elements(x, "remote_side") for x in
- util.to_column_set(self.remote_side))
- if not self.parent.concrete:
- for inheriting in self.parent.iterate_to_root():
- if inheriting is not self.parent \
- and inheriting.has_property(self.key):
- util.warn("Warning: relationship '%s' on mapper "
- "'%s' supersedes the same relationship "
- "on inherited mapper '%s'; this can "
- "cause dependency issues during flush"
- % (self.key, self.parent, inheriting))
-
- # TODO: remove 'self.table'
-
- self.target = self.table = self.mapper.mapped_table
- if self.cascade.delete_orphan:
- if self.parent.class_ is self.mapper.class_:
- raise sa_exc.ArgumentError("In relationship '%s', "
- "can't establish 'delete-orphan' cascade rule "
- "on a self-referential relationship. You "
- "probably want cascade='all', which includes "
- "delete cascading but not orphan detection."
- % str(self))
- self.mapper.primary_mapper().delete_orphans.append((self.key,
- self.parent.class_))
-
- def _determine_joins(self):
- if self.secondaryjoin is not None and self.secondary is None:
- raise sa_exc.ArgumentError("Property '" + self.key
- + "' specified with secondary join condition but "
- "no secondary argument")
-
- # if join conditions were not specified, figure them out based
- # on foreign keys
-
- def _search_for_join(mapper, table):
-
- # find a join between the given mapper's mapped table and
- # the given table. will try the mapper's local table first
- # for more specificity, then if not found will try the more
- # general mapped table, which in the case of inheritance is
- # a join.
-
- try:
- return join_condition(mapper.local_table, table)
- except sa_exc.ArgumentError, e:
- return join_condition(mapper.mapped_table, table)
-
- try:
- if self.secondary is not None:
- if self.secondaryjoin is None:
- self.secondaryjoin = _search_for_join(self.mapper,
- self.secondary)
- if self.primaryjoin is None:
- self.primaryjoin = _search_for_join(self.parent,
- self.secondary)
- else:
- if self.primaryjoin is None:
- self.primaryjoin = _search_for_join(self.parent,
- self.target)
- except sa_exc.ArgumentError, e:
- raise sa_exc.ArgumentError("Could not determine join "
- "condition between parent/child tables on "
- "relationship %s. Specify a 'primaryjoin' "
- "expression. If 'secondary' is present, "
- "'secondaryjoin' is needed as well."
- % self)
-
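-    # e.g., a sketch of the explicit 'primaryjoin' that the error above
-    # asks for, assuming typical ``users`` / ``addresses`` tables:
-    #
-    #     addresses = relationship(Address,
-    #         primaryjoin=users.c.id == addresses.c.user_id)
-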
- def _col_is_part_of_mappings(self, column):
- if self.secondary is None:
- return self.parent.mapped_table.c.contains_column(column) or \
- self.target.c.contains_column(column)
- else:
- return self.parent.mapped_table.c.contains_column(column) or \
- self.target.c.contains_column(column) or \
- self.secondary.c.contains_column(column) is not None
-
- def _sync_pairs_from_join(self, join_condition, primary):
- """Given a join condition, figure out what columns are foreign
- and are part of a binary "equated" condition to their referenced
- columns, and convert into a list of tuples of (primary col->foreign col).
-
-        Makes several attempts to diagnose failures: whether columns are
-        compared using "=" or other comparators (in which case
-        viewonly=True is suggested), whether columns are present but not
-        part of the expected mappings, whether columns lack any
-        :class:`.ForeignKey` information, or whether the ``foreign_keys``
-        argument is being used incorrectly.
-
- """
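-        # e.g., a sketch assuming typical users/addresses tables: for a
-        # primaryjoin of users.c.id == addresses.c.user_id with a
-        # ForeignKey on addresses.c.user_id, criterion_as_pairs() below
-        # returns [(users.c.id, addresses.c.user_id)].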
- eq_pairs = criterion_as_pairs(join_condition,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- any_operator=self.viewonly)
-
- eq_pairs = [(l, r) for (l, r) in eq_pairs
- if self._col_is_part_of_mappings(l)
- and self._col_is_part_of_mappings(r)
- or self.viewonly and r in self._user_defined_foreign_keys]
-
- if not eq_pairs and \
- self.secondary is not None and \
- not self._user_defined_foreign_keys:
- fks = set(self.secondary.c)
- eq_pairs = criterion_as_pairs(join_condition,
- consider_as_foreign_keys=fks,
- any_operator=self.viewonly)
-
- eq_pairs = [(l, r) for (l, r) in eq_pairs
- if self._col_is_part_of_mappings(l)
- and self._col_is_part_of_mappings(r)
- or self.viewonly and r in fks]
- if eq_pairs:
- util.warn("No ForeignKey objects were present "
- "in secondary table '%s'. Assumed referenced "
- "foreign key columns %s for join condition '%s' "
- "on relationship %s" % (
- self.secondary.description,
- ", ".join(sorted(["'%s'" % col for col in fks])),
- join_condition,
- self
- ))
-
- if not eq_pairs:
- if not self.viewonly and criterion_as_pairs(join_condition,
- consider_as_foreign_keys=self._user_defined_foreign_keys,
- any_operator=True):
-
- err = "Could not locate any "\
- "foreign-key-equated, locally mapped column "\
- "pairs for %s "\
- "condition '%s' on relationship %s." % (
- primary and 'primaryjoin' or 'secondaryjoin',
- join_condition,
- self
- )
-
- if not self._user_defined_foreign_keys:
- err += " Ensure that the "\
- "referencing Column objects have a "\
- "ForeignKey present, or are otherwise part "\
- "of a ForeignKeyConstraint on their parent "\
- "Table, or specify the foreign_keys parameter "\
- "to this relationship."
-
- err += " For more "\
- "relaxed rules on join conditions, the "\
- "relationship may be marked as viewonly=True."
-
- raise sa_exc.ArgumentError(err)
- else:
- if self._user_defined_foreign_keys:
- raise sa_exc.ArgumentError("Could not determine "
- "relationship direction for %s condition "
- "'%s', on relationship %s, using manual "
- "'foreign_keys' setting. Do the columns "
- "in 'foreign_keys' represent all, and "
- "only, the 'foreign' columns in this join "
- "condition? Does the %s Table already "
- "have adequate ForeignKey and/or "
- "ForeignKeyConstraint objects established "
- "(in which case 'foreign_keys' is usually "
- "unnecessary)?"
- % (
- primary and 'primaryjoin' or 'secondaryjoin',
- join_condition,
- self,
- primary and 'mapped' or 'secondary'
- ))
- else:
- raise sa_exc.ArgumentError("Could not determine "
- "relationship direction for %s condition "
- "'%s', on relationship %s. Ensure that the "
- "referencing Column objects have a "
- "ForeignKey present, or are otherwise part "
- "of a ForeignKeyConstraint on their parent "
- "Table, or specify the foreign_keys parameter "
- "to this relationship."
- % (
- primary and 'primaryjoin' or 'secondaryjoin',
- join_condition,
- self
- ))
- return eq_pairs
-
- def _determine_synchronize_pairs(self):
- if self.local_remote_pairs:
- if not self._user_defined_foreign_keys:
- raise sa_exc.ArgumentError('foreign_keys argument is '
- 'required with _local_remote_pairs argument')
- self.synchronize_pairs = []
- for l, r in self.local_remote_pairs:
- if r in self._user_defined_foreign_keys:
- self.synchronize_pairs.append((l, r))
- elif l in self._user_defined_foreign_keys:
- self.synchronize_pairs.append((r, l))
- else:
- eq_pairs = self._sync_pairs_from_join(self.primaryjoin, True)
- self.synchronize_pairs = eq_pairs
- if self.secondaryjoin is not None:
- sq_pairs = self._sync_pairs_from_join(self.secondaryjoin, False)
- self.secondary_synchronize_pairs = sq_pairs
- else:
- self.secondary_synchronize_pairs = None
- self._calculated_foreign_keys = util.column_set(r for (l, r) in
- self.synchronize_pairs)
- if self.secondary_synchronize_pairs:
- self._calculated_foreign_keys.update(r for (l, r) in
- self.secondary_synchronize_pairs)
-
- def _determine_direction(self):
- if self.secondaryjoin is not None:
- self.direction = MANYTOMANY
- elif self._refers_to_parent_table():
-
- # self referential defaults to ONETOMANY unless the "remote"
- # side is present and does not reference any foreign key
- # columns
-
- if self.local_remote_pairs:
- remote = [r for (l, r) in self.local_remote_pairs]
- elif self.remote_side:
- remote = self.remote_side
- else:
- remote = None
- if not remote or self._calculated_foreign_keys.difference(l for (l,
- r) in self.synchronize_pairs).intersection(remote):
- self.direction = ONETOMANY
- else:
- self.direction = MANYTOONE
- else:
- foreign_keys = [f for (c, f) in self.synchronize_pairs]
- parentcols = util.column_set(self.parent.mapped_table.c)
- targetcols = util.column_set(self.mapper.mapped_table.c)
-
- # fk collection which suggests ONETOMANY.
-
- onetomany_fk = targetcols.intersection(foreign_keys)
-
- # fk collection which suggests MANYTOONE.
-
- manytoone_fk = parentcols.intersection(foreign_keys)
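-
-            # e.g., a sketch assuming typical users/addresses tables with
-            # User as parent: foreign_keys is [addresses.c.user_id], which
-            # lies in the target (addresses) table, so onetomany_fk is
-            # non-empty and the direction comes out ONETOMANY.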
- if not onetomany_fk and not manytoone_fk:
-                raise sa_exc.ArgumentError("Can't determine relationship"
-                    " direction for relationship '%s' - foreign "
- "key columns are present in neither the parent "
- "nor the child's mapped tables" % self)
- elif onetomany_fk and manytoone_fk:
-
- # fks on both sides. do the same test only based on the
- # local side.
-
- referents = [c for (c, f) in self.synchronize_pairs]
- onetomany_local = parentcols.intersection(referents)
- manytoone_local = targetcols.intersection(referents)
- if onetomany_local and not manytoone_local:
- self.direction = ONETOMANY
- elif manytoone_local and not onetomany_local:
- self.direction = MANYTOONE
- elif onetomany_fk:
- self.direction = ONETOMANY
- elif manytoone_fk:
- self.direction = MANYTOONE
- if not self.direction:
- raise sa_exc.ArgumentError("Can't determine relationship"
- " direction for relationship '%s' - foreign "
- "key columns are present in both the parent "
- "and the child's mapped tables. Specify "
- "'foreign_keys' argument." % self)
- if self.cascade.delete_orphan and not self.single_parent \
- and (self.direction is MANYTOMANY or self.direction
- is MANYTOONE):
- util.warn('On %s, delete-orphan cascade is not supported '
- 'on a many-to-many or many-to-one relationship '
- 'when single_parent is not set. Set '
- 'single_parent=True on the relationship().'
- % self)
- if self.direction is MANYTOONE and self.passive_deletes:
- util.warn("On %s, 'passive_deletes' is normally configured "
- "on one-to-many, one-to-one, many-to-many "
- "relationships only."
- % self)
-
- def _determine_local_remote_pairs(self):
- if not self.local_remote_pairs:
- if self.remote_side:
- if self.direction is MANYTOONE:
- self.local_remote_pairs = [(r, l) for (l, r) in
- criterion_as_pairs(self.primaryjoin,
- consider_as_referenced_keys=self.remote_side,
- any_operator=True)]
- else:
- self.local_remote_pairs = \
- criterion_as_pairs(self.primaryjoin,
- consider_as_foreign_keys=self.remote_side,
- any_operator=True)
- if not self.local_remote_pairs:
- raise sa_exc.ArgumentError('Relationship %s could '
- 'not determine any local/remote column '
- 'pairs from remote side argument %r'
- % (self, self.remote_side))
- else:
- if self.viewonly:
- eq_pairs = self.synchronize_pairs
- if self.secondaryjoin is not None:
- eq_pairs += self.secondary_synchronize_pairs
- else:
- eq_pairs = criterion_as_pairs(self.primaryjoin,
- consider_as_foreign_keys=self._calculated_foreign_keys,
- any_operator=True)
- if self.secondaryjoin is not None:
- eq_pairs += \
- criterion_as_pairs(self.secondaryjoin,
- consider_as_foreign_keys=self._calculated_foreign_keys,
- any_operator=True)
- eq_pairs = [(l, r) for (l, r) in eq_pairs
- if self._col_is_part_of_mappings(l)
- and self._col_is_part_of_mappings(r)]
- if self.direction is MANYTOONE:
- self.local_remote_pairs = [(r, l) for (l, r) in
- eq_pairs]
- else:
- self.local_remote_pairs = eq_pairs
- elif self.remote_side:
- raise sa_exc.ArgumentError('remote_side argument is '
- 'redundant against more detailed '
-                    '_local_remote_pairs argument.')
- for l, r in self.local_remote_pairs:
- if self.direction is ONETOMANY \
- and not self._col_is_part_of_mappings(l):
- raise sa_exc.ArgumentError("Local column '%s' is not "
- "part of mapping %s. Specify remote_side "
- "argument to indicate which column lazy join "
- "condition should compare against." % (l,
- self.parent))
- elif self.direction is MANYTOONE \
- and not self._col_is_part_of_mappings(r):
- raise sa_exc.ArgumentError("Remote column '%s' is not "
- "part of mapping %s. Specify remote_side "
- "argument to indicate which column lazy join "
- "condition should bind." % (r, self.mapper))
- self.local_side, self.remote_side = [util.ordered_column_set(x)
- for x in zip(*list(self.local_remote_pairs))]
-
- def _assert_is_primary(self):
- if not self.is_primary() \
- and not mapper.class_mapper(self.parent.class_,
- compile=False).has_property(self.key):
- raise sa_exc.ArgumentError("Attempting to assign a new "
- "relationship '%s' to a non-primary mapper on "
- "class '%s'. New relationships can only be added "
- "to the primary mapper, i.e. the very first mapper "
- "created for class '%s' " % (self.key,
- self.parent.class_.__name__,
- self.parent.class_.__name__))
-
- def _generate_backref(self):
- if not self.is_primary():
- return
- if self.backref is not None and not self.back_populates:
- if isinstance(self.backref, basestring):
- backref_key, kwargs = self.backref, {}
- else:
- backref_key, kwargs = self.backref
- mapper = self.mapper.primary_mapper()
- if mapper.has_property(backref_key):
- raise sa_exc.ArgumentError("Error creating backref "
- "'%s' on relationship '%s': property of that "
- "name exists on mapper '%s'" % (backref_key,
- self, mapper))
- if self.secondary is not None:
- pj = kwargs.pop('primaryjoin', self.secondaryjoin)
- sj = kwargs.pop('secondaryjoin', self.primaryjoin)
- else:
- pj = kwargs.pop('primaryjoin', self.primaryjoin)
- sj = kwargs.pop('secondaryjoin', None)
- if sj:
- raise sa_exc.InvalidRequestError(
- "Can't assign 'secondaryjoin' on a backref against "
- "a non-secondary relationship."
- )
- foreign_keys = kwargs.pop('foreign_keys',
- self._user_defined_foreign_keys)
- parent = self.parent.primary_mapper()
- kwargs.setdefault('viewonly', self.viewonly)
- kwargs.setdefault('post_update', self.post_update)
- kwargs.setdefault('passive_updates', self.passive_updates)
- self.back_populates = backref_key
- relationship = RelationshipProperty(
- parent,
- self.secondary,
- pj,
- sj,
- foreign_keys=foreign_keys,
- back_populates=self.key,
- **kwargs
- )
- mapper._configure_property(backref_key, relationship)
- if self.back_populates:
- self._add_reverse_property(self.back_populates)
-
- def _post_init(self):
- self.logger.info('%s setup primary join %s', self,
- self.primaryjoin)
- self.logger.info('%s setup secondary join %s', self,
- self.secondaryjoin)
- self.logger.info('%s synchronize pairs [%s]', self,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.synchronize_pairs))
- self.logger.info('%s secondary synchronize pairs [%s]', self,
- ','.join('(%s => %s)' % (l, r) for (l, r) in
- self.secondary_synchronize_pairs or []))
- self.logger.info('%s local/remote pairs [%s]', self,
- ','.join('(%s / %s)' % (l, r) for (l, r) in
- self.local_remote_pairs))
- self.logger.info('%s relationship direction %s', self,
- self.direction)
- if self.uselist is None:
- self.uselist = self.direction is not MANYTOONE
- if not self.viewonly:
- self._dependency_processor = \
- dependency.DependencyProcessor.from_relationship(self)
-
- @util.memoized_property
- def _use_get(self):
-        """memoize the 'use_get' attribute of this RelationshipProperty's
- lazyloader."""
-
- strategy = self._get_strategy(strategies.LazyLoader)
- return strategy.use_get
-
- def _refers_to_parent_table(self):
- pt = self.parent.mapped_table
- mt = self.mapper.mapped_table
- for c, f in self.synchronize_pairs:
-            if (
-                pt.is_derived_from(c.table) and
-                pt.is_derived_from(f.table) and
-                mt.is_derived_from(c.table) and
-                mt.is_derived_from(f.table)
-            ):
-                return True
-        return False
-
- def _is_self_referential(self):
- return self.mapper.common_parent(self.parent)
-
- def per_property_preprocessors(self, uow):
- if not self.viewonly and self._dependency_processor:
- self._dependency_processor.per_property_preprocessors(uow)
-
- def _create_joins(self, source_polymorphic=False,
- source_selectable=None, dest_polymorphic=False,
- dest_selectable=None, of_type=None):
- if source_selectable is None:
- if source_polymorphic and self.parent.with_polymorphic:
- source_selectable = self.parent._with_polymorphic_selectable
-
- aliased = False
- if dest_selectable is None:
- if dest_polymorphic and self.mapper.with_polymorphic:
- dest_selectable = self.mapper._with_polymorphic_selectable
- aliased = True
- else:
- dest_selectable = self.mapper.mapped_table
-
- if self._is_self_referential() and source_selectable is None:
- dest_selectable = dest_selectable.alias()
- aliased = True
- else:
- aliased = True
-
- aliased = aliased or (source_selectable is not None)
-
- primaryjoin, secondaryjoin, secondary = self.primaryjoin, \
- self.secondaryjoin, self.secondary
-
- # adjust the join condition for single table inheritance,
- # in the case that the join is to a subclass
- # this is analogous to the "_adjust_for_single_table_inheritance()"
- # method in Query.
-
- dest_mapper = of_type or self.mapper
-
- single_crit = dest_mapper._single_table_criterion
- if single_crit is not None:
- if secondaryjoin is not None:
- secondaryjoin = secondaryjoin & single_crit
- else:
- primaryjoin = primaryjoin & single_crit
-
- if aliased:
- if secondary is not None:
- secondary = secondary.alias()
- primary_aliasizer = ClauseAdapter(secondary)
- if dest_selectable is not None:
- secondary_aliasizer = \
- ClauseAdapter(dest_selectable,
- equivalents=self.mapper._equivalent_columns).\
- chain(primary_aliasizer)
- else:
- secondary_aliasizer = primary_aliasizer
- if source_selectable is not None:
- primary_aliasizer = \
- ClauseAdapter(secondary).\
- chain(ClauseAdapter(source_selectable,
- equivalents=self.parent._equivalent_columns))
- secondaryjoin = \
- secondary_aliasizer.traverse(secondaryjoin)
- else:
- if dest_selectable is not None:
- primary_aliasizer = ClauseAdapter(dest_selectable,
- exclude=self.local_side,
- equivalents=self.mapper._equivalent_columns)
- if source_selectable is not None:
- primary_aliasizer.chain(
- ClauseAdapter(source_selectable,
- exclude=self.remote_side,
- equivalents=self.parent._equivalent_columns))
- elif source_selectable is not None:
- primary_aliasizer = \
- ClauseAdapter(source_selectable,
- exclude=self.remote_side,
- equivalents=self.parent._equivalent_columns)
- secondary_aliasizer = None
- primaryjoin = primary_aliasizer.traverse(primaryjoin)
- target_adapter = secondary_aliasizer or primary_aliasizer
- target_adapter.include = target_adapter.exclude = None
- else:
- target_adapter = None
- if source_selectable is None:
- source_selectable = self.parent.local_table
- if dest_selectable is None:
- dest_selectable = self.mapper.local_table
- return (
- primaryjoin,
- secondaryjoin,
- source_selectable,
- dest_selectable,
- secondary,
- target_adapter,
- )
-
-
-PropertyLoader = RelationProperty = RelationshipProperty
-log.class_logger(RelationshipProperty)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/query.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/query.py
deleted file mode 100755
index 54e864ab..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/query.py
+++ /dev/null
@@ -1,2936 +0,0 @@
-# orm/query.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""The Query class and support.
-
-Defines the :class:`.Query` class, the central
-construct used by the ORM to construct database queries.
-
-The :class:`.Query` class should not be confused with the
-:class:`.Select` class, which defines database
-SELECT operations at the SQL (non-ORM) level. ``Query`` differs from
-``Select`` in that it returns ORM-mapped objects and interacts with an
-ORM session, whereas the ``Select`` construct interacts directly with the
-database to return iterable result sets.
-
-"""
-
-from itertools import chain
-from operator import itemgetter
-
-from sqlalchemy import sql, util, log, schema
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.sql import expression, visitors, operators
-from sqlalchemy.orm import (
- attributes, interfaces, mapper, object_mapper, evaluator,
- )
-from sqlalchemy.orm.util import (
- AliasedClass, ORMAdapter, _entity_descriptor, _entity_info,
- _is_aliased_class, _is_mapped_class, _orm_columns, _orm_selectable,
-    join as orm_join, with_parent, _attr_as_key, aliased
- )
-
-
-__all__ = ['Query', 'QueryContext', 'aliased']
-
-
-def _generative(*assertions):
- """Mark a method as generative."""
-
- @util.decorator
- def generate(fn, *args, **kw):
- self = args[0]._clone()
- for assertion in assertions:
- assertion(self, fn.func_name)
- fn(self, *args[1:], **kw)
- return self
- return generate
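-
-# A sketch of the resulting pattern: each decorated method clones the
-# Query, runs its assertions against the clone, applies the method body
-# to the clone in place and returns it, so generative calls such as
-# q.filter(...).order_by(...) never mutate the original q.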
-
-class Query(object):
- """ORM-level SQL construction object.
-
- :class:`.Query` is the source of all SELECT statements generated by the
- ORM, both those formulated by end-user query operations as well as by
- high level internal operations such as related collection loading. It
- features a generative interface whereby successive calls return a new
- :class:`.Query` object, a copy of the former with additional
- criteria and options associated with it.
-
- :class:`.Query` objects are normally initially generated using the
- :meth:`~.Session.query` method of :class:`.Session`. For a full walkthrough
- of :class:`.Query` usage, see the :ref:`ormtutorial_toplevel`.
-
- """
-
- _enable_eagerloads = True
- _enable_assertions = True
- _with_labels = False
- _criterion = None
- _yield_per = None
- _lockmode = None
- _order_by = False
- _group_by = False
- _having = None
- _distinct = False
- _offset = None
- _limit = None
- _statement = None
- _correlate = frozenset()
- _populate_existing = False
- _version_check = False
- _autoflush = True
- _current_path = ()
- _only_load_props = None
- _refresh_state = None
- _from_obj = ()
- _select_from_entity = None
- _filter_aliases = None
- _from_obj_alias = None
- _joinpath = _joinpoint = util.immutabledict()
- _execution_options = util.immutabledict()
- _params = util.immutabledict()
- _attributes = util.immutabledict()
- _with_options = ()
- _with_hints = ()
- _enable_single_crit = True
-
- def __init__(self, entities, session=None):
- self.session = session
- self._polymorphic_adapters = {}
- self._set_entities(entities)
-
- def _set_entities(self, entities, entity_wrapper=None):
- if entity_wrapper is None:
- entity_wrapper = _QueryEntity
- self._entities = []
- for ent in util.to_list(entities):
- entity_wrapper(self, ent)
-
- self._setup_aliasizers(self._entities)
-
- def _setup_aliasizers(self, entities):
- if hasattr(self, '_mapper_adapter_map'):
- # usually safe to share a single map, but copying to prevent
- # subtle leaks if end-user is reusing base query with arbitrary
- # number of aliased() objects
- self._mapper_adapter_map = d = self._mapper_adapter_map.copy()
- else:
- self._mapper_adapter_map = d = {}
-
- for ent in entities:
- for entity in ent.entities:
- if entity not in d:
- mapper, selectable, is_aliased_class = \
- _entity_info(entity)
- if not is_aliased_class and mapper.with_polymorphic:
- with_polymorphic = mapper._with_polymorphic_mappers
- if mapper.mapped_table not in \
- self._polymorphic_adapters:
- self.__mapper_loads_polymorphically_with(mapper,
- sql_util.ColumnAdapter(
- selectable,
- mapper._equivalent_columns))
- adapter = None
- elif is_aliased_class:
- adapter = sql_util.ColumnAdapter(
- selectable,
- mapper._equivalent_columns)
- with_polymorphic = None
- else:
- with_polymorphic = adapter = None
-
- d[entity] = (mapper, adapter, selectable,
- is_aliased_class, with_polymorphic)
- ent.setup_entity(entity, *d[entity])
-
- def __mapper_loads_polymorphically_with(self, mapper, adapter):
- for m2 in mapper._with_polymorphic_mappers:
- self._polymorphic_adapters[m2] = adapter
- for m in m2.iterate_to_root():
- self._polymorphic_adapters[m.mapped_table] = \
- self._polymorphic_adapters[m.local_table] = \
- adapter
-
- def _set_select_from(self, *obj):
-
- fa = []
- for from_obj in obj:
- if isinstance(from_obj, expression._SelectBase):
- from_obj = from_obj.alias()
- fa.append(from_obj)
-
- self._from_obj = tuple(fa)
-
- if len(self._from_obj) == 1 and \
- isinstance(self._from_obj[0], expression.Alias):
- equivs = self.__all_equivs()
- self._from_obj_alias = sql_util.ColumnAdapter(
- self._from_obj[0], equivs)
-
- def _get_polymorphic_adapter(self, entity, selectable):
- self.__mapper_loads_polymorphically_with(entity.mapper,
- sql_util.ColumnAdapter(selectable,
- entity.mapper._equivalent_columns))
-
- def _reset_polymorphic_adapter(self, mapper):
- for m2 in mapper._with_polymorphic_mappers:
- self._polymorphic_adapters.pop(m2, None)
- for m in m2.iterate_to_root():
- self._polymorphic_adapters.pop(m.mapped_table, None)
- self._polymorphic_adapters.pop(m.local_table, None)
-
- def __adapt_polymorphic_element(self, element):
- if isinstance(element, expression.FromClause):
- search = element
- elif hasattr(element, 'table'):
- search = element.table
- else:
- search = None
-
- if search is not None:
- alias = self._polymorphic_adapters.get(search, None)
- if alias:
- return alias.adapt_clause(element)
-
- def _adapt_col_list(self, cols):
- return [
- self._adapt_clause(
- expression._literal_as_text(o),
- True, True)
- for o in cols
- ]
-
- @_generative()
- def _adapt_all_clauses(self):
- self._orm_only_adapt = False
-
- def _adapt_clause(self, clause, as_filter, orm_only):
- """Adapt incoming clauses to transformations which have been applied
- within this query."""
-
- adapters = []
-
- # do we adapt all expression elements or only those
- # tagged as 'ORM' constructs ?
- orm_only = getattr(self, '_orm_only_adapt', orm_only)
-
- if as_filter and self._filter_aliases:
- for fa in self._filter_aliases._visitor_iterator:
- adapters.append(
- (
- orm_only, fa.replace
- )
- )
-
- if self._from_obj_alias:
-            # for the "from obj" alias, apply an extra rule to the
-            # 'ORM only' check: if this query was generated from a
-            # subquery of itself, i.e. _from_selectable(), apply
-            # adaptation to all SQL constructs.
- adapters.append(
- (
- getattr(self, '_orm_only_from_obj_alias', orm_only),
- self._from_obj_alias.replace
- )
- )
-
- if self._polymorphic_adapters:
- adapters.append(
- (
- orm_only, self.__adapt_polymorphic_element
- )
- )
-
- if not adapters:
- return clause
-
- def replace(elem):
- if '_halt_adapt' in elem._annotations:
- return elem
-
- for _orm_only, adapter in adapters:
- # if 'orm only', look for ORM annotations
- # in the element before adapting.
- if not _orm_only or \
- '_orm_adapt' in elem._annotations or \
- "parententity" in elem._annotations:
-
- e = adapter(elem)
- if e is not None:
- return e
-
- return visitors.replacement_traverse(
- clause,
- {'column_collections':False},
- replace
- )
-
- def _entity_zero(self):
- return self._entities[0]
-
- def _mapper_zero(self):
- return self._select_from_entity or \
- self._entity_zero().entity_zero
-
- @property
- def _mapper_entities(self):
-        # TODO: this is wrong; it's hardcoded to "primary entity" when
-        # for the case of __all_equivs() it should not be.
-        # The name of this accessor is wrong too.
- for ent in self._entities:
- if hasattr(ent, 'primary_entity'):
- yield ent
-
- def _joinpoint_zero(self):
- return self._joinpoint.get(
- '_joinpoint_entity',
- self._mapper_zero()
- )
-
- def _mapper_zero_or_none(self):
- if not getattr(self._entities[0], 'primary_entity', False):
- return None
- return self._entities[0].mapper
-
- def _only_mapper_zero(self, rationale=None):
- if len(self._entities) > 1:
- raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query against a single mapper."
- )
- return self._mapper_zero()
-
- def _only_full_mapper_zero(self, methname):
- if len(self._entities) != 1:
- raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
- entity = self._entity_zero()
- if not hasattr(entity, 'primary_entity'):
- raise sa_exc.InvalidRequestError(
- "%s() can only be used against "
- "a single mapped class." % methname)
- return entity.entity_zero
-
- def _only_entity_zero(self, rationale=None):
- if len(self._entities) > 1:
- raise sa_exc.InvalidRequestError(
- rationale or
- "This operation requires a Query against a single mapper."
- )
- return self._entity_zero()
-
- def _generate_mapper_zero(self):
- if not getattr(self._entities[0], 'primary_entity', False):
- raise sa_exc.InvalidRequestError(
- "No primary mapper set up for this Query.")
- entity = self._entities[0]._clone()
- self._entities = [entity] + self._entities[1:]
- return entity
-
- def __all_equivs(self):
- equivs = {}
- for ent in self._mapper_entities:
- equivs.update(ent.mapper._equivalent_columns)
- return equivs
-
- def _get_condition(self):
- self._order_by = self._distinct = False
- return self._no_criterion_condition("get")
-
- def _no_criterion_condition(self, meth):
- if not self._enable_assertions:
- return
- if self._criterion is not None or \
- self._statement is not None or self._from_obj or \
- self._limit is not None or self._offset is not None or \
- self._group_by or self._order_by or self._distinct:
- raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
-
- self._from_obj = ()
- self._statement = self._criterion = None
- self._order_by = self._group_by = self._distinct = False
-
- def _no_clauseelement_condition(self, meth):
- if not self._enable_assertions:
- return
- if self._order_by:
- raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a "
- "Query with existing criterion. " % meth)
- self._no_criterion_condition(meth)
-
- def _no_statement_condition(self, meth):
- if not self._enable_assertions:
- return
- if self._statement:
- raise sa_exc.InvalidRequestError(
- ("Query.%s() being called on a Query with an existing full "
- "statement - can't apply criterion.") % meth)
-
- def _no_limit_offset(self, meth):
- if not self._enable_assertions:
- return
- if self._limit is not None or self._offset is not None:
- raise sa_exc.InvalidRequestError(
- "Query.%s() being called on a Query which already has LIMIT "
- "or OFFSET applied. To modify the row-limited results of a "
-                "Query, call from_self() first. "
- "Otherwise, call %s() before limit() or offset() are applied."
- % (meth, meth)
- )
-
- def _no_select_modifiers(self, meth):
- if not self._enable_assertions:
- return
- for attr, methname, notset in (
- ('_limit', 'limit()', None),
- ('_offset', 'offset()', None),
- ('_order_by', 'order_by()', False),
- ('_group_by', 'group_by()', False),
- ('_distinct', 'distinct()', False),
- ):
- if getattr(self, attr) is not notset:
- raise sa_exc.InvalidRequestError(
- "Can't call Query.%s() when %s has been called" %
- (meth, methname)
- )
-
- def _get_options(self, populate_existing=None,
- version_check=None,
- only_load_props=None,
- refresh_state=None):
- if populate_existing:
- self._populate_existing = populate_existing
- if version_check:
- self._version_check = version_check
- if refresh_state:
- self._refresh_state = refresh_state
- if only_load_props:
- self._only_load_props = set(only_load_props)
- return self
-
- def _clone(self):
- cls = self.__class__
- q = cls.__new__(cls)
- q.__dict__ = self.__dict__.copy()
- return q
-
- @property
- def statement(self):
- """The full SELECT statement represented by this Query.
-
- The statement by default will not have disambiguating labels
- applied to the construct unless with_labels(True) is called
- first.
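-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            print session.query(User).filter(User.id == 5).statement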
-
- """
-
- stmt = self._compile_context(labels=self._with_labels).\
- statement
- if self._params:
- stmt = stmt.params(self._params)
- return stmt._annotate({'_halt_adapt': True})
-
- def subquery(self, name=None):
- """return the full SELECT statement represented by this :class:`.Query`,
- embedded within an :class:`.Alias`.
-
- Eager JOIN generation within the query is disabled.
-
- The statement will not have disambiguating labels
- applied to the list of selected columns unless the
- :meth:`.Query.with_labels` method is used to generate a new
- :class:`.Query` with the option enabled.
-
- :param name: string name to be assigned as the alias;
- this is passed through to :meth:`.FromClause.alias`.
- If ``None``, a name will be deterministically generated
- at compile time.
-
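-        e.g., a minimal sketch (assuming a mapped ``Address`` class with
-        an ``email_address`` column)::
-
-            adalias = session.query(Address).subquery()
-            q = session.query(adalias.c.email_address)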
-
- """
- return self.enable_eagerloads(False).statement.alias(name=name)
-
- def label(self, name):
- """Return the full SELECT statement represented by this :class:`.Query`, converted
- to a scalar subquery with a label of the given name.
-
- Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.label`.
-
- New in 0.6.5.
-
- """
-
- return self.enable_eagerloads(False).statement.label(name)
-
-
- def as_scalar(self):
- """Return the full SELECT statement represented by this :class:`.Query`, converted
- to a scalar subquery.
-
- Analogous to :meth:`sqlalchemy.sql._SelectBaseMixin.as_scalar`.
-
- New in 0.6.5.
-
- """
-
- return self.enable_eagerloads(False).statement.as_scalar()
-
-
- def __clause_element__(self):
- return self.enable_eagerloads(False).with_labels().statement
-
- @_generative()
- def enable_eagerloads(self, value):
- """Control whether or not eager joins and subqueries are
- rendered.
-
- When set to False, the returned Query will not render
- eager joins regardless of :func:`~sqlalchemy.orm.joinedload`,
- :func:`~sqlalchemy.orm.subqueryload` options
- or mapper-level ``lazy='joined'``/``lazy='subquery'``
- configurations.
-
- This is used primarily when nesting the Query's
- statement into a subquery or other
- selectable.
-
- """
- self._enable_eagerloads = value
-
- @_generative()
- def with_labels(self):
- """Apply column labels to the return value of Query.statement.
-
- Indicates that this Query's `statement` accessor should return
- a SELECT statement that applies labels to all columns in the
- form <tablename>_<columnname>; this is commonly used to
- disambiguate columns from multiple tables which have the same
- name.
-
- When the `Query` actually issues SQL to load rows, it always
- uses column labeling.
-
- """
- self._with_labels = True
-
- @_generative()
- def enable_assertions(self, value):
- """Control whether assertions are generated.
-
- When set to False, the returned Query will
- not assert its state before certain operations,
- including that LIMIT/OFFSET has not been applied
- when filter() is called, no criterion exists
- when get() is called, and no "from_statement()"
- exists when filter()/order_by()/group_by() etc.
- is called. This more permissive mode is used by
- custom Query subclasses to specify criterion or
- other modifiers outside of the usual usage patterns.
-
- Care should be taken to ensure that the usage
- pattern is even possible. A statement applied
- by from_statement() will override any criterion
- set by filter() or order_by(), for example.
-
- """
- self._enable_assertions = value
-
- @property
- def whereclause(self):
- """A readonly attribute which returns the current WHERE criterion for this Query.
-
- This returned value is a SQL expression construct, or ``None`` if no
- criterion has been established.
-
- """
- return self._criterion
-
- @_generative()
- def _with_current_path(self, path):
- """indicate that this query applies to objects loaded
- within a certain path.
-
- Used by deferred loaders (see strategies.py) which transfer
- query options from an originating query to a newly generated
- query intended for the deferred load.
-
- """
- self._current_path = path
-
- @_generative(_no_clauseelement_condition)
- def with_polymorphic(self,
- cls_or_mappers,
- selectable=None, discriminator=None):
- """Load columns for descendant mappers of this Query's mapper.
-
- Using this method will ensure that each descendant mapper's
- tables are included in the FROM clause, and will allow filter()
- criterion to be used against those tables. The resulting
- instances will also have those columns already loaded so that
- no "post fetch" of those columns will be required.
-
- :param cls_or_mappers: a single class or mapper, or list of
- class/mappers, which inherit from this Query's mapper.
- Alternatively, it may also be the string ``'*'``, in which case
- all descending mappers will be added to the FROM clause.
-
- :param selectable: a table or select() statement that will
- be used in place of the generated FROM clause. This argument is
- required if any of the desired mappers use concrete table
- inheritance, since SQLAlchemy currently cannot generate UNIONs
- among tables automatically. If used, the ``selectable`` argument
- must represent the full set of tables and columns mapped by every
- desired mapper. Otherwise, the unaccounted mapped columns will
- result in their table being appended directly to the FROM clause
- which will usually lead to incorrect results.
-
- :param discriminator: a column to be used as the "discriminator"
- column for the given selectable. If not given, the polymorphic_on
- attribute of the mapper will be used, if any. This is useful for
- mappers that don't have polymorphic loading behavior by default,
- such as concrete table mappers.
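-
-        e.g., a minimal sketch (assuming an ``Employee`` base mapped with
-        ``Engineer`` and ``Manager`` subclasses)::
-
-            session.query(Employee).with_polymorphic([Engineer, Manager])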
-
- """
- entity = self._generate_mapper_zero()
- entity.set_with_polymorphic(self,
- cls_or_mappers,
- selectable=selectable,
- discriminator=discriminator)
-
- @_generative()
- def yield_per(self, count):
- """Yield only ``count`` rows at a time.
-
- WARNING: use this method with caution; if the same instance is present
- in more than one batch of rows, end-user changes to attributes will be
- overwritten.
-
- In particular, it's usually impossible to use this setting with
- eagerly loaded collections (i.e. any lazy='joined' or 'subquery')
- since those collections will be cleared for a new load when
- encountered in a subsequent result batch. In the case of 'subquery'
- loading, the full result for all rows is fetched which generally
- defeats the purpose of :meth:`~sqlalchemy.orm.query.Query.yield_per`.
-
- Also note that many DBAPIs do not "stream" results, pre-buffering
- all rows before making them available, including mysql-python and
- psycopg2. :meth:`~sqlalchemy.orm.query.Query.yield_per` will also
- set the ``stream_results`` execution
- option to ``True``, which currently is only understood by psycopg2
- and causes server side cursors to be used.
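-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            for user in session.query(User).yield_per(100):
-                print user.name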
-
- """
- self._yield_per = count
- self._execution_options = self._execution_options.copy()
- self._execution_options['stream_results'] = True
-
- def get(self, ident):
- """Return an instance of the object based on the
- given identifier, or ``None`` if not found.
-
- The ``ident`` argument is a scalar or tuple of
- primary key column values
- in the order of the mapper's "primary key" setting, which
- defaults to the list of primary key columns for the
- mapped :class:`.Table`.
-
- :meth:`get` returns only a single mapped instance, or
- ``None``. It is not intended to return rows or scalar
- column values, therefore the :class:`.Query` must be
- constructed only against a single mapper or mapped class,
- not a SQL expression or multiple entities.
- Other usages raise an error.
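-
-        e.g., a minimal sketch (assuming a mapped ``User`` class with an
-        integer primary key)::
-
-            user = session.query(User).get(5)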
-
- """
-
- # convert composite types to individual args
- if hasattr(ident, '__composite_values__'):
- ident = ident.__composite_values__()
-
- ident = util.to_list(ident)
-
- mapper = self._only_full_mapper_zero("get")
-
- if len(ident) != len(mapper.primary_key):
- raise sa_exc.InvalidRequestError(
- "Incorrect number of values in identifier to formulate "
- "primary key for query.get(); primary key columns are %s" %
- ','.join("'%s'" % c for c in mapper.primary_key))
-
- key = mapper.identity_key_from_primary_key(ident)
-
- if not self._populate_existing and \
- not mapper.always_refresh and \
- self._lockmode is None:
-
- instance = self._get_from_identity(self.session, key, False)
- if instance is not None:
- # reject calls for id in identity map but class
- # mismatch.
- if not issubclass(instance.__class__, mapper.class_):
- return None
- return instance
-
- return self._load_on_ident(key)
-
- @_generative()
- def correlate(self, *args):
- """Return a :class:`.Query` construct which will correlate the given
- FROM clauses to that of an enclosing :class:`.Query` or
- :func:`~.expression.select`.
-
- The method here accepts mapped classes, :func:`.aliased` constructs,
- and :func:`.mapper` constructs as arguments, which are resolved into
- expression constructs, in addition to appropriate expression
- constructs.
-
- The correlation arguments are ultimately passed to
- :meth:`.Select.correlate` after coercion to expression constructs.
-
- The correlation arguments take effect in such cases
- as when :meth:`.Query.from_self` is used, or when
- a subquery as returned by :meth:`.Query.subquery` is
- embedded in another :func:`~.expression.select` construct.
-
- """
-
- self._correlate = self._correlate.union(
- _orm_selectable(s)
- for s in args)
-
- @_generative()
- def autoflush(self, setting):
- """Return a Query with a specific 'autoflush' setting.
-
- Note that a Session with autoflush=False will
- not autoflush, even if this flag is set to True at the
- Query level. Therefore this flag is usually used only
- to disable autoflush for a specific Query.
-
- """
- self._autoflush = setting
-
- @_generative()
- def populate_existing(self):
- """Return a :class:`.Query` that will expire and refresh all instances
- as they are loaded, or reused from the current :class:`.Session`.
-
- :meth:`.populate_existing` does not improve behavior when
- the ORM is used normally - the :class:`.Session` object's usual
- behavior of maintaining a transaction and expiring all attributes
- after rollback or commit handles object state automatically.
- This method is not intended for general use.
-
- """
- self._populate_existing = True
-
- def with_parent(self, instance, property=None):
- """Add filtering criterion that relates the given instance
- to a child object or collection, using its attribute state
- as well as an established :func:`.relationship()`
- configuration.
-
- The method uses the :func:`.with_parent` function to generate
- the clause, the result of which is passed to :meth:`.Query.filter`.
-
- Parameters are the same as :func:`.with_parent`, with the exception
- that the given property can be None, in which case a search is
- performed against this :class:`.Query` object's target mapper.
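-
-        e.g., a minimal sketch (assuming a ``User`` class with an
-        ``addresses`` relationship to ``Address``)::
-
-            session.query(Address).with_parent(someuser, User.addresses)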
-
- """
-
- if property is None:
- from sqlalchemy.orm import properties
- mapper = object_mapper(instance)
-
- for prop in mapper.iterate_properties:
- if isinstance(prop, properties.PropertyLoader) and \
- prop.mapper is self._mapper_zero():
- property = prop
- break
- else:
- raise sa_exc.InvalidRequestError(
- "Could not locate a property which relates instances "
- "of class '%s' to instances of class '%s'" %
- (
- self._mapper_zero().class_.__name__,
- instance.__class__.__name__)
- )
-
- return self.filter(with_parent(instance, property))
-
- @_generative()
- def add_entity(self, entity, alias=None):
- """add a mapped entity to the list of result columns
- to be returned."""
-
- if alias is not None:
- entity = aliased(entity, alias)
-
- self._entities = list(self._entities)
- m = _MapperEntity(self, entity)
- self._setup_aliasizers([m])
-
- @_generative()
- def with_session(self, session):
- """Return a :class:`Query` that will use the given :class:`.Session`.
-
- """
-
- self.session = session
-
- def from_self(self, *entities):
- """return a Query that selects from this Query's
- SELECT statement.
-
- \*entities - optional list of entities which will replace
- those being selected.
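-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            q = session.query(User).filter(User.name.like('e%')).\\
-                    limit(5).from_self().\\
-                    filter(User.id > 10)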
-
- """
- fromclause = self.with_labels().enable_eagerloads(False).\
- _enable_single_crit(False).\
- statement.correlate(None)
- q = self._from_selectable(fromclause)
- if entities:
- q._set_entities(entities)
- return q
-
- @_generative()
- def _enable_single_crit(self, val):
- self._enable_single_crit = val
-
- @_generative()
- def _from_selectable(self, fromclause):
- for attr in (
- '_statement', '_criterion',
- '_order_by', '_group_by',
- '_limit', '_offset',
- '_joinpath', '_joinpoint',
- '_distinct', '_having'
- ):
- self.__dict__.pop(attr, None)
- self._set_select_from(fromclause)
-
- # this enables clause adaptation for non-ORM
- # expressions.
- self._orm_only_from_obj_alias = False
-
- old_entities = self._entities
- self._entities = []
- for e in old_entities:
- e.adapt_to_selectable(self, self._from_obj[0])
-
- def values(self, *columns):
- """Return an iterator yielding result tuples corresponding
- to the given list of columns"""
-
- if not columns:
- return iter(())
- q = self._clone()
- q._set_entities(columns, entity_wrapper=_ColumnEntity)
- if not q._yield_per:
- q._yield_per = 10
- return iter(q)
- _values = values
-
- def value(self, column):
- """Return a scalar result corresponding to the given
- column expression."""
- try:
- # Py3K
- #return self.values(column).__next__()[0]
- # Py2K
- return self.values(column).next()[0]
- # end Py2K
- except StopIteration:
- return None
-
- @_generative()
- def with_entities(self, *entities):
- """Return a new :class:`.Query` replacing the SELECT list with the given
- entities.
-
- e.g.::
-
- # Users, filtered on some arbitrary criterion
- # and then ordered by related email address
- q = session.query(User).\\
- join(User.address).\\
- filter(User.name.like('%ed%')).\\
- order_by(Address.email)
-
- # given *only* User.id==5, Address.email, and 'q', what
-            # would the *next* User in the result be?
- subq = q.with_entities(Address.email).\\
- order_by(None).\\
- filter(User.id==5).\\
- subquery()
- q = q.join((subq, subq.c.email < Address.email)).\\
- limit(1)
-
- New in 0.6.5.
-
- """
- self._set_entities(entities)
-
-
- @_generative()
- def add_columns(self, *column):
- """Add one or more column expressions to the list
- of result columns to be returned."""
-
- self._entities = list(self._entities)
- l = len(self._entities)
- for c in column:
- _ColumnEntity(self, c)
- # _ColumnEntity may add many entities if the
- # given arg is a FROM clause
- self._setup_aliasizers(self._entities[l:])
-
- @util.pending_deprecation("0.7",
- ":meth:`.add_column` is superseded by :meth:`.add_columns`",
- False)
- def add_column(self, column):
- """Add a column expression to the list of result columns to be returned.
-
- Pending deprecation: :meth:`.add_column` will be superseded by
- :meth:`.add_columns`.
-
- """
-
- return self.add_columns(column)
-
- def options(self, *args):
- """Return a new Query object, applying the given list of
- mapper options.
-
- Most supplied options regard changing how column- and
- relationship-mapped attributes are loaded. See the sections
- :ref:`deferred` and :ref:`loading_toplevel` for reference
- documentation.
-
- """
- return self._options(False, *args)
-
- def _conditional_options(self, *args):
- return self._options(True, *args)
-
- @_generative()
- def _options(self, conditional, *args):
- # most MapperOptions write to the '_attributes' dictionary,
- # so copy that as well
- self._attributes = self._attributes.copy()
- opts = tuple(util.flatten_iterator(args))
- self._with_options = self._with_options + opts
- if conditional:
- for opt in opts:
- opt.process_query_conditionally(self)
- else:
- for opt in opts:
- opt.process_query(self)
-
- @_generative()
- def with_hint(self, selectable, text, dialect_name='*'):
- """Add an indexing hint for the given entity or selectable to
- this :class:`.Query`.
-
- Functionality is passed straight through to
- :meth:`~sqlalchemy.sql.expression.Select.with_hint`,
- with the addition that ``selectable`` can be a
-        :class:`.Table`, :class:`.Alias`, or ORM entity / mapped class,
-        etc.
- """
- mapper, selectable, is_aliased_class = _entity_info(selectable)
-
- self._with_hints += ((selectable, text, dialect_name),)
-
- @_generative()
- def execution_options(self, **kwargs):
- """ Set non-SQL options which take effect during execution.
-
- The options are the same as those accepted by
- :meth:`.Connection.execution_options`.
-
- Note that the ``stream_results`` execution option is enabled
- automatically if the :meth:`~sqlalchemy.orm.query.Query.yield_per()`
- method is used.
-
- """
- self._execution_options = self._execution_options.union(kwargs)
-
- @_generative()
- def with_lockmode(self, mode):
- """Return a new Query object with the specified locking mode."""
-
- self._lockmode = mode
-
- @_generative()
- def params(self, *args, **kwargs):
- """add values for bind parameters which may have been
- specified in filter().
-
- parameters may be specified using \**kwargs, or optionally a single
- dictionary as the first positional argument. The reason for both is
- that \**kwargs is convenient, however some parameter dictionaries
- contain unicode keys in which case \**kwargs cannot be used.
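-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            session.query(User).filter("name = :name").\\
-                params(name='ed').all()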
-
- """
- if len(args) == 1:
- kwargs.update(args[0])
- elif len(args) > 0:
- raise sa_exc.ArgumentError(
- "params() takes zero or one positional argument, "
- "which is a dictionary.")
- self._params = self._params.copy()
- self._params.update(kwargs)
-
- @_generative(_no_statement_condition, _no_limit_offset)
- def filter(self, criterion):
- """apply the given filtering criterion to the query and return
-        the newly resulting ``Query``.
-
-        The criterion is any sql.ClauseElement applicable to the WHERE
-        clause of a select.
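-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            session.query(User).filter(User.name == 'ed')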
-
- """
- if isinstance(criterion, basestring):
- criterion = sql.text(criterion)
-
- if criterion is not None and \
- not isinstance(criterion, sql.ClauseElement):
- raise sa_exc.ArgumentError(
- "filter() argument must be of type "
- "sqlalchemy.sql.ClauseElement or string")
-
- criterion = self._adapt_clause(criterion, True, True)
-
- if self._criterion is not None:
- self._criterion = self._criterion & criterion
- else:
- self._criterion = criterion
-
- def filter_by(self, **kwargs):
- """apply the given filtering criterion to the query and return
-        the newly resulting ``Query``.
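-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            session.query(User).filter_by(name='ed', fullname='Ed Jones')
-
-        """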
-
- clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
- for key, value in kwargs.iteritems()]
- return self.filter(sql.and_(*clauses))
-
- @_generative(_no_statement_condition, _no_limit_offset)
- def order_by(self, *criterion):
- """apply one or more ORDER BY criterion to the query and return
- the newly resulting ``Query``
-
- All existing ORDER BY settings can be suppressed by
- passing ``None`` - this will suppress any ORDER BY configured
- on mappers as well.
-
- Alternatively, an existing ORDER BY setting on the Query
- object can be entirely cancelled by passing ``False``
- as the value - use this before calling methods where
- an ORDER BY is invalid.
-
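-
-        e.g., a minimal sketch (assuming a mapped ``User`` class)::
-
-            session.query(User).order_by(User.name, User.id)
-
-            # suppress all ordering, including mapper-configured defaults
-            session.query(User).order_by(None)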
- """
-
- if len(criterion) == 1:
- if criterion[0] is False:
- if '_order_by' in self.__dict__:
- del self._order_by
- return
- if criterion[0] is None:
- self._order_by = None
- return
-
- criterion = self._adapt_col_list(criterion)
-
- if self._order_by is False or self._order_by is None:
- self._order_by = criterion
- else:
- self._order_by = self._order_by + criterion
-
- @_generative(_no_statement_condition, _no_limit_offset)
- def group_by(self, *criterion):
- """apply one or more GROUP BY criterion to the query and return
- the newly resulting ``Query``"""
-
- criterion = list(chain(*[_orm_columns(c) for c in criterion]))
-
- criterion = self._adapt_col_list(criterion)
-
- if self._group_by is False:
- self._group_by = criterion
- else:
- self._group_by = self._group_by + criterion
-
- @_generative(_no_statement_condition, _no_limit_offset)
- def having(self, criterion):
- """apply a HAVING criterion to the query and return the
- newly resulting ``Query``."""
-
- if isinstance(criterion, basestring):
- criterion = sql.text(criterion)
-
- if criterion is not None and \
- not isinstance(criterion, sql.ClauseElement):
- raise sa_exc.ArgumentError(
- "having() argument must be of type "
- "sqlalchemy.sql.ClauseElement or string")
-
- criterion = self._adapt_clause(criterion, True, True)
-
- if self._having is not None:
- self._having = self._having & criterion
- else:
- self._having = criterion
-
- def union(self, *q):
- """Produce a UNION of this Query against one or more queries.
-
- e.g.::
-
- q1 = sess.query(SomeClass).filter(SomeClass.foo=='bar')
- q2 = sess.query(SomeClass).filter(SomeClass.bar=='foo')
-
- q3 = q1.union(q2)
-
- The method accepts multiple Query objects so as to control
- the level of nesting. A series of ``union()`` calls such as::
-
- x.union(y).union(z).all()
-
- will nest on each ``union()``, and produces::
-
-            SELECT * FROM (SELECT * FROM (SELECT * FROM x UNION
-                            SELECT * FROM y) UNION SELECT * FROM z)
-
- Whereas::
-
- x.union(y, z).all()
-
- produces::
-
-            SELECT * FROM (SELECT * FROM x UNION SELECT * FROM y UNION
-                            SELECT * FROM z)
-
- Note that many database backends do not allow ORDER BY to
- be rendered on a query called within UNION, EXCEPT, etc.
- To disable all ORDER BY clauses including those configured
- on mappers, issue ``query.order_by(None)`` - the resulting
- :class:`.Query` object will not render ORDER BY within
- its SELECT statement.
-
- """
-
-
- return self._from_selectable(
- expression.union(*([self]+ list(q))))
-
- def union_all(self, *q):
- """Produce a UNION ALL of this Query against one or more queries.
-
- Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
- that method for usage examples.
-
- """
- return self._from_selectable(
- expression.union_all(*([self] + list(q)))
- )
-
- def intersect(self, *q):
- """Produce an INTERSECT of this Query against one or more queries.
-
- Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
- that method for usage examples.
-
- """
- return self._from_selectable(
- expression.intersect(*([self] + list(q)))
- )
-
- def intersect_all(self, *q):
- """Produce an INTERSECT ALL of this Query against one or more queries.
-
- Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
- that method for usage examples.
-
- """
- return self._from_selectable(
- expression.intersect_all(*([self] + list(q)))
- )
-
- def except_(self, *q):
- """Produce an EXCEPT of this Query against one or more queries.
-
- Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
- that method for usage examples.
-
- """
- return self._from_selectable(
- expression.except_(*([self] + list(q)))
- )
-
- def except_all(self, *q):
- """Produce an EXCEPT ALL of this Query against one or more queries.
-
- Works the same way as :meth:`~sqlalchemy.orm.query.Query.union`. See
- that method for usage examples.
-
- """
- return self._from_selectable(
- expression.except_all(*([self] + list(q)))
- )
-
- def join(self, *props, **kwargs):
- """Create a join against this ``Query`` object's criterion
- and apply generatively, returning the newly resulting ``Query``.
-
- Each element in \*props may be:
-
- * a string property name, i.e. "rooms". This will join along the
- relationship of the same name from this Query's "primary" mapper,
- if one is present.
-
- * a class-mapped attribute, i.e. Houses.rooms. This will create a
- join from "Houses" table to that of the "rooms" relationship.
-
- A two-element form of \*props may also be passed. In this form,
- the first element is a target class or selectable, the second
- is a string property name, class-mapped attribute, or clause
- construct representing an "ON" clause. This supersedes the
- previous "tuple" calling form - multiple join() calls should
- be used for multiple (target, onclause) pairs.
-
- e.g.::
-
- # join along string attribute names
- session.query(Company).join('employees')
- session.query(Company).join('employees', 'tasks')
-
- # join the Person entity to an alias of itself,
- # along the "friends" relationship
- PAlias = aliased(Person)
- session.query(Person).join(PAlias, Person.friends)
-
- # join from Houses to the "rooms" attribute on the
- # "Colonials" subclass of Houses, then join to the
- # "closets" relationship on Room
- session.query(Houses).join(Colonials.rooms, Room.closets)
-
- # join from Company entities to the "employees" collection,
- # using "people JOIN engineers" as the target. Then join
- # to the "computers" collection on the Engineer entity.
- session.query(Company).\\
- join(people.join(engineers), 'employees').\\
- join(Engineer.computers)
-
- # join from Articles to Keywords, using the "keywords" attribute.
- # assume this is a many-to-many relationship.
- session.query(Article).join(Article.keywords)
-
- # same thing, but spelled out entirely explicitly
- # including the association table.
- session.query(Article).join(article_keywords,
- Article.id==article_keywords.c.article_id).\\
- join(Keyword,
- Keyword.id==article_keywords.c.keyword_id)
-
- \**kwargs include:
-
- aliased - when joining, create anonymous aliases of each table.
- This is used for self-referential joins or multiple joins to the
- same table. Consider usage of the aliased(SomeClass) construct as
- a more explicit approach to this.
-
- from_joinpoint - the given join conditions will attempt
- to join from the right endpoint of the most recent join(),
- instead of from the query's root entity. I.e. any chain
- of joins, such as::
-
- query.join(a, b, c)
-
- is equivalent to::
-
- query.join(a).\\
- join(b, from_joinpoint=True).\\
- join(c, from_joinpoint=True)
-
- """
- aliased, from_joinpoint = kwargs.pop('aliased', False),\
- kwargs.pop('from_joinpoint', False)
- if kwargs:
- raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.iterkeys()))
- return self._join(props,
- outerjoin=False, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
-
- def outerjoin(self, *props, **kwargs):
- """Create a left outer join against this ``Query`` object's criterion
- and apply generatively, returning the newly resulting ``Query``.
-
- Usage is the same as the ``join()`` method.
-
- """
- aliased, from_joinpoint = kwargs.pop('aliased', False), \
- kwargs.pop('from_joinpoint', False)
- if kwargs:
- raise TypeError("unknown arguments: %s" %
- ','.join(kwargs.iterkeys()))
- return self._join(props,
- outerjoin=True, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
-
- @_generative(_no_statement_condition, _no_limit_offset)
- def _join(self, keys, outerjoin, create_aliases, from_joinpoint):
- """consumes arguments from join() or outerjoin(), places them into a
- consistent format with which to form the actual JOIN constructs.
-
- """
- self._polymorphic_adapters = self._polymorphic_adapters.copy()
-
- if not from_joinpoint:
- self._reset_joinpoint()
-
- if len(keys) == 2 and \
- isinstance(keys[0], (expression.FromClause,
- type, AliasedClass)) and \
- isinstance(keys[1], (basestring, expression.ClauseElement,
- interfaces.PropComparator)):
- # detect 2-arg form of join and
- # convert to a tuple.
- keys = (keys,)
-
- for arg1 in util.to_list(keys):
- if isinstance(arg1, tuple):
- # "tuple" form of join, multiple
- # tuples are accepted as well. The simpler
- # "2-arg" form is preferred. May deprecate
- # the "tuple" usage.
- arg1, arg2 = arg1
- else:
- arg2 = None
-
- # determine onclause/right_entity. there
- # is a little bit of legacy behavior still at work here
- # which means they might be in either order. may possibly
- # lock this down to (right_entity, onclause) in 0.6.
- if isinstance(arg1, (interfaces.PropComparator, basestring)):
- right_entity, onclause = arg2, arg1
- else:
- right_entity, onclause = arg1, arg2
-
- left_entity = prop = None
-
- if isinstance(onclause, basestring):
- left_entity = self._joinpoint_zero()
-
- descriptor = _entity_descriptor(left_entity, onclause)
- onclause = descriptor
-
- # check for q.join(Class.propname, from_joinpoint=True)
- # and Class is that of the current joinpoint
- elif from_joinpoint and \
- isinstance(onclause, interfaces.PropComparator):
- left_entity = onclause.parententity
-
- left_mapper, left_selectable, left_is_aliased = \
- _entity_info(self._joinpoint_zero())
- if left_mapper is left_entity:
- left_entity = self._joinpoint_zero()
- descriptor = _entity_descriptor(left_entity,
- onclause.key)
- onclause = descriptor
-
- if isinstance(onclause, interfaces.PropComparator):
- if right_entity is None:
- right_entity = onclause.property.mapper
- of_type = getattr(onclause, '_of_type', None)
- if of_type:
- right_entity = of_type
- else:
- right_entity = onclause.property.mapper
-
- left_entity = onclause.parententity
-
- prop = onclause.property
- if not isinstance(onclause, attributes.QueryableAttribute):
- onclause = prop
-
- if not create_aliases:
- # check for this path already present.
- # don't render in that case.
- if (left_entity, right_entity, prop.key) in \
- self._joinpoint:
- self._joinpoint = \
- self._joinpoint[
- (left_entity, right_entity, prop.key)]
- continue
-
- elif onclause is not None and right_entity is None:
- # TODO: no coverage here
- raise NotImplementedError("query.join(a==b) not supported.")
-
- self._join_left_to_right(
- left_entity,
- right_entity, onclause,
- outerjoin, create_aliases, prop)
-
- def _join_left_to_right(self, left, right,
- onclause, outerjoin, create_aliases, prop):
- """append a JOIN to the query's from clause."""
-
- if left is None:
- left = self._joinpoint_zero()
-
- if left is right and \
- not create_aliases:
- raise sa_exc.InvalidRequestError(
- "Can't construct a join from %s to %s, they "
- "are the same entity" %
- (left, right))
-
- left_mapper, left_selectable, left_is_aliased = _entity_info(left)
- right_mapper, right_selectable, right_is_aliased = _entity_info(right)
-
- if right_mapper and prop and \
- not right_mapper.common_parent(prop.mapper):
- raise sa_exc.InvalidRequestError(
- "Join target %s does not correspond to "
- "the right side of join condition %s" % (right, onclause)
- )
-
- if not right_mapper and prop:
- right_mapper = prop.mapper
-
- need_adapter = False
-
- if right_mapper and right is right_selectable:
- if not right_selectable.is_derived_from(
- right_mapper.mapped_table):
- raise sa_exc.InvalidRequestError(
- "Selectable '%s' is not derived from '%s'" %
- (right_selectable.description,
- right_mapper.mapped_table.description))
-
- if not isinstance(right_selectable, expression.Alias):
- right_selectable = right_selectable.alias()
-
- right = aliased(right_mapper, right_selectable)
- need_adapter = True
-
- aliased_entity = right_mapper and \
- not right_is_aliased and \
- (
- right_mapper.with_polymorphic or
- isinstance(
- right_mapper.mapped_table,
- expression.Join)
- )
-
- if not need_adapter and (create_aliases or aliased_entity):
- right = aliased(right)
- need_adapter = True
-
- # if joining on a MapperProperty path,
- # track the path to prevent redundant joins
- if not create_aliases and prop:
-
- self._joinpoint = jp = {
- '_joinpoint_entity':right,
- 'prev':((left, right, prop.key), self._joinpoint)
- }
-
- # copy backwards to the root of the _joinpath
- # dict, so that no existing dict in the path is mutated
- while 'prev' in jp:
- f, prev = jp['prev']
- prev = prev.copy()
- prev[f] = jp
- jp['prev'] = (f, prev)
- jp = prev
-
- self._joinpath = jp
-
- else:
- self._joinpoint = {
- '_joinpoint_entity':right
- }
-
- # if an alias() of the right side was generated here,
- # apply an adapter to all subsequent filter() calls
- # until reset_joinpoint() is called.
- if need_adapter:
- self._filter_aliases = ORMAdapter(right,
- equivalents=right_mapper and right_mapper._equivalent_columns or {},
- chain_to=self._filter_aliases)
-
- # if the onclause is a ClauseElement, adapt it with any
- # adapters that are in place right now
- if isinstance(onclause, expression.ClauseElement):
- onclause = self._adapt_clause(onclause, True, True)
-
- # if an alias() on the right side was generated,
- # which is intended to wrap the right side in a subquery,
- # ensure that columns retrieved from this target in the result
- # set are also adapted.
- if aliased_entity:
- self.__mapper_loads_polymorphically_with(
- right_mapper,
- ORMAdapter(
- right,
- equivalents=right_mapper._equivalent_columns
- )
- )
-
- # this is an overly broad assumption here, but there's a
- # very wide variety of situations where we rely upon orm.join's
- # adaption to glue clauses together, with joined-table inheritance's
- # wide array of variables taking up most of the space.
- # Setting the flag here is still a guess, so it is a bug
- # that we don't have definitive criterion to determine when
- # adaption should be enabled (or perhaps that we're even doing the
- # whole thing the way we are here).
- join_to_left = not right_is_aliased and not left_is_aliased
-
- if self._from_obj and left_selectable is not None:
- replace_clause_index, clause = sql_util.find_join_source(
- self._from_obj,
- left_selectable)
- if clause is not None:
- # the entire query's FROM clause is an alias of itself (i.e.
- # from_self(), similar). if the left clause is that one,
- # ensure it adapts to the left side.
- if self._from_obj_alias and clause is self._from_obj[0]:
- join_to_left = True
-
- # An exception case where adaption to the left edge is not
- # desirable. See above note on join_to_left.
- if join_to_left and isinstance(clause, expression.Join) and \
- sql_util.clause_is_present(left_selectable, clause):
- join_to_left = False
-
- clause = orm_join(clause,
- right,
- onclause, isouter=outerjoin,
- join_to_left=join_to_left)
-
- self._from_obj = \
- self._from_obj[:replace_clause_index] + \
- (clause, ) + \
- self._from_obj[replace_clause_index + 1:]
- return
-
- if left_mapper:
- for ent in self._entities:
- if ent.corresponds_to(left):
- clause = ent.selectable
- break
- else:
- clause = left
- else:
- clause = None
-
- if clause is None:
- raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from")
-
- clause = orm_join(clause, right, onclause,
- isouter=outerjoin, join_to_left=join_to_left)
-
- self._from_obj = self._from_obj + (clause,)
-
- def _reset_joinpoint(self):
- self._joinpoint = self._joinpath
- self._filter_aliases = None
-
- @_generative(_no_statement_condition)
- def reset_joinpoint(self):
- """return a new Query reset the 'joinpoint' of this Query reset
- back to the starting mapper. Subsequent generative calls will
- be constructed from the new joinpoint.
-
- Note that each call to join() or outerjoin() also starts from
- the root.
-
- """
- self._reset_joinpoint()
-
- @_generative(_no_clauseelement_condition)
- def select_from(self, *from_obj):
- """Set the FROM clause of this :class:`.Query` explicitly.
-
- Sending a mapped class or entity here effectively replaces the
- "left edge" of any calls to :meth:`.Query.join`, when no
- joinpoint is otherwise established - usually, the default "join
- point" is the leftmost entity in the :class:`.Query` object's
- list of entities to be selected.
-
- Mapped entities or plain :class:`.Table` or other selectables
- can be sent here which will form the default FROM clause.
-
- """
- obj = []
- for fo in from_obj:
- if _is_mapped_class(fo):
- mapper, selectable, is_aliased_class = _entity_info(fo)
- self._select_from_entity = fo
- obj.append(selectable)
- elif not isinstance(fo, expression.FromClause):
- raise sa_exc.ArgumentError(
- "select_from() accepts FromClause objects only.")
- else:
- obj.append(fo)
-
- self._set_select_from(*obj)
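-
- # Editorial sketch (hypothetical ``User``/``Address`` models):
- # select_from() establishes the "left edge" for subsequent joins:
- #
- #   session.query(sql.func.count(Address.id)).\
- #       select_from(User).\
- #       join(User.addresses)
- #   # SELECT count(addresses.id) FROM users JOIN addresses ON ...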
-
- def __getitem__(self, item):
- if isinstance(item, slice):
- start, stop, step = util.decode_slice(item)
-
- if isinstance(stop, int) and \
- isinstance(start, int) and \
- stop - start <= 0:
- return []
-
- # perhaps we should execute a count() here so that we
- # can still use LIMIT/OFFSET ?
- elif (isinstance(start, int) and start < 0) \
- or (isinstance(stop, int) and stop < 0):
- return list(self)[item]
-
- res = self.slice(start, stop)
- if step is not None:
- return list(res)[None:None:item.step]
- else:
- return list(res)
- else:
- if item == -1:
- return list(self)[-1]
- else:
- return list(self[item:item+1])[0]
-
- @_generative(_no_statement_condition)
- def slice(self, start, stop):
- """apply LIMIT/OFFSET to the ``Query`` based on a "
- "range and return the newly resulting ``Query``."""
-
- if start is not None and stop is not None:
- self._offset = (self._offset or 0) + start
- self._limit = stop - start
- elif start is None and stop is not None:
- self._limit = stop
- elif start is not None and stop is None:
- self._offset = (self._offset or 0) + start
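-
- # Editorial sketch: slice() underlies Python slice access, and start
- # values accumulate against any OFFSET already present:
- #
- #   session.query(User).slice(5, 10)    # LIMIT 5 OFFSET 5
- #   session.query(User)[5:10]           # same statement via __getitem__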
-
- @_generative(_no_statement_condition)
- def limit(self, limit):
- """Apply a ``LIMIT`` to the query and return the newly resulting
-
- ``Query``.
-
- """
- self._limit = limit
-
- @_generative(_no_statement_condition)
- def offset(self, offset):
- """Apply an ``OFFSET`` to the query and return the newly resulting
- ``Query``.
-
- """
- self._offset = offset
-
- @_generative(_no_statement_condition)
- def distinct(self, *criterion):
- """Apply a ``DISTINCT`` to the query and return the newly resulting
- ``Query``.
-
- :param \*criterion: optional column expressions. When present,
- the Postgresql dialect will render a ``DISTINCT ON (<expressions>)``
- construct.
-
- """
- if not criterion:
- self._distinct = True
- else:
- criterion = self._adapt_col_list(criterion)
- if isinstance(self._distinct, list):
- self._distinct += criterion
- else:
- self._distinct = criterion
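-
- # Editorial sketch (hypothetical ``User`` model):
- #
- #   session.query(User.name).distinct()
- #   # SELECT DISTINCT users.name FROM users
- #   session.query(User).distinct(User.name)
- #   # SELECT DISTINCT ON (users.name) ... (Postgresql dialect only)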
-
- def all(self):
- """Return the results represented by this ``Query`` as a list.
-
- This results in an execution of the underlying query.
-
- """
- return list(self)
-
- @_generative(_no_clauseelement_condition)
- def from_statement(self, statement):
- """Execute the given SELECT statement and return results.
-
- This method bypasses all internal statement compilation, and the
- statement is executed without modification.
-
- The statement argument is either a string, a ``select()`` construct,
- or a ``text()`` construct, and should return the set of columns
- appropriate to the entity class represented by this ``Query``.
-
- """
- if isinstance(statement, basestring):
- statement = sql.text(statement)
-
- if not isinstance(statement,
- (expression._TextClause,
- expression._SelectBase)):
- raise sa_exc.ArgumentError(
- "from_statement accepts text(), select(), "
- "and union() objects only.")
-
- self._statement = statement
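-
- # Editorial sketch: a textual statement must return the columns the
- # entity expects; bind parameters are supplied via params():
- #
- #   session.query(User).\
- #       from_statement("SELECT * FROM users WHERE name=:name").\
- #       params(name='ed').all()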
-
- def first(self):
- """Return the first result of this ``Query`` or
- None if the result doesn't contain any row.
-
- first() applies a limit of one within the generated SQL, so that
- only one primary entity row is generated on the server side
- (note this may consist of multiple result rows if join-loaded
- collections are present).
-
- Calling ``first()`` results in an execution of the underlying query.
-
- """
- if self._statement is not None:
- ret = list(self)[0:1]
- else:
- ret = list(self[0:1])
- if len(ret) > 0:
- return ret[0]
- else:
- return None
-
- def one(self):
- """Return exactly one result or raise an exception.
-
- Raises ``sqlalchemy.orm.exc.NoResultFound`` if the query selects
- no rows. Raises ``sqlalchemy.orm.exc.MultipleResultsFound``
- if multiple object identities are returned, or if multiple
- rows are returned for a query that does not return object
- identities.
-
- Note that an entity query, that is, one which selects one or
- more mapped classes as opposed to individual column attributes,
- may ultimately represent many rows but only one row of
- unique entity or entities - this is a successful result for one().
-
- Calling ``one()`` results in an execution of the underlying query.
- As of 0.6, ``one()`` fully fetches all results instead of applying
- any kind of limit, so that the "unique"-ing of entities does not
- conceal multiple object identities.
-
- """
- ret = list(self)
-
- l = len(ret)
- if l == 1:
- return ret[0]
- elif l == 0:
- raise orm_exc.NoResultFound("No row was found for one()")
- else:
- raise orm_exc.MultipleResultsFound(
- "Multiple rows were found for one()")
-
- def scalar(self):
- """Return the first element of the first result or None
- if no rows present. If multiple rows are returned,
- raises MultipleResultsFound.
-
- >>> session.query(Item).scalar()
- <Item>
- >>> session.query(Item.id).scalar()
- 1
- >>> session.query(Item.id).filter(Item.id < 0).scalar()
- None
- >>> session.query(Item.id, Item.name).scalar()
- 1
- >>> session.query(func.count(Parent.id)).scalar()
- 20
-
- This results in an execution of the underlying query.
-
- """
- try:
- ret = self.one()
- if not isinstance(ret, tuple):
- return ret
- return ret[0]
- except orm_exc.NoResultFound:
- return None
-
- def __iter__(self):
- context = self._compile_context()
- context.statement.use_labels = True
- if self._autoflush and not self._populate_existing:
- self.session._autoflush()
- return self._execute_and_instances(context)
-
- def _connection_from_session(self, **kw):
- conn = self.session.connection(
- **kw)
- if self._execution_options:
- conn = conn.execution_options(**self._execution_options)
- return conn
-
- def _execute_and_instances(self, querycontext):
- conn = self._connection_from_session(
- mapper=self._mapper_zero_or_none(),
- clause=querycontext.statement,
- close_with_result=True)
-
- result = conn.execute(querycontext.statement, self._params)
- return self.instances(result, querycontext)
-
- @property
- def column_descriptions(self):
- """Return metadata about the columns which would be
- returned by this :class:`.Query`.
-
- Format is a list of dictionaries::
-
- user_alias = aliased(User, name='user2')
- q = sess.query(User, User.id, user_alias)
-
- # this expression:
- q.column_descriptions
-
- # would return:
- [
- {
- 'name':'User',
- 'type':User,
- 'aliased':False,
- 'expr':User,
- },
- {
- 'name':'id',
- 'type':Integer(),
- 'aliased':False,
- 'expr':User.id,
- },
- {
- 'name':'user2',
- 'type':User,
- 'aliased':True,
- 'expr':user_alias
- }
- ]
-
- """
- return [
- {
- 'name':ent._label_name,
- 'type':ent.type,
- 'aliased':getattr(ent, 'is_aliased_class', False),
- 'expr':ent.expr
- }
- for ent in self._entities
- ]
-
- def instances(self, cursor, __context=None):
- """Given a ResultProxy cursor as returned by connection.execute(),
- return an ORM result as an iterator.
-
- e.g.::
-
- result = engine.execute("select * from users")
- for u in session.query(User).instances(result):
- print u
- """
- session = self.session
-
- context = __context
- if context is None:
- context = QueryContext(self)
-
- context.runid = _new_runid()
-
- filtered = bool(list(self._mapper_entities))
- single_entity = filtered and len(self._entities) == 1
-
- if filtered:
- if single_entity:
- filter = lambda x: util.unique_list(x, id)
- else:
- filter = util.unique_list
- else:
- filter = None
-
- custom_rows = single_entity and \
- self._entities[0].mapper.dispatch.append_result
-
- (process, labels) = \
- zip(*[
- query_entity.row_processor(self, context, custom_rows)
- for query_entity in self._entities
- ])
-
- while True:
- context.progress = {}
- context.partials = {}
-
- if self._yield_per:
- fetch = cursor.fetchmany(self._yield_per)
- if not fetch:
- break
- else:
- fetch = cursor.fetchall()
-
- if custom_rows:
- rows = []
- for row in fetch:
- process[0](row, rows)
- elif single_entity:
- rows = [process[0](row, None) for row in fetch]
- else:
- rows = [util.NamedTuple([proc(row, None) for proc in process],
- labels) for row in fetch]
-
- if filter:
- rows = filter(rows)
-
- if context.refresh_state and self._only_load_props \
- and context.refresh_state in context.progress:
- context.refresh_state.commit(
- context.refresh_state.dict, self._only_load_props)
- context.progress.pop(context.refresh_state)
-
- session._finalize_loaded(context.progress)
-
- for ii, (dict_, attrs) in context.partials.iteritems():
- ii.commit(dict_, attrs)
-
- for row in rows:
- yield row
-
- if not self._yield_per:
- break
-
- def merge_result(self, iterator, load=True):
- """Merge a result into this Query's Session.
-
- Given an iterator returned by a Query of the same structure as this
- one, return an identical iterator of results, with all mapped
- instances merged into the session using Session.merge(). This is an
- optimized method which will merge all mapped instances, preserving the
- structure of the result rows and unmapped columns with less method
- overhead than that of calling Session.merge() explicitly for each
- value.
-
- The structure of the results is determined based on the column list of
- this Query - if these do not correspond, unchecked errors will occur.
-
- The 'load' argument is the same as that of Session.merge().
-
- """
-
- session = self.session
- if load:
- # flush current contents if we expect to load data
- session._autoflush()
-
- autoflush = session.autoflush
- try:
- session.autoflush = False
- single_entity = len(self._entities) == 1
- if single_entity:
- if isinstance(self._entities[0], _MapperEntity):
- result = [session._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive={})
- for instance in iterator]
- else:
- result = list(iterator)
- else:
- mapped_entities = [i for i, e in enumerate(self._entities)
- if isinstance(e, _MapperEntity)]
- result = []
- for row in iterator:
- newrow = list(row)
- for i in mapped_entities:
- newrow[i] = session._merge(
- attributes.instance_state(newrow[i]),
- attributes.instance_dict(newrow[i]),
- load=load, _recursive={})
- result.append(util.NamedTuple(newrow, row._labels))
-
- return iter(result)
- finally:
- session.autoflush = autoflush
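-
- # Editorial sketch: merge_result() is typically used to install rows
- # produced elsewhere (e.g. a pickled result cache) into the current
- # Session without re-executing the query:
- #
- #   q = session.query(User).filter(User.id > 5)
- #   cached = pickle.loads(pickle.dumps(q.all()))  # stand-in for a cache
- #   merged = list(q.merge_result(cached))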
-
- @classmethod
- def _get_from_identity(cls, session, key, passive):
- """Look up the given key in the given session's identity map,
- check the object for expired state if found.
-
- """
- instance = session.identity_map.get(key)
- if instance:
-
- state = attributes.instance_state(instance)
-
- # expired - ensure it still exists
- if state.expired:
- if passive is attributes.PASSIVE_NO_FETCH:
- # TODO: no coverage here
- return attributes.PASSIVE_NO_RESULT
- elif passive is attributes.PASSIVE_NO_FETCH_RELATED:
- # this mode is used within a flush and the instance's
- # expired state will be checked soon enough, if necessary
- return instance
- try:
- state(passive)
- except orm_exc.ObjectDeletedError:
- session._remove_newly_deleted(state)
- return None
- return instance
- else:
- return None
-
- def _load_on_ident(self, key, refresh_state=None, lockmode=None,
- only_load_props=None):
- """Load the given identity key from the database."""
-
- lockmode = lockmode or self._lockmode
-
- if key is not None:
- ident = key[1]
- else:
- ident = None
-
- if refresh_state is None:
- q = self._clone()
- q._get_condition()
- else:
- q = self._clone()
-
- if ident is not None:
- mapper = self._mapper_zero()
-
- (_get_clause, _get_params) = mapper._get_clause
-
- # None present in ident - turn those comparisons
- # into "IS NULL"
- if None in ident:
- nones = set([
- _get_params[col].key for col, value in
- zip(mapper.primary_key, ident) if value is None
- ])
- _get_clause = sql_util.adapt_criterion_to_null(
- _get_clause, nones)
-
- _get_clause = q._adapt_clause(_get_clause, True, False)
- q._criterion = _get_clause
-
- params = dict([
- (_get_params[primary_key].key, id_val)
- for id_val, primary_key in zip(ident, mapper.primary_key)
- ])
-
- q._params = params
-
- if lockmode is not None:
- q._lockmode = lockmode
- q._get_options(
- populate_existing=bool(refresh_state),
- version_check=(lockmode is not None),
- only_load_props=only_load_props,
- refresh_state=refresh_state)
- q._order_by = None
-
- try:
- return q.one()
- except orm_exc.NoResultFound:
- return None
-
- @property
- def _select_args(self):
- return {
- 'limit':self._limit,
- 'offset':self._offset,
- 'distinct':self._distinct,
- 'group_by':self._group_by or None,
- 'having':self._having
- }
-
- @property
- def _should_nest_selectable(self):
- kwargs = self._select_args
- return (kwargs.get('limit') is not None or
- kwargs.get('offset') is not None or
- kwargs.get('distinct', False))
-
- def count(self):
- """Return a count of rows this Query would return.
-
- This generates the SQL for this Query as follows::
-
- SELECT count(1) AS count_1 FROM (
- SELECT <rest of query follows...>
- ) AS anon_1
-
- Note the above scheme is newly refined in 0.7
- (as of 0.7b3).
-
- For fine grained control over specific columns
- to count, to skip the usage of a subquery or
- otherwise control of the FROM clause,
- or to use other aggregate functions,
- use :attr:`.func` expressions in conjunction
- with :meth:`~.Session.query`, i.e.::
-
- from sqlalchemy import func
-
- # count User records, without
- # using a subquery.
- session.query(func.count(User.id))
-
- # return count of user "id" grouped
- # by "name"
- session.query(func.count(User.id)).\\
- group_by(User.name)
-
- from sqlalchemy import distinct
-
- # count distinct "name" values
- session.query(func.count(distinct(User.name)))
-
- """
- col = sql.func.count(sql.literal_column('*'))
- return self.from_self(col).scalar()
-
- def delete(self, synchronize_session='evaluate'):
- """Perform a bulk delete query.
-
- Deletes rows matched by this query from the database.
-
- :param synchronize_session: chooses the strategy for the removal of
- matched objects from the session. Valid values are:
-
- False - don't synchronize the session. This option is the most
- efficient and is reliable once the session is expired, which
- typically occurs after a commit(), or explicitly using
- expire_all(). Before the expiration, objects may still remain in
- the session which were in fact deleted which can lead to confusing
- results if they are accessed via get() or already loaded
- collections.
-
- 'fetch' - performs a select query before the delete to find
- objects that are matched by the delete query and need to be
- removed from the session. Matched objects are removed from the
- session.
-
- 'evaluate' - Evaluate the query's criteria in Python straight on
- the objects in the session. If evaluation of the criteria isn't
- implemented, an error is raised. In that case you probably
- want to use the 'fetch' strategy as a fallback.
-
- The expression evaluator currently doesn't account for differing
- string collations between the database and Python.
-
- Returns the number of rows deleted, excluding any cascades.
-
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON DELETE CASCADE is configured for any foreign key
- references which require it. The Session needs to be expired (occurs
- automatically after commit(), or call expire_all()) in order for the
- state of dependent objects subject to delete or delete-orphan cascade
- to be correctly represented.
-
- Also, the ``before_delete()`` and ``after_delete()``
- :class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
- called from this method. For a delete hook here, use the
- :meth:`.SessionExtension.after_bulk_delete()` event hook.
-
- """
- #TODO: lots of duplication and ifs - probably needs to be
- # refactored to strategies
- #TODO: cascades need handling.
-
- if synchronize_session not in [False, 'evaluate', 'fetch']:
- raise sa_exc.ArgumentError(
- "Valid strategies for session "
- "synchronization are False, 'evaluate' and "
- "'fetch'")
- self._no_select_modifiers("delete")
-
- self = self.enable_eagerloads(False)
-
- context = self._compile_context()
- if len(context.statement.froms) != 1 or \
- not isinstance(context.statement.froms[0], schema.Table):
- raise sa_exc.ArgumentError("Only deletion via a single table "
- "query is currently supported")
- primary_table = context.statement.froms[0]
-
- session = self.session
-
- if self._autoflush:
- session._autoflush()
-
- if synchronize_session == 'evaluate':
- try:
- evaluator_compiler = evaluator.EvaluatorCompiler()
- if self.whereclause is not None:
- eval_condition = evaluator_compiler.process(
- self.whereclause)
- else:
- def eval_condition(obj):
- return True
-
- except evaluator.UnevaluatableError:
- raise sa_exc.InvalidRequestError(
- "Could not evaluate current criteria in Python. "
- "Specify 'fetch' or False for the synchronize_session "
- "parameter.")
-
- target_cls = self._mapper_zero().class_
-
- #TODO: detect when the where clause is a trivial primary key match
- objs_to_expunge = [
- obj for (cls, pk),obj in
- session.identity_map.iteritems()
- if issubclass(cls, target_cls) and
- eval_condition(obj)]
-
- elif synchronize_session == 'fetch':
- #TODO: use RETURNING when available
- select_stmt = context.statement.with_only_columns(
- primary_table.primary_key)
- matched_rows = session.execute(
- select_stmt,
- params=self._params).fetchall()
-
- delete_stmt = sql.delete(primary_table, context.whereclause)
-
- result = session.execute(delete_stmt, params=self._params)
-
- if synchronize_session == 'evaluate':
- for obj in objs_to_expunge:
- session._remove_newly_deleted(attributes.instance_state(obj))
- elif synchronize_session == 'fetch':
- target_mapper = self._mapper_zero()
- for primary_key in matched_rows:
- identity_key = target_mapper.identity_key_from_primary_key(
- list(primary_key))
- if identity_key in session.identity_map:
- session._remove_newly_deleted(
- attributes.instance_state(
- session.identity_map[identity_key]
- )
- )
-
- session.dispatch.after_bulk_delete(session, self, context, result)
-
- return result.rowcount
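-
- # Editorial sketch (hypothetical ``User`` model): bulk delete with the
- # two active synchronization strategies:
- #
- #   session.query(User).filter(User.name == 'ed').\
- #       delete(synchronize_session='evaluate')
- #   session.query(User).filter(User.name.like('%ed%')).\
- #       delete(synchronize_session='fetch')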
-
- def update(self, values, synchronize_session='evaluate'):
- """Perform a bulk update query.
-
- Updates rows matched by this query in the database.
-
- :param values: a dictionary with attributes names as keys and literal
- values or sql expressions as values.
-
- :param synchronize_session: chooses the strategy to update the
- attributes on objects in the session. Valid values are:
-
- False - don't synchronize the session. This option is the most
- efficient and is reliable once the session is expired, which
- typically occurs after a commit(), or explicitly using
- expire_all(). Before the expiration, updated objects may still
- remain in the session with stale values on their attributes, which
- can lead to confusing results.
-
- 'fetch' - performs a select query before the update to find
- objects that are matched by the update query. The updated
- attributes are expired on matched objects.
-
- 'evaluate' - Evaluate the Query's criteria in Python straight on
- the objects in the session. If evaluation of the criteria isn't
- implemented, an exception is raised.
-
- The expression evaluator currently doesn't account for differing
- string collations between the database and Python.
-
- Returns the number of rows matched by the update.
-
- The method does *not* offer in-Python cascading of relationships - it
- is assumed that ON UPDATE CASCADE is configured for any foreign key
- references which require it.
-
- The Session needs to be expired (occurs automatically after commit(),
- or call expire_all()) in order for the state of dependent objects
- subject to foreign key cascade to be correctly represented.
-
- Also, the ``before_update()`` and ``after_update()``
- :class:`~sqlalchemy.orm.interfaces.MapperExtension` methods are not
- called from this method. For an update hook here, use the
- :meth:`.SessionExtension.after_bulk_update()` event hook.
-
- """
-
- #TODO: value keys need to be mapped to corresponding sql cols and
- # instr.attr.s to string keys
- #TODO: updates of manytoone relationships need to be converted to
- # fk assignments
- #TODO: cascades need handling.
-
- if synchronize_session == 'expire':
- util.warn_deprecated("The 'expire' value as applied to "
- "the synchronize_session argument of "
- "query.update() is now called 'fetch'")
- synchronize_session = 'fetch'
-
- if synchronize_session not in [False, 'evaluate', 'fetch']:
- raise sa_exc.ArgumentError(
- "Valid strategies for session synchronization "
- "are False, 'evaluate' and 'fetch'")
- self._no_select_modifiers("update")
-
- self = self.enable_eagerloads(False)
-
- context = self._compile_context()
- if len(context.statement.froms) != 1 or \
- not isinstance(context.statement.froms[0], schema.Table):
- raise sa_exc.ArgumentError(
- "Only update via a single table query is "
- "currently supported")
- primary_table = context.statement.froms[0]
-
- session = self.session
-
- if self._autoflush:
- session._autoflush()
-
- if synchronize_session == 'evaluate':
- try:
- evaluator_compiler = evaluator.EvaluatorCompiler()
- if self.whereclause is not None:
- eval_condition = evaluator_compiler.process(
- self.whereclause)
- else:
- def eval_condition(obj):
- return True
-
- value_evaluators = {}
- for key,value in values.iteritems():
- key = _attr_as_key(key)
- value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
- except evaluator.UnevaluatableError:
- raise sa_exc.InvalidRequestError(
- "Could not evaluate current criteria in Python. "
- "Specify 'fetch' or False for the "
- "synchronize_session parameter.")
- target_cls = self._mapper_zero().class_
- matched_objects = []
- # compute the evaluated keys once, outside the loop; this also
- # avoids an unbound name below when the identity map is empty
- evaluated_keys = value_evaluators.keys()
- for (cls, pk),obj in session.identity_map.iteritems():
-
- if issubclass(cls, target_cls) and eval_condition(obj):
- matched_objects.append(obj)
-
- elif synchronize_session == 'fetch':
- select_stmt = context.statement.with_only_columns(
- primary_table.primary_key)
- matched_rows = session.execute(
- select_stmt,
- params=self._params).fetchall()
-
- update_stmt = sql.update(primary_table, context.whereclause, values)
-
- result = session.execute(update_stmt, params=self._params)
-
- if synchronize_session == 'evaluate':
- target_cls = self._mapper_zero().class_
-
- for obj in matched_objects:
- state, dict_ = attributes.instance_state(obj),\
- attributes.instance_dict(obj)
-
- # only evaluate unmodified attributes
- to_evaluate = state.unmodified.intersection(
- evaluated_keys)
- for key in to_evaluate:
- dict_[key] = value_evaluators[key](obj)
-
- state.commit(dict_, list(to_evaluate))
-
- # expire attributes with pending changes
- # (there was no autoflush, so they are overwritten)
- state.expire_attributes(dict_,
- set(evaluated_keys).
- difference(to_evaluate))
-
- elif synchronize_session == 'fetch':
- target_mapper = self._mapper_zero()
-
- for primary_key in matched_rows:
- identity_key = target_mapper.identity_key_from_primary_key(
- list(primary_key))
- if identity_key in session.identity_map:
- session.expire(
- session.identity_map[identity_key],
- [_attr_as_key(k) for k in values]
- )
-
- session.dispatch.after_bulk_update(session, self, context, result)
-
- return result.rowcount
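-
- # Editorial sketch (hypothetical ``User`` model): bulk update with a
- # literal value, then with a SQL expression:
-
- #   session.query(User).filter(User.name == 'ed').\
- #       update({User.name: 'ed jones'}, synchronize_session='evaluate')
- #   session.query(User).\
- #       update({User.age: User.age + 1}, synchronize_session=False)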
-
- def _compile_context(self, labels=True):
- context = QueryContext(self)
-
- if context.statement is not None:
- return context
-
- if self._lockmode:
- try:
- for_update = {'read': 'read',
- 'update': True,
- 'update_nowait': 'nowait',
- None: False}[self._lockmode]
- except KeyError:
- raise sa_exc.ArgumentError(
- "Unknown lockmode %r" % self._lockmode)
- else:
- for_update = False
-
- for entity in self._entities:
- entity.setup_context(self, context)
-
- for rec in context.create_eager_joins:
- strategy = rec[0]
- strategy(*rec[1:])
-
- eager_joins = context.eager_joins.values()
-
- if context.from_clause:
- # "load from explicit FROMs" mode,
- # i.e. when select_from() or join() is used
- froms = list(context.from_clause)
- else:
- # "load from discrete FROMs" mode,
- # i.e. when each _MappedEntity has its own FROM
- froms = context.froms
-
- if self._enable_single_crit:
- self._adjust_for_single_inheritance(context)
-
- if not context.primary_columns:
- if self._only_load_props:
- raise sa_exc.InvalidRequestError(
- "No column-based properties specified for "
- "refresh operation. Use session.expire() "
- "to reload collections and related items.")
- else:
- raise sa_exc.InvalidRequestError(
- "Query contains no columns with which to "
- "SELECT from.")
-
- if context.multi_row_eager_loaders and self._should_nest_selectable:
- # for eager joins present and LIMIT/OFFSET/DISTINCT,
- # wrap the query inside a select,
- # then append eager joins onto that
-
- if context.order_by:
- order_by_col_expr = list(
- chain(*[
- sql_util.find_columns(o)
- for o in context.order_by
- ])
- )
- else:
- context.order_by = None
- order_by_col_expr = []
-
- inner = sql.select(
- context.primary_columns + order_by_col_expr,
- context.whereclause,
- from_obj=froms,
- use_labels=labels,
- correlate=False,
- order_by=context.order_by,
- **self._select_args
- )
-
- for hint in self._with_hints:
- inner = inner.with_hint(*hint)
-
- if self._correlate:
- inner = inner.correlate(*self._correlate)
-
- inner = inner.alias()
-
- equivs = self.__all_equivs()
-
- context.adapter = sql_util.ColumnAdapter(inner, equivs)
-
- statement = sql.select(
- [inner] + context.secondary_columns,
- for_update=for_update,
- use_labels=labels)
-
- from_clause = inner
- for eager_join in eager_joins:
- # EagerLoader places a 'stop_on' attribute on the join,
- # giving us a marker as to where the "splice point" of
- # the join should be
- from_clause = sql_util.splice_joins(
- from_clause,
- eager_join, eager_join.stop_on)
-
- statement.append_from(from_clause)
-
- if context.order_by:
- statement.append_order_by(
- *context.adapter.copy_and_process(
- context.order_by
- )
- )
-
- statement.append_order_by(*context.eager_order_by)
- else:
- if not context.order_by:
- context.order_by = None
-
- if self._distinct and context.order_by:
- order_by_col_expr = list(
- chain(*[
- sql_util.find_columns(o)
- for o in context.order_by
- ])
- )
- context.primary_columns += order_by_col_expr
-
- froms += tuple(context.eager_joins.values())
-
- statement = sql.select(
- context.primary_columns +
- context.secondary_columns,
- context.whereclause,
- from_obj=froms,
- use_labels=labels,
- for_update=for_update,
- correlate=False,
- order_by=context.order_by,
- **self._select_args
- )
-
- for hint in self._with_hints:
- statement = statement.with_hint(*hint)
-
- if self._correlate:
- statement = statement.correlate(*self._correlate)
-
- if context.eager_order_by:
- statement.append_order_by(*context.eager_order_by)
-
- context.statement = statement
-
- return context
-
- def _adjust_for_single_inheritance(self, context):
- """Apply single-table-inheritance filtering.
-
- For all distinct single-table-inheritance mappers represented in the
- columns clause of this query, add criterion to the WHERE clause of the
- given QueryContext such that only the appropriate subtypes are
- selected from the total results.
-
- """
-
- for entity, (mapper, adapter, s, i, w) in \
- self._mapper_adapter_map.iteritems():
- single_crit = mapper._single_table_criterion
- if single_crit is not None:
- if adapter:
- single_crit = adapter.traverse(single_crit)
- single_crit = self._adapt_clause(single_crit, False, False)
- context.whereclause = sql.and_(
- context.whereclause, single_crit)
-
- def __str__(self):
- return str(self._compile_context().statement)
-
-
-class _QueryEntity(object):
- """represent an entity column returned within a Query result."""
-
- def __new__(cls, *args, **kwargs):
- if cls is _QueryEntity:
- entity = args[1]
- if not isinstance(entity, basestring) and \
- _is_mapped_class(entity):
- cls = _MapperEntity
- else:
- cls = _ColumnEntity
- return object.__new__(cls)
-
- def _clone(self):
- q = self.__class__.__new__(self.__class__)
- q.__dict__ = self.__dict__.copy()
- return q
-
-class _MapperEntity(_QueryEntity):
- """mapper/class/AliasedClass entity"""
-
- def __init__(self, query, entity):
- self.primary_entity = not query._entities
- query._entities.append(self)
-
- self.entities = [entity]
- self.entity_zero = self.expr = entity
-
- def setup_entity(self, entity, mapper, adapter,
- from_obj, is_aliased_class, with_polymorphic):
- self.mapper = mapper
- self.adapter = adapter
- self.selectable = from_obj
- self._with_polymorphic = with_polymorphic
- self._polymorphic_discriminator = None
- self.is_aliased_class = is_aliased_class
- if is_aliased_class:
- self.path_entity = self.entity_zero = entity
- self._path = (entity,)
- self._label_name = self.entity_zero._sa_label_name
- self._reduced_path = (self.path_entity, )
- else:
- self.path_entity = mapper
- self._path = (mapper,)
- self._reduced_path = (mapper.base_mapper, )
- self.entity_zero = mapper
- self._label_name = self.mapper.class_.__name__
-
- def set_with_polymorphic(self, query, cls_or_mappers,
- selectable, discriminator):
- if cls_or_mappers is None:
- query._reset_polymorphic_adapter(self.mapper)
- return
-
- mappers, from_obj = self.mapper._with_polymorphic_args(
- cls_or_mappers, selectable)
- self._with_polymorphic = mappers
- self._polymorphic_discriminator = discriminator
-
- # TODO: do the wrapped thing here too so that
- # with_polymorphic() can be applied to aliases
- if not self.is_aliased_class:
- self.selectable = from_obj
- self.adapter = query._get_polymorphic_adapter(self, from_obj)
-
- @property
- def type(self):
- return self.mapper.class_
-
- def corresponds_to(self, entity):
- if _is_aliased_class(entity) or self.is_aliased_class:
- return entity is self.path_entity
- else:
- return entity.common_parent(self.path_entity)
-
- def adapt_to_selectable(self, query, sel):
- query._entities.append(self)
-
- def _get_entity_clauses(self, query, context):
-
- adapter = None
- if not self.is_aliased_class and query._polymorphic_adapters:
- adapter = query._polymorphic_adapters.get(self.mapper, None)
-
- if not adapter and self.adapter:
- adapter = self.adapter
-
- if adapter:
- if query._from_obj_alias:
- ret = adapter.wrap(query._from_obj_alias)
- else:
- ret = adapter
- else:
- ret = query._from_obj_alias
-
- return ret
-
- def row_processor(self, query, context, custom_rows):
- adapter = self._get_entity_clauses(query, context)
-
- if context.adapter and adapter:
- adapter = adapter.wrap(context.adapter)
- elif not adapter:
- adapter = context.adapter
-
- # polymorphic mappers which have concrete tables in
- # their hierarchy usually
- # require row aliasing unconditionally.
- if not adapter and self.mapper._requires_row_aliasing:
- adapter = sql_util.ColumnAdapter(
- self.selectable,
- self.mapper._equivalent_columns)
-
- if self.primary_entity:
- _instance = self.mapper._instance_processor(
- context,
- self._path,
- self._reduced_path,
- adapter,
- only_load_props=query._only_load_props,
- refresh_state=context.refresh_state,
- polymorphic_discriminator=
- self._polymorphic_discriminator
- )
- else:
- _instance = self.mapper._instance_processor(
- context,
- self._path,
- self._reduced_path,
- adapter,
- polymorphic_discriminator=
- self._polymorphic_discriminator)
-
- return _instance, self._label_name
-
- def setup_context(self, query, context):
- adapter = self._get_entity_clauses(query, context)
-
- context.froms += (self.selectable,)
-
- if context.order_by is False and self.mapper.order_by:
- context.order_by = self.mapper.order_by
-
- # apply adaptation to the mapper's order_by if needed.
- if adapter:
- context.order_by = adapter.adapt_list(
- util.to_list(
- context.order_by
- )
- )
-
- if self._with_polymorphic:
- poly_properties = self.mapper._iterate_polymorphic_properties(
- self._with_polymorphic)
- else:
- poly_properties = self.mapper._polymorphic_properties
-
- for value in poly_properties:
- if query._only_load_props and \
- value.key not in query._only_load_props:
- continue
- value.setup(
- context,
- self,
- self._path,
- self._reduced_path,
- adapter,
- only_load_props=query._only_load_props,
- column_collection=context.primary_columns
- )
-
- if self._polymorphic_discriminator is not None:
- if adapter:
- pd = adapter.columns[self._polymorphic_discriminator]
- else:
- pd = self._polymorphic_discriminator
- context.primary_columns.append(pd)
-
- def __str__(self):
- return str(self.mapper)
-
-class _ColumnEntity(_QueryEntity):
- """Column/expression based entity."""
-
- def __init__(self, query, column):
- self.expr = column
-
- if isinstance(column, basestring):
- column = sql.literal_column(column)
- self._label_name = column.name
- elif isinstance(column, (
- attributes.QueryableAttribute,
- interfaces.PropComparator
- )):
- self._label_name = column.key
- column = column.__clause_element__()
- else:
- self._label_name = getattr(column, 'key', None)
-
- if not isinstance(column, expression.ColumnElement) and \
- hasattr(column, '_select_iterable'):
- for c in column._select_iterable:
- if c is column:
- break
- _ColumnEntity(query, c)
-
- if c is not column:
- return
-
- if not isinstance(column, sql.ColumnElement):
- raise sa_exc.InvalidRequestError(
- "SQL expression, column, or mapped entity "
- "expected - got '%r'" % column
- )
-
- # If the Column is unnamed, give it a
- # label() so that mutable column expressions
- # can be located in the result even
- # if the expression's identity has been changed
- # due to adaption.
- if not column._label:
- column = column.label(None)
-
- query._entities.append(self)
-
- self.column = column
- self.froms = set()
-
- # look for ORM entities represented within the
- # given expression. Try to count only entities
- # for columns whose FROM object is in the actual list
- # of FROMs for the overall expression - this helps
- # subqueries which were built from ORM constructs from
- # leaking out their entities into the main select construct
- actual_froms = set(column._from_objects)
-
- self.entities = util.OrderedSet(
- elem._annotations['parententity']
- for elem in visitors.iterate(column, {})
- if 'parententity' in elem._annotations
- and actual_froms.intersection(elem._from_objects)
- )
-
- if self.entities:
- self.entity_zero = list(self.entities)[0]
- else:
- self.entity_zero = None
-
- @property
- def type(self):
- return self.column.type
-
- def adapt_to_selectable(self, query, sel):
- c = _ColumnEntity(query, sel.corresponding_column(self.column))
- c._label_name = self._label_name
- c.entity_zero = self.entity_zero
- c.entities = self.entities
-
- def setup_entity(self, entity, mapper, adapter, from_obj,
- is_aliased_class, with_polymorphic):
- self.selectable = from_obj
- self.froms.add(from_obj)
-
- def corresponds_to(self, entity):
- if self.entity_zero is None:
- return False
- elif _is_aliased_class(entity):
- return entity is self.entity_zero
- else:
- return not _is_aliased_class(self.entity_zero) and \
- entity.common_parent(self.entity_zero)
-
- def _resolve_expr_against_query_aliases(self, query, expr, context):
- return query._adapt_clause(expr, False, True)
-
- def row_processor(self, query, context, custom_rows):
- column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
-
- if context.adapter:
- column = context.adapter.columns[column]
-
- def proc(row, result):
- return row[column]
-
- return proc, self._label_name
-
- def setup_context(self, query, context):
- column = self._resolve_expr_against_query_aliases(
- query, self.column, context)
- context.froms += tuple(self.froms)
- context.primary_columns.append(column)
-
- def __str__(self):
- return str(self.column)
-
-log.class_logger(Query)
-
-class QueryContext(object):
- multi_row_eager_loaders = False
- adapter = None
- froms = ()
-
- def __init__(self, query):
-
- if query._statement is not None:
- if isinstance(query._statement, expression._SelectBase) and \
- not query._statement.use_labels:
- self.statement = query._statement.apply_labels()
- else:
- self.statement = query._statement
- else:
- self.statement = None
- self.from_clause = query._from_obj
- self.whereclause = query._criterion
- self.order_by = query._order_by
-
- self.query = query
- self.session = query.session
- self.populate_existing = query._populate_existing
- self.version_check = query._version_check
- self.refresh_state = query._refresh_state
- self.primary_columns = []
- self.secondary_columns = []
- self.eager_order_by = []
- self.eager_joins = {}
- self.create_eager_joins = []
- self.propagate_options = set(o for o in query._with_options if
- o.propagate_to_loaders)
- self.attributes = query._attributes.copy()
-
-class AliasOption(interfaces.MapperOption):
-
- def __init__(self, alias):
- self.alias = alias
-
- def process_query(self, query):
- if isinstance(self.alias, basestring):
- alias = query._mapper_zero().mapped_table.alias(self.alias)
- else:
- alias = self.alias
- query._from_obj_alias = sql_util.ColumnAdapter(alias)
-
-
-_runid = 1L
-_id_lock = util.threading.Lock()
-
-def _new_runid():
- global _runid
- _id_lock.acquire()
- try:
- _runid += 1
- return _runid
- finally:
- _id_lock.release()
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/scoping.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/scoping.py
deleted file mode 100755
index 53e5e5d1..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/scoping.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# orm/scoping.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy import exc as sa_exc
-from sqlalchemy.util import ScopedRegistry, ThreadLocalRegistry, warn
-from sqlalchemy.orm import class_mapper
-from sqlalchemy.orm import exc as orm_exc
-from sqlalchemy.orm.session import Session
-
-
-__all__ = ['ScopedSession']
-
-
-class ScopedSession(object):
- """Provides thread-local management of Sessions.
-
- Usage::
-
- Session = scoped_session(sessionmaker())
-
- ... use Session normally.
-
- The internal registry is accessible as well,
- and by default is an instance of :class:`.ThreadLocalRegistry`.
-
-
- """
-
- def __init__(self, session_factory, scopefunc=None):
- self.session_factory = session_factory
- if scopefunc:
- self.registry = ScopedRegistry(session_factory, scopefunc)
- else:
- self.registry = ThreadLocalRegistry(session_factory)
-
- def __call__(self, **kwargs):
- if kwargs:
- scope = kwargs.pop('scope', False)
- if scope is not None:
- if self.registry.has():
- raise sa_exc.InvalidRequestError("Scoped session is already present; "
- "no new arguments may be specified.")
- else:
- sess = self.session_factory(**kwargs)
- self.registry.set(sess)
- return sess
- else:
- return self.session_factory(**kwargs)
- else:
- return self.registry()
-
- def remove(self):
- """Dispose of the current contextual session."""
-
- if self.registry.has():
- self.registry().close()
- self.registry.clear()
-
- def configure(self, **kwargs):
- """reconfigure the sessionmaker used by this ScopedSession."""
-
- if self.registry.has():
- warn('At least one scoped session is already present. '
- 'configure() cannot affect sessions that have '
- 'already been created.')
-
- self.session_factory.configure(**kwargs)
-
- def query_property(self, query_cls=None):
- """return a class property which produces a `Query` object against the
- class when called.
-
- e.g.::
-
- Session = scoped_session(sessionmaker())
-
- class MyClass(object):
- query = Session.query_property()
-
- # after mappers are defined
- result = MyClass.query.filter(MyClass.name=='foo').all()
-
- Produces instances of the session's configured query class by
- default. To override and use a custom implementation, provide
- a ``query_cls`` callable. The callable will be invoked with
- the class's mapper as a positional argument and a session
- keyword argument.
-
- There is no limit to the number of query properties placed on
- a class.
-
- """
- class query(object):
- def __get__(s, instance, owner):
- try:
- mapper = class_mapper(owner)
- if mapper:
- if query_cls:
- # custom query class
- return query_cls(mapper, session=self.registry())
- else:
- # session's configured query class
- return self.registry().query(mapper)
- except orm_exc.UnmappedClassError:
- return None
- return query()
-
-def instrument(name):
- def do(self, *args, **kwargs):
- return getattr(self.registry(), name)(*args, **kwargs)
- return do
-for meth in Session.public_methods:
- setattr(ScopedSession, meth, instrument(meth))
-
-def makeprop(name):
- def set(self, attr):
- setattr(self.registry(), name, attr)
- def get(self):
- return getattr(self.registry(), name)
- return property(get, set)
-for prop in ('bind', 'dirty', 'deleted', 'new', 'identity_map', 'is_active', 'autoflush'):
- setattr(ScopedSession, prop, makeprop(prop))
-
-def clslevel(name):
- def do(cls, *args, **kwargs):
- return getattr(Session, name)(*args, **kwargs)
- return classmethod(do)
-for prop in ('close_all', 'object_session', 'identity_key'):
- setattr(ScopedSession, prop, clslevel(prop))
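-
- # Editorial sketch: the instrument()/makeprop()/clslevel() loops above
- # give ScopedSession a pass-through API, so Session methods may be
- # called on the registry itself (via the public scoped_session alias);
- # ``User`` here is a hypothetical mapped class:
- #
- #   Session = scoped_session(sessionmaker(bind=engine))
- #   Session.add(User(name='ed'))   # proxied to the thread-local Session
- #   Session.commit()
- #   Session.remove()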
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/session.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/session.py
deleted file mode 100755
index 8f8770a3..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/session.py
+++ /dev/null
@@ -1,1725 +0,0 @@
-# orm/session.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Provides the Session class and related utilities."""
-
-import weakref
-from itertools import chain
-from sqlalchemy import util, sql, engine, log, exc as sa_exc
-from sqlalchemy.sql import util as sql_util, expression
-from sqlalchemy.orm import (
- SessionExtension, attributes, exc, query, unitofwork, util as mapperutil, state
- )
-from sqlalchemy.orm.util import object_mapper as _object_mapper
-from sqlalchemy.orm.util import class_mapper as _class_mapper
-from sqlalchemy.orm.util import (
- _class_to_mapper, _state_mapper,
- )
-from sqlalchemy.orm.mapper import Mapper, _none_set
-from sqlalchemy.orm.unitofwork import UOWTransaction
-from sqlalchemy.orm import identity
-from sqlalchemy import event
-from sqlalchemy.orm.events import SessionEvents
-
-import sys
-
-__all__ = ['Session', 'SessionTransaction', 'SessionExtension']
-
-
-def sessionmaker(bind=None, class_=None, autoflush=True, autocommit=False,
- expire_on_commit=True, **kwargs):
- """Generate a custom-configured :class:`~sqlalchemy.orm.session.Session` class.
-
- The returned object is a subclass of ``Session``, which, when instantiated
- with no arguments, uses the keyword arguments configured here as its
- constructor arguments.
-
- It is intended that the `sessionmaker()` function be called within the
- global scope of an application, and the returned class be made available
- to the rest of the application as the single class used to instantiate
- sessions.
-
- e.g.::
-
- # global scope
- Session = sessionmaker(autoflush=False)
-
- # later, in a local scope, create and use a session:
- sess = Session()
-
- Any keyword arguments sent to the constructor itself will override the
- "configured" keywords::
-
- Session = sessionmaker()
-
- # bind an individual session to a connection
- sess = Session(bind=connection)
-
- The class also includes a special classmethod ``configure()``, which
- allows additional configuration options to be set after the custom
- ``Session`` class has been generated. This is particularly useful for
- defining the specific ``Engine`` (or engines) to which new instances of
- ``Session`` should be bound::
-
- Session = sessionmaker()
- Session.configure(bind=create_engine('sqlite:///foo.db'))
-
- sess = Session()
-
- For options, see the constructor options for :class:`.Session`.
-
- """
- kwargs['bind'] = bind
- kwargs['autoflush'] = autoflush
- kwargs['autocommit'] = autocommit
- kwargs['expire_on_commit'] = expire_on_commit
-
- if class_ is None:
- class_ = Session
-
- class Sess(object):
- def __init__(self, **local_kwargs):
- for k in kwargs:
- local_kwargs.setdefault(k, kwargs[k])
- super(Sess, self).__init__(**local_kwargs)
-
- @classmethod
- def configure(self, **new_kwargs):
- """(Re)configure the arguments for this sessionmaker.
-
- e.g.::
-
- Session = sessionmaker()
-
- Session.configure(bind=create_engine('sqlite://'))
- """
- kwargs.update(new_kwargs)
-
-
- return type("Session", (Sess, class_), {})
-
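-# Editor's sketch (not part of the original module): how configured
-# defaults, ``configure()`` and per-call keyword overrides interact.
-# ``engine1`` and ``engine2`` are assumed placeholder engines.
-#
-#     Session = sessionmaker(autoflush=False)   # configured default
-#     Session.configure(bind=engine1)           # set/override afterwards
-#
-#     sess = Session()                 # autoflush=False, bound to engine1
-#     sess2 = Session(bind=engine2)    # constructor kwargs win per-instance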
-
-class SessionTransaction(object):
- """A Session-level transaction.
-
- This corresponds to one or more :class:`~sqlalchemy.engine.Transaction`
- instances behind the scenes, with one ``Transaction`` per ``Engine`` in
- use.
-
- Direct usage of ``SessionTransaction`` is not necessary as of SQLAlchemy
- 0.4; use the ``begin()`` and ``commit()`` methods on ``Session`` itself.
-
- The ``SessionTransaction`` object is **not** thread-safe.
-
- .. index::
- single: thread safety; SessionTransaction
-
- """
-
- _rollback_exception = None
-
- def __init__(self, session, parent=None, nested=False):
- self.session = session
- self._connections = {}
- self._parent = parent
- self.nested = nested
- self._active = True
- self._prepared = False
- if not parent and nested:
- raise sa_exc.InvalidRequestError(
- "Can't start a SAVEPOINT transaction when no existing "
- "transaction is in progress")
-
- if self.session._enable_transaction_accounting:
- self._take_snapshot()
-
- @property
- def is_active(self):
- return self.session is not None and self._active
-
- def _assert_is_active(self):
- self._assert_is_open()
- if not self._active:
- if self._rollback_exception:
- raise sa_exc.InvalidRequestError(
- "This Session's transaction has been rolled back "
- "due to a previous exception during flush."
- " To begin a new transaction with this Session, "
- "first issue Session.rollback()."
- " Original exception was: %s"
- % self._rollback_exception
- )
- else:
- raise sa_exc.InvalidRequestError(
- "This Session's transaction has been rolled back "
- "by a nested rollback() call. To begin a new "
- "transaction, issue Session.rollback() first."
- )
-
- def _assert_is_open(self, error_msg="The transaction is closed"):
- if self.session is None:
- raise sa_exc.ResourceClosedError(error_msg)
-
- @property
- def _is_transaction_boundary(self):
- return self.nested or not self._parent
-
- def connection(self, bindkey, **kwargs):
- self._assert_is_active()
- engine = self.session.get_bind(bindkey, **kwargs)
- return self._connection_for_bind(engine)
-
- def _begin(self, nested=False):
- self._assert_is_active()
- return SessionTransaction(
- self.session, self, nested=nested)
-
- def _iterate_parents(self, upto=None):
- if self._parent is upto:
- return (self,)
- else:
- if self._parent is None:
- raise sa_exc.InvalidRequestError(
- "Transaction %s is not on the active transaction list" % (
- upto))
- return (self,) + self._parent._iterate_parents(upto)
-
- def _take_snapshot(self):
- if not self._is_transaction_boundary:
- self._new = self._parent._new
- self._deleted = self._parent._deleted
- return
-
- if not self.session._flushing:
- self.session.flush()
-
- self._new = weakref.WeakKeyDictionary()
- self._deleted = weakref.WeakKeyDictionary()
-
- def _restore_snapshot(self):
- assert self._is_transaction_boundary
-
- for s in set(self._new).union(self.session._new):
- self.session._expunge_state(s)
- if s.key:
- del s.key
-
- for s in set(self._deleted).union(self.session._deleted):
- if s.deleted:
- #assert s in self._deleted
- del s.deleted
- self.session._update_impl(s)
-
- assert not self.session._deleted
-
- for s in self.session.identity_map.all_states():
- s.expire(s.dict, self.session.identity_map._modified)
-
- def _remove_snapshot(self):
- assert self._is_transaction_boundary
-
- if not self.nested and self.session.expire_on_commit:
- for s in self.session.identity_map.all_states():
- s.expire(s.dict, self.session.identity_map._modified)
-
- def _connection_for_bind(self, bind):
- self._assert_is_active()
-
- if bind in self._connections:
- return self._connections[bind][0]
-
- if self._parent:
- conn = self._parent._connection_for_bind(bind)
- if not self.nested:
- return conn
- else:
- if isinstance(bind, engine.Connection):
- conn = bind
- if conn.engine in self._connections:
- raise sa_exc.InvalidRequestError(
- "Session already has a Connection associated for the "
- "given Connection's Engine")
- else:
- conn = bind.contextual_connect()
-
- if self.session.twophase and self._parent is None:
- transaction = conn.begin_twophase()
- elif self.nested:
- transaction = conn.begin_nested()
- else:
- transaction = conn.begin()
-
- self._connections[conn] = self._connections[conn.engine] = \
- (conn, transaction, conn is not bind)
- self.session.dispatch.after_begin(self.session, self, conn)
- return conn
-
- def prepare(self):
- if self._parent is not None or not self.session.twophase:
- raise sa_exc.InvalidRequestError(
- "Only root two phase transactions of can be prepared")
- self._prepare_impl()
-
- def _prepare_impl(self):
- self._assert_is_active()
- if self._parent is None or self.nested:
- self.session.dispatch.before_commit(self.session)
-
- stx = self.session.transaction
- if stx is not self:
- for subtransaction in stx._iterate_parents(upto=self):
- subtransaction.commit()
-
- if not self.session._flushing:
- self.session.flush()
-
- if self._parent is None and self.session.twophase:
- try:
- for t in set(self._connections.values()):
- t[1].prepare()
- except:
- self.rollback()
- raise
-
- self._deactivate()
- self._prepared = True
-
- def commit(self):
- self._assert_is_open()
- if not self._prepared:
- self._prepare_impl()
-
- if self._parent is None or self.nested:
- for t in set(self._connections.values()):
- t[1].commit()
-
- self.session.dispatch.after_commit(self.session)
-
- if self.session._enable_transaction_accounting:
- self._remove_snapshot()
-
- self.close()
- return self._parent
-
- def rollback(self, _capture_exception=False):
- self._assert_is_open()
-
- stx = self.session.transaction
- if stx is not self:
- for subtransaction in stx._iterate_parents(upto=self):
- subtransaction.close()
-
- if self.is_active or self._prepared:
- for transaction in self._iterate_parents():
- if transaction._parent is None or transaction.nested:
- transaction._rollback_impl()
- transaction._deactivate()
- break
- else:
- transaction._deactivate()
-
- self.close()
- if self._parent and _capture_exception:
- self._parent._rollback_exception = sys.exc_info()[1]
- return self._parent
-
- def _rollback_impl(self):
- for t in set(self._connections.values()):
- t[1].rollback()
-
- if self.session._enable_transaction_accounting:
- self._restore_snapshot()
-
- self.session.dispatch.after_rollback(self.session)
-
- def _deactivate(self):
- self._active = False
-
- def close(self):
- self.session.transaction = self._parent
- if self._parent is None:
- for connection, transaction, autoclose in \
- set(self._connections.values()):
- if autoclose:
- connection.close()
- else:
- transaction.close()
- if not self.session.autocommit:
- self.session.begin()
- self._deactivate()
- self.session = None
- self._connections = None
-
- def __enter__(self):
- return self
-
- def __exit__(self, type, value, traceback):
- self._assert_is_open("Cannot end transaction context. The transaction "
- "was closed from within the context")
- if self.session.transaction is None:
- return
- if type is None:
- try:
- self.commit()
- except:
- self.rollback()
- raise
- else:
- self.rollback()
-
-class Session(object):
- """Manages persistence operations for ORM-mapped objects.
-
- The Session's usage paradigm is described at :ref:`session_toplevel`.
-
-
- """
-
- public_methods = (
- '__contains__', '__iter__', 'add', 'add_all', 'begin', 'begin_nested',
- 'close', 'commit', 'connection', 'delete', 'execute', 'expire',
- 'expire_all', 'expunge', 'expunge_all', 'flush', 'get_bind',
- 'is_modified',
- 'merge', 'query', 'refresh', 'rollback',
- 'scalar')
-
-
- def __init__(self, bind=None, autoflush=True, expire_on_commit=True,
- _enable_transaction_accounting=True,
- autocommit=False, twophase=False,
- weak_identity_map=True, binds=None, extension=None,
- query_cls=query.Query):
- """Construct a new Session.
-
- See also the :func:`.sessionmaker` function which is used to
- generate a :class:`.Session`-producing callable with a given
- set of arguments.
-
- :param autocommit: Defaults to ``False``. When ``True``, the ``Session``
- does not keep a persistent transaction running, and will acquire
- connections from the engine on an as-needed basis, returning them
- immediately after their use. Flushes will begin and commit (or possibly
- rollback) their own transaction if no transaction is present. When using
- this mode, the `session.begin()` method may be used to begin a
- transaction explicitly.
-
- Leaving it on its default value of ``False`` means that the ``Session``
- will acquire a connection and begin a transaction the first time it is
- used, which it will maintain persistently until ``rollback()``,
- ``commit()``, or ``close()`` is called. When the transaction is released
- by any of these methods, the ``Session`` is ready for the next usage,
- which will again acquire and maintain a new connection/transaction.
-
- :param autoflush: When ``True``, all query operations will issue a
- ``flush()`` call to this ``Session`` before proceeding. This is a
- convenience feature so that ``flush()`` need not be called repeatedly
- in order for database queries to retrieve results. It's typical that
- ``autoflush`` is used in conjunction with ``autocommit=False``. In this
- scenario, explicit calls to ``flush()`` are rarely needed; you usually
- only need to call ``commit()`` (which flushes) to finalize changes.
-
- :param bind: An optional ``Engine`` or ``Connection`` to which this
- ``Session`` should be bound. When specified, all SQL operations
- performed by this session will execute via this connectable.
-
- :param binds: An optional dictionary which contains more granular "bind"
- information than the ``bind`` parameter provides. This dictionary can
- map individual ``Table`` instances as well as ``Mapper`` instances to
- individual ``Engine`` or ``Connection`` objects. Operations which
- proceed relative to a particular ``Mapper`` will consult this
- dictionary for the direct ``Mapper`` instance as well as the mapper's
- ``mapped_table`` attribute in order to locate a connectable to use.
- The full resolution is described in the ``get_bind()`` method of
- ``Session``. Usage looks like::
-
- Session = sessionmaker(binds={
- SomeMappedClass: create_engine('postgresql://engine1'),
- somemapper: create_engine('postgresql://engine2'),
- some_table: create_engine('postgresql://engine3'),
- })
-
- Also see the :meth:`.Session.bind_mapper` and :meth:`.Session.bind_table` methods.
-
- :param \class_: Specify an alternate class other than
- ``sqlalchemy.orm.session.Session`` which should be used by the returned
- class. This is the only argument that is local to the
- ``sessionmaker()`` function, and is not sent directly to the
- constructor for ``Session``.
-
- :param _enable_transaction_accounting: Defaults to ``True``. A
- legacy-only flag which when ``False`` disables *all* 0.5-style object
- accounting on transaction boundaries, including auto-expiry of
- instances on rollback and commit, maintenance of the "new" and
- "deleted" lists upon rollback, and autoflush of pending changes upon
- begin(), all of which are interdependent.
-
- :param expire_on_commit: Defaults to ``True``. When ``True``, all
- instances will be fully expired after each ``commit()``, so that all
- attribute/object access subsequent to a completed transaction will load
- from the most recent database state.
-
- :param extension: An optional
- :class:`~.SessionExtension` instance, or a list
- of such instances, which will receive pre- and post- commit and flush
- events, as well as a post-rollback event. **Deprecated.**
- Please see :class:`.SessionEvents`.
-
- :param query_cls: Class which should be used to create new Query objects,
- as returned by the ``query()`` method. Defaults to
- :class:`~sqlalchemy.orm.query.Query`.
-
- :param twophase: When ``True``, all transactions will be started as
- a "two phase" transaction, i.e. using the "two phase" semantics
- of the database in use along with an XID. During a ``commit()``,
- after ``flush()`` has been issued for all attached databases, the
- ``prepare()`` method on each database's ``TwoPhaseTransaction`` will
- be called. This allows each database to roll back the entire
- transaction before any of the individual transactions are committed.
-
- :param weak_identity_map: Defaults to ``True`` - when set to
- ``False``, objects placed in the :class:`.Session` will be
- strongly referenced until explicitly removed or the
- :class:`.Session` is closed. **Deprecated** - this option
- is obsolete.
-
- """
-
- if weak_identity_map:
- self._identity_cls = identity.WeakInstanceDict
- else:
- util.warn_deprecated("weak_identity_map=False is deprecated. "
- "This feature is not needed.")
- self._identity_cls = identity.StrongInstanceDict
- self.identity_map = self._identity_cls()
-
- self._new = {} # InstanceState->object, strong refs object
- self._deleted = {} # same
- self.bind = bind
- self.__binds = {}
- self._flushing = False
- self.transaction = None
- self.hash_key = id(self)
- self.autoflush = autoflush
- self.autocommit = autocommit
- self.expire_on_commit = expire_on_commit
- self._enable_transaction_accounting = _enable_transaction_accounting
- self.twophase = twophase
- self._query_cls = query_cls
-
- if extension:
- for ext in util.to_list(extension):
- SessionExtension._adapt_listener(self, ext)
-
- if binds is not None:
- for mapperortable, bind in binds.iteritems():
- if isinstance(mapperortable, (type, Mapper)):
- self.bind_mapper(mapperortable, bind)
- else:
- self.bind_table(mapperortable, bind)
-
- if not self.autocommit:
- self.begin()
- _sessions[self.hash_key] = self
-
- dispatch = event.dispatcher(SessionEvents)
-
- connection_callable = None
-
- def begin(self, subtransactions=False, nested=False):
- """Begin a transaction on this Session.
-
- If this Session is already within a transaction, either a plain
- transaction or nested transaction, an error is raised, unless
- ``subtransactions=True`` or ``nested=True`` is specified.
-
- The ``subtransactions=True`` flag indicates that this :meth:`~.Session.begin`
- can create a subtransaction if a transaction is already in progress.
- For documentation on subtransactions, please see :ref:`session_subtransactions`.
-
- The ``nested`` flag begins a SAVEPOINT transaction and is equivalent
- to calling :meth:`~.Session.begin_nested`. For documentation on SAVEPOINT
- transactions, please see :ref:`session_begin_nested`.
-
- """
- if self.transaction is not None:
- if subtransactions or nested:
- self.transaction = self.transaction._begin(
- nested=nested)
- else:
- raise sa_exc.InvalidRequestError(
- "A transaction is already begun. Use subtransactions=True "
- "to allow subtransactions.")
- else:
- self.transaction = SessionTransaction(
- self, nested=nested)
- return self.transaction # needed for __enter__/__exit__ hook
-
- def begin_nested(self):
- """Begin a `nested` transaction on this Session.
-
- The target database(s) must support SQL SAVEPOINTs or a
- SQLAlchemy-supported vendor implementation of the idea.
-
- For documentation on SAVEPOINT
- transactions, please see :ref:`session_begin_nested`.
-
- """
- return self.begin(nested=True)
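-
- # Editor's sketch (not part of the original source), assuming mapped
- # instances u1/u2/u3: SAVEPOINT semantics via begin_nested(), plus the
- # context-manager hook provided by SessionTransaction.
- #
- #     session.add(u1)
- #     session.begin_nested()     # emits SAVEPOINT
- #     session.add(u2)
- #     session.rollback()         # rolls back to SAVEPOINT; u1 remains
- #     session.commit()           # commits u1
- #
- #     # with autocommit=True, begin() returns a SessionTransaction
- #     # usable as a context manager:
- #     with session.begin():
- #         session.add(u3)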
-
- def rollback(self):
- """Rollback the current transaction in progress.
-
- If no transaction is in progress, this method is a pass-through.
-
- This method rolls back the current transaction or nested transaction
- regardless of subtransactions being in effect. All subtransactions up
- to the first real transaction are closed. Subtransactions occur when
- begin() is called multiple times.
-
- """
- if self.transaction is None:
- pass
- else:
- self.transaction.rollback()
-
- def commit(self):
- """Flush pending changes and commit the current transaction.
-
- If no transaction is in progress, this method raises an
- InvalidRequestError.
-
- By default, the :class:`.Session` also expires all database
- loaded state on all ORM-managed attributes after transaction commit.
- This is so that subsequent operations load the most recent
- data from the database. This behavior can be disabled using
- the ``expire_on_commit=False`` option to :func:`.sessionmaker` or
- the :class:`.Session` constructor.
-
- If a subtransaction is in effect (which occurs when begin() is called
- multiple times), the subtransaction will be closed, and the next call
- to ``commit()`` will operate on the enclosing transaction.
-
- For a session configured with autocommit=False, a new transaction will
- be begun immediately after the commit, but note that the newly begun
- transaction does *not* use any connection resources until the first
- SQL is actually emitted.
-
- """
- if self.transaction is None:
- if not self.autocommit:
- self.begin()
- else:
- raise sa_exc.InvalidRequestError("No transaction is begun.")
-
- self.transaction.commit()
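-
- # Editor's sketch (not part of the original source): with the default
- # expire_on_commit=True, attribute access after commit() reloads state.
- # ``user`` is an assumed persistent instance.
- #
- #     user.name = 'ed'
- #     session.commit()   # flush + COMMIT; all instances expired
- #     user.name          # begins a new transaction, emits a fresh SELECT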
-
- def prepare(self):
- """Prepare the current transaction in progress for two phase commit.
-
- If no transaction is in progress, this method raises an
- InvalidRequestError.
-
- Only root transactions of two phase sessions can be prepared. If the
- current transaction is not such, an InvalidRequestError is raised.
-
- """
- if self.transaction is None:
- if not self.autocommit:
- self.begin()
- else:
- raise sa_exc.InvalidRequestError("No transaction is begun.")
-
- self.transaction.prepare()
-
- def connection(self, mapper=None, clause=None,
- bind=None,
- close_with_result=False,
- **kw):
- """Return a :class:`.Connection` object corresponding to this
- :class:`.Session` object's transactional state.
-
- If this :class:`.Session` is configured with ``autocommit=False``,
- either the :class:`.Connection` corresponding to the current transaction
- is returned, or if no transaction is in progress, a new one is begun
- and the :class:`.Connection` returned.
-
- Alternatively, if this :class:`.Session` is configured with ``autocommit=True``,
- an ad-hoc :class:`.Connection` is returned using :meth:`.Engine.contextual_connect`
- on the underlying :class:`.Engine`.
-
- Ambiguity in multi-bind or unbound :class:`.Session` objects can be resolved through
- any of the optional keyword arguments. Resolution ultimately falls
- through to the :meth:`.get_bind` method.
-
- :param bind:
- Optional :class:`.Engine` to be used as the bind. If
- this engine is already involved in an ongoing transaction,
- that connection will be used. This argument takes precedence
- over ``mapper``, ``clause``.
-
- :param mapper:
- Optional :func:`.mapper` mapped class, used to identify
- the appropriate bind. This argument takes precedence over
- ``clause``.
-
- :param clause:
- A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`,
- :func:`~.sql.expression.text`,
- etc.) which will be used to locate a bind, if a bind
- cannot otherwise be identified.
-
- :param close_with_result: Passed to :meth:`Engine.connect`, indicating
- the :class:`.Connection` should be considered "single use", automatically
- closing when the first result set is closed. This flag only has
- an effect if this :class:`.Session` is configured with ``autocommit=True``
- and does not already have a transaction in progress.
-
- :param \**kw:
- Additional keyword arguments are sent to :meth:`get_bind()`,
- allowing additional arguments to be passed to custom
- implementations of :meth:`get_bind`.
-
- """
- if bind is None:
- bind = self.get_bind(mapper, clause=clause, **kw)
-
- return self._connection_for_bind(bind,
- close_with_result=close_with_result)
-
- def _connection_for_bind(self, engine, **kwargs):
- if self.transaction is not None:
- return self.transaction._connection_for_bind(engine)
- else:
- return engine.contextual_connect(**kwargs)
-
- def execute(self, clause, params=None, mapper=None, bind=None, **kw):
- """Execute a clause within the current transaction.
-
- Returns a :class:`.ResultProxy` representing
- results of the statement execution, in the same manner as that of an
- :class:`.Engine` or
- :class:`.Connection`.
-
- :meth:`~.Session.execute` accepts any executable clause construct, such
- as :func:`~.sql.expression.select`,
- :func:`~.sql.expression.insert`,
- :func:`~.sql.expression.update`,
- :func:`~.sql.expression.delete`, and
- :func:`~.sql.expression.text`, and additionally accepts
- plain strings that represent SQL statements. If a plain string is
- passed, it is first converted to a
- :func:`~.sql.expression.text` construct, which here means
- that bind parameters should be specified using the format ``:param``.
-
- The statement is executed within the current transactional context of
- this :class:`.Session`, using the same behavior as that of
- the :meth:`.Session.connection` method to determine the active
- :class:`.Connection`. The ``close_with_result`` flag is
- set to ``True`` so that an ``autocommit=True`` :class:`.Session`
- with no active transaction will produce a result that auto-closes
- the underlying :class:`.Connection`.
-
- :param clause:
- A :class:`.ClauseElement` (i.e. :func:`~.sql.expression.select`,
- :func:`~.sql.expression.text`, etc.) or string SQL statement to be executed. The clause
- will also be used to locate a bind, if this :class:`.Session`
- is not bound to a single engine already, and the ``mapper``
- and ``bind`` arguments are not passed.
-
- :param params:
- Optional dictionary of bind names mapped to values.
-
- :param mapper:
- Optional :func:`.mapper` or mapped class, used to identify
- the appropriate bind. This argument takes precedence over
- ``clause`` when locating a bind.
-
- :param bind:
- Optional :class:`.Engine` to be used as the bind. If
- this engine is already involved in an ongoing transaction,
- that connection will be used. This argument takes
- precedence over ``mapper`` and ``clause`` when locating
- a bind.
-
- :param \**kw:
- Additional keyword arguments are sent to :meth:`get_bind()`,
- allowing additional arguments to be passed to custom
- implementations of :meth:`get_bind`.
-
- """
- clause = expression._literal_as_text(clause)
-
- if bind is None:
- bind = self.get_bind(mapper, clause=clause, **kw)
-
- return self._connection_for_bind(bind, close_with_result=True).execute(
- clause, params or {})
-
- def scalar(self, clause, params=None, mapper=None, bind=None, **kw):
- """Like :meth:`~.Session.execute` but return a scalar result."""
-
- return self.execute(clause, params=params, mapper=mapper, bind=bind, **kw).scalar()
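-
- # Editor's sketch (not part of the original source): execute() with a
- # textual statement and :param-style binds; scalar() for a single value.
- # The ``users`` table and the ``session`` object are assumed.
- #
- #     from sqlalchemy import text
- #
- #     result = session.execute(
- #         text("SELECT id FROM users WHERE name = :name"),
- #         {"name": "ed"})
- #     row = result.fetchone()
- #
- #     n = session.scalar(text("SELECT count(*) FROM users"))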
-
- def close(self):
- """Close this Session.
-
- This clears all items and ends any transaction in progress.
-
- If this session were created with ``autocommit=False``, a new
- transaction is immediately begun. Note that this new transaction does
- not use any connection resources until they are first needed.
-
- """
- self.expunge_all()
- if self.transaction is not None:
- for transaction in self.transaction._iterate_parents():
- transaction.close()
-
- @classmethod
- def close_all(cls):
- """Close *all* sessions in memory."""
-
- for sess in _sessions.values():
- sess.close()
-
- def expunge_all(self):
- """Remove all object instances from this ``Session``.
-
- This is equivalent to calling ``expunge(obj)`` on all objects in this
- ``Session``.
-
- """
- for state in self.identity_map.all_states() + list(self._new):
- state.detach()
-
- self.identity_map = self._identity_cls()
- self._new = {}
- self._deleted = {}
-
- # TODO: need much more test coverage for bind_mapper() and similar !
- # TODO: + crystallize + document resolution order vis. bind_mapper/bind_table
-
- def bind_mapper(self, mapper, bind):
- """Bind operations for a mapper to a Connectable.
-
- mapper
- A mapper instance or mapped class
-
- bind
- Any Connectable: a ``Engine`` or ``Connection``.
-
- All subsequent operations involving this mapper will use the given
- `bind`.
-
- """
- if isinstance(mapper, type):
- mapper = _class_mapper(mapper)
-
- self.__binds[mapper.base_mapper] = bind
- for t in mapper._all_tables:
- self.__binds[t] = bind
-
- def bind_table(self, table, bind):
- """Bind operations on a Table to a Connectable.
-
- table
- A ``Table`` instance
-
- bind
- Any Connectable: a ``Engine`` or ``Connection``.
-
- All subsequent operations involving this ``Table`` will use the
- given `bind`.
-
- """
- self.__binds[table] = bind
-
- def get_bind(self, mapper, clause=None):
- """Return an engine corresponding to the given arguments.
-
- All arguments are optional.
-
- mapper
- Optional, a ``Mapper`` or mapped class
-
- clause
- Optional, A ClauseElement (i.e. select(), text(), etc.)
-
- """
- if mapper is clause is None:
- if self.bind:
- return self.bind
- else:
- raise sa_exc.UnboundExecutionError(
- "This session is not bound to a single Engine or "
- "Connection, and no context was provided to locate "
- "a binding.")
-
- c_mapper = mapper is not None and _class_to_mapper(mapper) or None
-
- # manually bound?
- if self.__binds:
- if c_mapper:
- if c_mapper.base_mapper in self.__binds:
- return self.__binds[c_mapper.base_mapper]
- elif c_mapper.mapped_table in self.__binds:
- return self.__binds[c_mapper.mapped_table]
- if clause is not None:
- for t in sql_util.find_tables(clause, include_crud=True):
- if t in self.__binds:
- return self.__binds[t]
-
- if self.bind:
- return self.bind
-
- if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
- return clause.bind
-
- if c_mapper and c_mapper.mapped_table.bind:
- return c_mapper.mapped_table.bind
-
- context = []
- if mapper is not None:
- context.append('mapper %s' % c_mapper)
- if clause is not None:
- context.append('SQL expression')
-
- raise sa_exc.UnboundExecutionError(
- "Could not locate a bind configured on %s or this Session" % (
- ', '.join(context)))
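-
- # Editor's sketch (not part of the original source) of the resolution
- # order implemented above; ``User``, ``addresses_table``, ``engine1``
- # and ``engine2`` are assumed names.
- #
- #     session = Session(binds={
- #         User: engine1,                 # routed via bind_mapper()
- #         addresses_table: engine2,      # routed via bind_table()
- #     })
- #     session.get_bind(User)                                   # engine1
- #     session.get_bind(None, clause=addresses_table.select())  # engine2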
-
- def query(self, *entities, **kwargs):
- """Return a new ``Query`` object corresponding to this ``Session``."""
-
- return self._query_cls(entities, self, **kwargs)
-
- def _autoflush(self):
- if self.autoflush and not self._flushing:
- self.flush()
-
- def _finalize_loaded(self, states):
- for state, dict_ in states.items():
- state.commit_all(dict_, self.identity_map)
-
- def refresh(self, instance, attribute_names=None, lockmode=None):
- """Expire and refresh the attributes on the given instance.
-
- A query will be issued to the database and all attributes will be
- refreshed with their current database value.
-
- Lazy-loaded relational attributes will remain lazily loaded, so that
- the instance-wide refresh operation will be followed immediately by
- the lazy load of that attribute.
-
- Eagerly-loaded relational attributes will eagerly load within the
- single refresh operation.
-
- Note that a highly isolated transaction will return the same values as
- were previously read in that same transaction, regardless of changes
- in database state outside of that transaction - usage of
- :meth:`~Session.refresh` usually only makes sense if non-ORM SQL
- statements were emitted in the ongoing transaction, or if autocommit
- mode is turned on.
-
- :param attribute_names: optional. An iterable collection of
- string attribute names indicating a subset of attributes to
- be refreshed.
-
- :param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query`
- as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
-
- self._expire_state(state, attribute_names)
-
- if self.query(_object_mapper(instance))._load_on_ident(
- state.key, refresh_state=state,
- lockmode=lockmode,
- only_load_props=attribute_names) is None:
- raise sa_exc.InvalidRequestError(
- "Could not refresh instance '%s'" %
- mapperutil.instance_str(instance))
-
- def expire_all(self):
- """Expires all persistent instances within this Session.
-
- When any attribute on a persistent instance is next accessed,
- a query will be issued using the
- :class:`.Session` object's current transactional context in order to
- load all expired attributes for the given instance. Note that
- a highly isolated transaction will return the same values as were
- previously read in that same transaction, regardless of changes
- in database state outside of that transaction.
-
- To expire individual objects and individual attributes
- on those objects, use :meth:`Session.expire`.
-
- The :class:`.Session` object's default behavior is to
- expire all state whenever the :meth:`Session.rollback`
- or :meth:`Session.commit` methods are called, so that new
- state can be loaded for the new transaction. For this reason,
- calling :meth:`Session.expire_all` should not be needed when
- autocommit is ``False``, assuming the transaction is isolated.
-
- """
- for state in self.identity_map.all_states():
- state.expire(state.dict, self.identity_map._modified)
-
- def expire(self, instance, attribute_names=None):
- """Expire the attributes on an instance.
-
- Marks the attributes of an instance as out of date. When an expired
- attribute is next accessed, a query will be issued to the
- :class:`.Session` object's current transactional context in order to
- load all expired attributes for the given instance. Note that
- a highly isolated transaction will return the same values as were
- previously read in that same transaction, regardless of changes
- in database state outside of that transaction.
-
- To expire all objects in the :class:`.Session` simultaneously,
- use :meth:`Session.expire_all`.
-
- The :class:`.Session` object's default behavior is to
- expire all state whenever the :meth:`Session.rollback`
- or :meth:`Session.commit` methods are called, so that new
- state can be loaded for the new transaction. For this reason,
- calling :meth:`Session.expire` only makes sense for the specific
- case that a non-ORM SQL statement was emitted in the current
- transaction.
-
- :param instance: The instance to be refreshed.
- :param attribute_names: optional list of string attribute names
- indicating a subset of attributes to be expired.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
- self._expire_state(state, attribute_names)
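-
- # Editor's sketch (not part of the original source), assuming a mapped
- # ``User`` instance ``user`` already loaded by ``session``:
- #
- #     session.expire(user, ['name'])  # 'name' reloads on next access
- #     session.refresh(user)           # SELECT now; all attributes reloaded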
-
- def _expire_state(self, state, attribute_names):
- self._validate_persistent(state)
- if attribute_names:
- state.expire_attributes(state.dict, attribute_names)
- else:
- # pre-fetch the full cascade since the expire is going to
- # remove associations
- cascaded = list(state.manager.mapper.cascade_iterator(
- 'refresh-expire', state))
- self._conditional_expire(state)
- for o, m, st_, dct_ in cascaded:
- self._conditional_expire(st_)
-
- def _conditional_expire(self, state):
- """Expire a state if persistent, else expunge if pending"""
-
- if state.key:
- state.expire(state.dict, self.identity_map._modified)
- elif state in self._new:
- self._new.pop(state)
- state.detach()
-
- @util.deprecated("0.7", "The non-weak-referencing identity map "
- "feature is no longer needed.")
- def prune(self):
- """Remove unreferenced instances cached in the identity map.
-
- Note that this method is only meaningful if "weak_identity_map" is set
- to False. The default weak identity map is self-pruning.
-
- Removes any object in this Session's identity map that is not
- referenced in user code, modified, new or scheduled for deletion.
- Returns the number of objects pruned.
-
- """
- return self.identity_map.prune()
-
- def expunge(self, instance):
- """Remove the `instance` from this ``Session``.
-
- This will free all internal references to the instance. Cascading
- will be applied according to the *expunge* cascade rule.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
- if state.session_id is not self.hash_key:
- raise sa_exc.InvalidRequestError(
- "Instance %s is not present in this Session" %
- mapperutil.state_str(state))
-
- cascaded = list(state.manager.mapper.cascade_iterator(
- 'expunge', state))
- self._expunge_state(state)
- for o, m, st_, dct_ in cascaded:
- self._expunge_state(st_)
-
- def _expunge_state(self, state):
- if state in self._new:
- self._new.pop(state)
- state.detach()
- elif self.identity_map.contains_state(state):
- self.identity_map.discard(state)
- self._deleted.pop(state, None)
- state.detach()
-
- def _register_newly_persistent(self, state):
- mapper = _state_mapper(state)
-
- # guard against last minute dereferences of the object
- obj = state.obj()
- if obj is not None:
-
- instance_key = mapper._identity_key_from_state(state)
-
- if _none_set.issubset(instance_key[1]) and \
- not mapper.allow_partial_pks or \
- _none_set.issuperset(instance_key[1]):
- raise exc.FlushError('Instance %s has a NULL identity '
- 'key. Check if this flush is occurring at an '
- 'inappropriate time, such as during a load '
- 'operation.' % mapperutil.state_str(state))
-
- if state.key is None:
- state.key = instance_key
- elif state.key != instance_key:
- # primary key switch. use discard() in case another
- # state has already replaced this one in the identity
- # map (see test/orm/test_naturalpks.py ReversePKsTest)
- self.identity_map.discard(state)
- state.key = instance_key
-
- self.identity_map.replace(state)
- state.commit_all(state.dict, self.identity_map)
-
- # remove from new last, might be the last strong ref
- if state in self._new:
- if self._enable_transaction_accounting and self.transaction:
- self.transaction._new[state] = True
- self._new.pop(state)
-
- def _remove_newly_deleted(self, state):
- if self._enable_transaction_accounting and self.transaction:
- self.transaction._deleted[state] = True
-
- self.identity_map.discard(state)
- self._deleted.pop(state, None)
- state.deleted = True
-
- def add(self, instance):
- """Place an object in the ``Session``.
-
- Its state will be persisted to the database on the next flush
- operation.
-
- Repeated calls to ``add()`` will be ignored. The opposite of ``add()``
- is ``expunge()``.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
-
- self._save_or_update_state(state)
-
- def add_all(self, instances):
- """Add the given collection of instances to this ``Session``."""
-
- for instance in instances:
- self.add(instance)
-
- def _save_or_update_state(self, state):
- self._save_or_update_impl(state)
-
- mapper = _state_mapper(state)
- for o, m, st_, dct_ in mapper.cascade_iterator(
- 'save-update',
- state,
- halt_on=self._contains_state):
- self._save_or_update_impl(st_)
-
- def delete(self, instance):
- """Mark an instance as deleted.
-
- The database delete operation occurs upon ``flush()``.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
-
- if state.key is None:
- raise sa_exc.InvalidRequestError(
- "Instance '%s' is not persisted" %
- mapperutil.state_str(state))
-
- if state in self._deleted:
- return
-
- # ensure object is attached to allow the
- # cascade operation to load deferred attributes
- # and collections
- self._attach(state)
-
- # grab the cascades before adding the item to the deleted list
- # so that autoflush does not delete the item
- # the strong reference to the instance itself is significant here
- cascade_states = list(state.manager.mapper.cascade_iterator(
- 'delete', state))
-
- self._deleted[state] = state.obj()
- self.identity_map.add(state)
-
- for o, m, st_, dct_ in cascade_states:
- self._delete_impl(st_)
-
- def merge(self, instance, load=True, **kw):
- """Copy the state an instance onto the persistent instance with the
- same identifier.
-
- If there is no persistent instance currently associated with the
- session, it will be loaded. Return the persistent instance. If the
- given instance is unsaved, save a copy of it and return it as a newly
- persistent instance. The given instance does not become associated
- with the session.
-
- This operation cascades to associated instances if the association is
- mapped with ``cascade="merge"``.
-
- See :ref:`unitofwork_merging` for a detailed discussion of merging.
-
- """
- if 'dont_load' in kw:
- load = not kw['dont_load']
- util.warn_deprecated('dont_load=True has been renamed to '
- 'load=False.')
-
- _recursive = {}
-
- if load:
- # flush current contents if we expect to load data
- self._autoflush()
-
- _object_mapper(instance) # verify mapped
- autoflush = self.autoflush
- try:
- self.autoflush = False
- return self._merge(
- attributes.instance_state(instance),
- attributes.instance_dict(instance),
- load=load, _recursive=_recursive)
- finally:
- self.autoflush = autoflush
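-
- # Editor's sketch (not part of the original source): merging a detached
- # object; ``User`` is an assumed mapped class with primary key ``id``.
- #
- #     detached = User(id=5, name='ed')      # not attached to any session
- #     persistent = session.merge(detached)  # loads pk 5 (or creates it),
- #                                           # copies state onto that object
- #     assert persistent in session
- #     assert detached not in session        # the original stays detached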
-
- def _merge(self, state, state_dict, load=True, _recursive=None):
- mapper = _state_mapper(state)
- if state in _recursive:
- return _recursive[state]
-
- new_instance = False
- key = state.key
-
- if key is None:
- if not load:
- raise sa_exc.InvalidRequestError(
- "merge() with load=False option does not support "
- "objects transient (i.e. unpersisted) objects. flush() "
- "all changes on mapped instances before merging with "
- "load=False.")
- key = mapper._identity_key_from_state(state)
-
- if key in self.identity_map:
- merged = self.identity_map[key]
-
- elif not load:
- if state.modified:
- raise sa_exc.InvalidRequestError(
- "merge() with load=False option does not support "
- "objects marked as 'dirty'. flush() all changes on "
- "mapped instances before merging with load=False.")
- merged = mapper.class_manager.new_instance()
- merged_state = attributes.instance_state(merged)
- merged_state.key = key
- self._update_impl(merged_state)
- new_instance = True
-
- elif not _none_set.issubset(key[1]) or \
- (mapper.allow_partial_pks and
- not _none_set.issuperset(key[1])):
- merged = self.query(mapper.class_).get(key[1])
- else:
- merged = None
-
- if merged is None:
- merged = mapper.class_manager.new_instance()
- merged_state = attributes.instance_state(merged)
- merged_dict = attributes.instance_dict(merged)
- new_instance = True
- self._save_or_update_state(merged_state)
- else:
- merged_state = attributes.instance_state(merged)
- merged_dict = attributes.instance_dict(merged)
-
- _recursive[state] = merged
-
- # check that we didn't just pull the exact same
- # state out.
- if state is not merged_state:
- # version check if applicable
- if mapper.version_id_col is not None:
- existing_version = mapper._get_state_attr_by_column(
- state,
- state_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
-
- merged_version = mapper._get_state_attr_by_column(
- merged_state,
- merged_dict,
- mapper.version_id_col,
- passive=attributes.PASSIVE_NO_INITIALIZE)
-
- if existing_version is not attributes.PASSIVE_NO_RESULT and \
- merged_version is not attributes.PASSIVE_NO_RESULT and \
- existing_version != merged_version:
- raise exc.StaleDataError(
- "Version id '%s' on merged state %s "
- "does not match existing version '%s'. "
- "Leave the version attribute unset when "
- "merging to update the most recent version."
- % (
- existing_version,
- mapperutil.state_str(merged_state),
- merged_version
- ))
-
- merged_state.load_path = state.load_path
- merged_state.load_options = state.load_options
-
- for prop in mapper.iterate_properties:
- prop.merge(self, state, state_dict,
- merged_state, merged_dict,
- load, _recursive)
-
- if not load:
- # remove any history
- merged_state.commit_all(merged_dict, self.identity_map)
-
- if new_instance:
- merged_state.manager.dispatch.load(merged_state, None)
- return merged
-
- @classmethod
- def identity_key(cls, *args, **kwargs):
- return mapperutil.identity_key(*args, **kwargs)
-
- @classmethod
- def object_session(cls, instance):
- """Return the ``Session`` to which an object belongs."""
-
- return object_session(instance)
-
- def _validate_persistent(self, state):
- if not self.identity_map.contains_state(state):
- raise sa_exc.InvalidRequestError(
- "Instance '%s' is not persistent within this Session" %
- mapperutil.state_str(state))
-
- def _save_impl(self, state):
- if state.key is not None:
- raise sa_exc.InvalidRequestError(
- "Object '%s' already has an identity - it can't be registered "
- "as pending" % mapperutil.state_str(state))
-
- self._attach(state)
- if state not in self._new:
- self._new[state] = state.obj()
- state.insert_order = len(self._new)
-
- def _update_impl(self, state):
- if (self.identity_map.contains_state(state) and
- state not in self._deleted):
- return
-
- if state.key is None:
- raise sa_exc.InvalidRequestError(
- "Instance '%s' is not persisted" %
- mapperutil.state_str(state))
-
- if state.deleted:
- raise sa_exc.InvalidRequestError(
- "Instance '%s' has been deleted. Use the make_transient() "
- "function to send this object back to the transient state." %
- mapperutil.state_str(state)
- )
- self._attach(state)
- self._deleted.pop(state, None)
- self.identity_map.add(state)
-
- def _save_or_update_impl(self, state):
- if state.key is None:
- self._save_impl(state)
- else:
- self._update_impl(state)
-
- def _delete_impl(self, state):
- if state in self._deleted:
- return
-
- if state.key is None:
- return
-
- self._attach(state)
- self._deleted[state] = state.obj()
- self.identity_map.add(state)
-
- def _attach(self, state):
- if state.key and \
- state.key in self.identity_map and \
- not self.identity_map.contains_state(state):
- raise sa_exc.InvalidRequestError("Can't attach instance "
- "%s; another instance with key %s is already "
- "present in this session."
- % (mapperutil.state_str(state), state.key))
-
- if state.session_id and state.session_id is not self.hash_key:
- raise sa_exc.InvalidRequestError(
- "Object '%s' is already attached to session '%s' "
- "(this is '%s')" % (mapperutil.state_str(state),
- state.session_id, self.hash_key))
-
- if state.session_id != self.hash_key:
- state.session_id = self.hash_key
- if self.dispatch.after_attach:
- self.dispatch.after_attach(self, state.obj())
-
- def __contains__(self, instance):
- """Return True if the instance is associated with this session.
-
- A result of True means the instance is either pending or
- persistent within this Session.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
- return self._contains_state(state)
-
- def __iter__(self):
- """Iterate over all pending or persistent instances within this Session."""
-
- return iter(list(self._new.values()) + self.identity_map.values())
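-
- # Editor's sketch (not part of the original source); ``user`` is an
- # assumed mapped instance:
- #
- #     if user in session:       # __contains__: pending or persistent
- #         pass
- #     for obj in session:       # __iter__: pending objects, then persistent
- #         pass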
-
- def _contains_state(self, state):
- return state in self._new or self.identity_map.contains_state(state)
-
- def flush(self, objects=None):
- """Flush all the object changes to the database.
-
- Writes out all pending object creations, deletions and modifications
- to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are
- automatically ordered by the Session's unit of work dependency
- solver.
-
- Database operations will be issued in the current transactional
- context and do not affect the state of the transaction, unless an
- error occurs, in which case the entire transaction is rolled back.
- You may flush() as often as you like within a transaction to move
- changes from Python to the database's transaction buffer.
-
- For ``autocommit`` Sessions with no active manual transaction, flush()
- will create a transaction on the fly that surrounds the entire set of
- operations in the flush.
-
- objects
- Optional; a list or tuple collection. Restricts the flush operation
- to only these objects, rather than all pending changes.
- Deprecated - this flag prevents the session from properly maintaining
- accounting among inter-object relations and can cause invalid results.
-
- """
-
- if objects:
- util.warn_deprecated(
- "The 'objects' argument to session.flush() is deprecated; "
- "Please do not add objects to the session which should not "
- "yet be persisted.")
-
- if self._flushing:
- raise sa_exc.InvalidRequestError("Session is already flushing")
-
- try:
- self._flushing = True
- self._flush(objects)
- finally:
- self._flushing = False
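-
- # Editor's sketch (not part of the original source): flush() emits SQL
- # inside the current transaction without ending it. ``User`` is assumed.
- #
- #     session.add(User(name='ed'))
- #     session.flush()      # INSERT emitted, transaction still open
- #     session.rollback()   # the INSERT is rolled back with the transaction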
-
- def _flush(self, objects=None):
- if (not self.identity_map.check_modified() and
- not self._deleted and not self._new):
- return
-
- dirty = self._dirty_states
- if not dirty and not self._deleted and not self._new:
- self.identity_map._modified.clear()
- return
-
- flush_context = UOWTransaction(self)
-
- if self.dispatch.before_flush:
- self.dispatch.before_flush(self, flush_context, objects)
- # re-establish "dirty states" in case the listeners
- # added
- dirty = self._dirty_states
-
- deleted = set(self._deleted)
- new = set(self._new)
-
- dirty = set(dirty).difference(deleted)
-
- # create the set of all objects we want to operate upon
- if objects:
- # specific list passed in
- objset = set()
- for o in objects:
- try:
- state = attributes.instance_state(o)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(o)
- objset.add(state)
- else:
- objset = None
-
- # store objects whose fate has been decided
- processed = set()
-
- # put all saves/updates into the flush context. detect top-level
- # orphans and throw them into deleted.
- if objset:
- proc = new.union(dirty).intersection(objset).difference(deleted)
- else:
- proc = new.union(dirty).difference(deleted)
-
- for state in proc:
- is_orphan = _state_mapper(state)._is_orphan(state) and state.has_identity
- flush_context.register_object(state, isdelete=is_orphan)
- processed.add(state)
-
- # put all remaining deletes into the flush context.
- if objset:
- proc = deleted.intersection(objset).difference(processed)
- else:
- proc = deleted.difference(processed)
- for state in proc:
- flush_context.register_object(state, isdelete=True)
-
- if not flush_context.has_work:
- return
-
- flush_context.transaction = transaction = self.begin(
- subtransactions=True)
- try:
- flush_context.execute()
-
- self.dispatch.after_flush(self, flush_context)
-
- flush_context.finalize_flush_changes()
-
- # useful assertions:
- #if not objects:
- # assert not self.identity_map._modified
- #else:
- # assert self.identity_map._modified == \
- # self.identity_map._modified.difference(objects)
-
- self.dispatch.after_flush_postexec(self, flush_context)
-
- transaction.commit()
-
- except:
- transaction.rollback(_capture_exception=True)
- raise
-
-
- def is_modified(self, instance, include_collections=True,
- passive=attributes.PASSIVE_OFF):
- """Return ``True`` if instance has modified attributes.
-
- This method retrieves a history instance for each instrumented
- attribute on the instance and performs a comparison of the current
- value to its previously committed value.
-
- ``include_collections`` indicates if multivalued collections should be
- included in the operation. Setting this to False is a way to detect
- only local-column based properties (i.e. scalar columns or many-to-one
- foreign keys) that would result in an UPDATE for this instance upon
- flush.
-
- The ``passive`` flag indicates if unloaded attributes and collections
- should not be loaded in the course of performing this test.
- Allowed values include :attr:`.PASSIVE_OFF`, :attr:`.PASSIVE_NO_INITIALIZE`.
-
- A few caveats to this method apply:
-
- * Instances present in the 'dirty' collection may result in a value
- of ``False`` when tested with this method. This is because while
- the object may have received attribute set events, there may be
- no net changes on its state.
- * Scalar attributes may not have recorded the "previously" set
- value when a new value was applied, if the attribute was not loaded,
- or was expired, at the time the new value was received - in these
- cases, the attribute is assumed to have a change, even if there is
- ultimately no net change against its database value. SQLAlchemy in
- most cases does not need the "old" value when a set event occurs, so
- it skips the expense of a SQL call if the old value isn't present,
- based on the assumption that an UPDATE of the scalar value is
- usually needed, and in those few cases where it isn't, is less
- expensive on average than issuing a defensive SELECT.
-
- The "old" value is fetched unconditionally only if the attribute
- container has the "active_history" flag set to ``True``. This flag
- is set typically for primary key attributes and scalar references
- that are not a simple many-to-one.
-
- """
- try:
- state = attributes.instance_state(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
- dict_ = state.dict
- if passive is True:
- passive = attributes.PASSIVE_NO_INITIALIZE
- elif passive is False:
- passive = attributes.PASSIVE_OFF
- for attr in state.manager.attributes:
- if \
- (
- not include_collections and
- hasattr(attr.impl, 'get_collection')
- ) or not hasattr(attr.impl, 'get_history'):
- continue
-
- (added, unchanged, deleted) = \
- attr.impl.get_history(state, dict_, passive=passive)
-
- if added or deleted:
- return True
- return False
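-
- # Editor's sketch (not part of the original source); ``user`` is an
- # assumed persistent instance:
- #
- #     session.is_modified(user)   # False for a freshly loaded object
- #     user.name = 'newname'
- #     session.is_modified(user)   # True: net change vs. committed value
- #     session.is_modified(user, include_collections=False)
- #                                 # same test, ignoring collections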
-
- @property
- def is_active(self):
- """True if this Session has an active transaction."""
-
- return self.transaction and self.transaction.is_active
-
- @property
- def _dirty_states(self):
- """The set of all persistent states considered dirty.
-
- This method returns all states that were modified including
- those that were possibly deleted.
-
- """
- return self.identity_map._dirty_states()
-
- @property
- def dirty(self):
- """The set of all persistent instances considered dirty.
-
- Instances are considered dirty when they were modified but not
- deleted.
-
- Note that this 'dirty' calculation is 'optimistic'; most
- attribute-setting or collection modification operations will
- mark an instance as 'dirty' and place it in this set, even if
- there is no net change to the attribute's value. At flush
- time, the value of each attribute is compared to its
- previously saved value, and if there's no net change, no SQL
- operation will occur (this is a more expensive operation so
- it's only done at flush time).
-
- To check if an instance has actionable net changes to its
- attributes, use the is_modified() method.
-
- """
- return util.IdentitySet(
- [state.obj()
- for state in self._dirty_states
- if state not in self._deleted])
-
- @property
- def deleted(self):
- "The set of all instances marked as 'deleted' within this ``Session``"
-
- return util.IdentitySet(self._deleted.values())
-
- @property
- def new(self):
- "The set of all instances marked as 'new' within this ``Session``."
-
- return util.IdentitySet(self._new.values())
-
-_sessions = weakref.WeakValueDictionary()
-
-def make_transient(instance):
- """Make the given instance 'transient'.
-
- This will remove its association with any
- session and additionally will remove its "identity key",
- such that it's as though the object were newly constructed,
- except retaining its values. It also resets the
- "deleted" flag on the state if this object
- had been explicitly deleted by its session.
-
- Attributes which were "expired" or deferred at the
- instance level are reverted to undefined, and
- will not trigger any loads.
-
- """
- state = attributes.instance_state(instance)
- s = _state_session(state)
- if s:
- s._expunge_state(state)
-
- # remove expired state and
- # deferred callables
- state.callables.clear()
- if state.key:
- del state.key
- if state.deleted:
- del state.deleted
-
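-# Editor's sketch (not part of the original source): reviving a deleted
-# object; ``user`` is an assumed persistent instance.
-#
-#     session.delete(user)
-#     session.flush()        # row deleted; state carries the "deleted" flag
-#     make_transient(user)   # identity key and "deleted" flag removed
-#     session.add(user)      # object can now be INSERTed again on flush
-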
-def object_session(instance):
- """Return the ``Session`` to which instance belongs.
-
- If the instance is not a mapped instance, an error is raised.
-
- """
-
- try:
- return _state_session(attributes.instance_state(instance))
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
-
-
-def _state_session(state):
- if state.session_id:
- try:
- return _sessions[state.session_id]
- except KeyError:
- pass
- return None
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/shard.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/shard.py
deleted file mode 100755
index 5d57472d..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/shard.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# orm/shard.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy import util
-
-util.warn_deprecated(
- "Horizontal sharding is now importable via "
- "'import sqlalchemy.ext.horizontal_shard"
-)
-
-from sqlalchemy.ext.horizontal_shard import *
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/state.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/state.py
deleted file mode 100755
index 0963a526..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/state.py
+++ /dev/null
@@ -1,557 +0,0 @@
-# orm/state.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Defines instrumentation of instances.
-
-This module is usually not directly visible to user applications, but
-defines a large part of the ORM's interactivity.
-
-"""
-
-from sqlalchemy.util import EMPTY_SET
-import weakref
-from sqlalchemy import util
-
-from sqlalchemy.orm import exc as orm_exc, attributes, interfaces,\
- util as orm_util
-from sqlalchemy.orm.attributes import PASSIVE_OFF, PASSIVE_NO_RESULT, \
- PASSIVE_NO_FETCH, NEVER_SET, ATTR_WAS_SET, NO_VALUE
-
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-
-import sys
-
-class InstanceState(object):
- """tracks state information at the instance level."""
-
- session_id = None
- key = None
- runid = None
- load_options = EMPTY_SET
- load_path = ()
- insert_order = None
- mutable_dict = None
- _strong_obj = None
- modified = False
- expired = False
- deleted = False
-
- def __init__(self, obj, manager):
- self.class_ = obj.__class__
- self.manager = manager
- self.obj = weakref.ref(obj, self._cleanup)
- self.callables = {}
- self.committed_state = {}
-
- @util.memoized_property
- def parents(self):
- return {}
-
- @util.memoized_property
- def pending(self):
- return {}
-
- @property
- def has_identity(self):
- return bool(self.key)
-
- def detach(self):
- self.session_id = None
-
- def dispose(self):
- self.detach()
- del self.obj
-
- def _cleanup(self, ref):
- instance_dict = self._instance_dict()
- if instance_dict:
- instance_dict.discard(self)
-
- self.callables = {}
- self.session_id = None
- del self.obj
-
- def obj(self):
- return None
-
- @property
- def dict(self):
- o = self.obj()
- if o is not None:
- return attributes.instance_dict(o)
- else:
- return {}
-
- @property
- def sort_key(self):
- return self.key and self.key[1] or (self.insert_order, )
-
- def initialize_instance(*mixed, **kwargs):
- self, instance, args = mixed[0], mixed[1], mixed[2:]
- manager = self.manager
-
- manager.dispatch.init(self, args, kwargs)
-
- #if manager.mutable_attributes:
- # assert self.__class__ is MutableAttrInstanceState
-
- try:
- return manager.original_init(*mixed[1:], **kwargs)
- except:
- manager.dispatch.init_failure(self, args, kwargs)
- raise
-
- def get_history(self, key, passive):
- return self.manager[key].impl.get_history(self, self.dict, passive)
-
- def get_impl(self, key):
- return self.manager[key].impl
-
- def get_pending(self, key):
- if key not in self.pending:
- self.pending[key] = PendingCollection()
- return self.pending[key]
-
- def value_as_iterable(self, dict_, key, passive=PASSIVE_OFF):
- """Return a list of tuples (state, obj) for the given
- key.
-
- Returns an empty list if the value is None, empty, or PASSIVE_NO_RESULT.
- """
-
- impl = self.manager[key].impl
- x = impl.get(self, dict_, passive=passive)
- if x is PASSIVE_NO_RESULT or x is None:
- return []
- elif hasattr(impl, 'get_collection'):
- return [
- (attributes.instance_state(o), o) for o in
- impl.get_collection(self, dict_, x, passive=passive)
- ]
- else:
- return [(attributes.instance_state(x), x)]
-
- def __getstate__(self):
- d = {'instance':self.obj()}
-
- d.update(
- (k, self.__dict__[k]) for k in (
- 'committed_state', 'pending', 'parents', 'modified', 'expired',
- 'callables', 'key', 'load_options', 'mutable_dict'
- ) if k in self.__dict__
- )
- if self.load_path:
- d['load_path'] = interfaces.serialize_path(self.load_path)
-
- self.manager.dispatch.pickle(self, d)
-
- return d
-
- def __setstate__(self, state):
- from sqlalchemy.orm import instrumentation
- self.obj = weakref.ref(state['instance'], self._cleanup)
- self.class_ = state['instance'].__class__
- self.manager = manager = instrumentation.manager_of_class(self.class_)
- if manager is None:
- raise orm_exc.UnmappedInstanceError(
- state['instance'],
- "Cannot deserialize object of type %r - no mapper() has"
- " been configured for this class within the current Python process!" %
- self.class_)
- elif manager.is_mapped and not manager.mapper.configured:
- mapperlib.configure_mappers()
-
- self.committed_state = state.get('committed_state', {})
- self.pending = state.get('pending', {})
- self.parents = state.get('parents', {})
- self.modified = state.get('modified', False)
- self.expired = state.get('expired', False)
- self.callables = state.get('callables', {})
-
- if self.modified:
- self._strong_obj = state['instance']
-
- self.__dict__.update([
- (k, state[k]) for k in (
- 'key', 'load_options', 'mutable_dict'
- ) if k in state
- ])
-
- if 'load_path' in state:
- self.load_path = interfaces.deserialize_path(state['load_path'])
-
- manager.dispatch.unpickle(self, state)
-
- def initialize(self, key):
- """Set this attribute to an empty value or collection,
- based on the AttributeImpl in use."""
-
- self.manager.get_impl(key).initialize(self, self.dict)
-
- def reset(self, dict_, key):
- """Remove the given attribute and any
- callables associated with it."""
-
- dict_.pop(key, None)
- self.callables.pop(key, None)
-
- def expire_attribute_pre_commit(self, dict_, key):
- """a fast expire that can be called by column loaders during a load.
-
- The additional bookkeeping is finished up in commit_all().
-
- This method is actually called a lot with joined-table
- loading, when the second table isn't present in the result.
-
- """
- dict_.pop(key, None)
- self.callables[key] = self
-
- def set_callable(self, dict_, key, callable_):
- """Remove the given attribute and set the given callable
- as a loader."""
-
- dict_.pop(key, None)
- self.callables[key] = callable_
-
- def expire(self, dict_, modified_set):
- self.expired = True
- if self.modified:
- modified_set.discard(self)
-
- self.modified = False
-
- pending = self.__dict__.get('pending', None)
- mutable_dict = self.mutable_dict
- self.committed_state.clear()
- if mutable_dict:
- mutable_dict.clear()
- if pending:
- pending.clear()
-
- for key in self.manager:
- impl = self.manager[key].impl
- if impl.accepts_scalar_loader and \
- (impl.expire_missing or key in dict_):
- self.callables[key] = self
- dict_.pop(key, None)
-
- self.manager.dispatch.expire(self, None)
-
- def expire_attributes(self, dict_, attribute_names):
- pending = self.__dict__.get('pending', None)
- mutable_dict = self.mutable_dict
-
- for key in attribute_names:
- impl = self.manager[key].impl
- if impl.accepts_scalar_loader:
- self.callables[key] = self
- dict_.pop(key, None)
-
- self.committed_state.pop(key, None)
- if mutable_dict:
- mutable_dict.pop(key, None)
- if pending:
- pending.pop(key, None)
-
- self.manager.dispatch.expire(self, attribute_names)
-
- def __call__(self, passive):
- """__call__ allows the InstanceState to act as a deferred
- callable for loading expired attributes, which is also
- serializable (picklable).
-
- """
-
- if passive is PASSIVE_NO_FETCH:
- return PASSIVE_NO_RESULT
-
- toload = self.expired_attributes.\
- intersection(self.unmodified)
-
- self.manager.deferred_scalar_loader(self, toload)
-
- # if the loader failed, or this
- # instance state didn't have an identity,
- # the attributes still might be in the callables
- # dict. ensure they are removed.
- for k in toload.intersection(self.callables):
- del self.callables[k]
-
- return ATTR_WAS_SET
-
- @property
- def unmodified(self):
- """Return the set of keys which have no uncommitted changes"""
-
- return set(self.manager).difference(self.committed_state)
-
- def unmodified_intersection(self, keys):
- """Return self.unmodified.intersection(keys)."""
-
- return set(keys).intersection(self.manager).\
- difference(self.committed_state)
-
-
- @property
- def unloaded(self):
- """Return the set of keys which do not have a loaded value.
-
- This includes expired attributes and any other attribute that
- was never populated or modified.
-
- """
- return set(self.manager).\
- difference(self.committed_state).\
- difference(self.dict)
-
- @property
- def expired_attributes(self):
- """Return the set of keys which are 'expired' to be loaded by
- the manager's deferred scalar loader, assuming no pending
- changes.
-
- see also the ``unmodified`` collection which is intersected
- against this set when a refresh operation occurs.
-
- """
- return set([k for k, v in self.callables.items() if v is self])
-
- def _instance_dict(self):
- return None
-
- def _is_really_none(self):
- return self.obj()
-
- def modified_event(self, dict_, attr, previous, collection=False):
- if attr.key not in self.committed_state:
- if collection:
- if previous is NEVER_SET:
- if attr.key in dict_:
- previous = dict_[attr.key]
-
- if previous not in (None, NO_VALUE, NEVER_SET):
- previous = attr.copy(previous)
-
- self.committed_state[attr.key] = previous
-
- # the "or not self.modified" is defensive at
- # this point. The assertion below is expected
- # to be True:
- # assert self._strong_obj is None or self.modified
-
- if self._strong_obj is None or not self.modified:
- instance_dict = self._instance_dict()
- if instance_dict:
- instance_dict._modified.add(self)
-
- self._strong_obj = self.obj()
- if self._strong_obj is None:
- raise orm_exc.ObjectDereferencedError(
- "Can't emit change event for attribute '%s' - "
- "parent object of type %s has been garbage "
- "collected."
- % (
- self.manager[attr.key],
- orm_util.state_class_str(self)
- ))
- self.modified = True
-
- def commit(self, dict_, keys):
- """Commit attributes.
-
- This is used by a partial-attribute load operation to mark committed
- those attributes which were refreshed from the database.
-
- Attributes marked as "expired" can potentially remain "expired" after
- this step if a value was not populated in state.dict.
-
- """
- class_manager = self.manager
- if class_manager.mutable_attributes:
- for key in keys:
- if key in dict_ and key in class_manager.mutable_attributes:
- self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
- else:
- self.committed_state.pop(key, None)
- else:
- for key in keys:
- self.committed_state.pop(key, None)
-
- self.expired = False
-
- for key in set(self.callables).\
- intersection(keys).\
- intersection(dict_):
- del self.callables[key]
-
- def commit_all(self, dict_, instance_dict=None):
- """commit all attributes unconditionally.
-
- This is used after a flush() or a full load/refresh
- to remove all pending state from the instance.
-
- - all attributes are marked as "committed"
- - the "strong dirty reference" is removed
- - the "modified" flag is set to False
- - any "expired" markers/callables for attributes loaded are removed.
-
- Attributes marked as "expired" can potentially remain "expired" after this step
- if a value was not populated in state.dict.
-
- """
-
- self.committed_state.clear()
- self.__dict__.pop('pending', None)
-
- callables = self.callables
- for key in list(callables):
- if key in dict_ and callables[key] is self:
- del callables[key]
-
- for key in self.manager.mutable_attributes:
- if key in dict_:
- self.committed_state[key] = self.manager[key].impl.copy(dict_[key])
-
- if instance_dict and self.modified:
- instance_dict._modified.discard(self)
-
- self.modified = self.expired = False
- self._strong_obj = None
-
-class MutableAttrInstanceState(InstanceState):
- """InstanceState implementation for objects that reference 'mutable'
- attributes.
-
- Has a more involved "cleanup" handler that checks mutable attributes
- for changes upon dereference, resurrecting if needed.
-
- """
-
- @util.memoized_property
- def mutable_dict(self):
- return {}
-
- def _get_modified(self, dict_=None):
- if self.__dict__.get('modified', False):
- return True
- else:
- if dict_ is None:
- dict_ = self.dict
- for key in self.manager.mutable_attributes:
- if self.manager[key].impl.check_mutable_modified(self, dict_):
- return True
- else:
- return False
-
- def _set_modified(self, value):
- self.__dict__['modified'] = value
-
- modified = property(_get_modified, _set_modified)
-
- @property
- def unmodified(self):
- """a set of keys which have no uncommitted changes"""
-
- dict_ = self.dict
-
- return set([
- key for key in self.manager
- if (key not in self.committed_state or
- (key in self.manager.mutable_attributes and
- not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
- def unmodified_intersection(self, keys):
- """Return self.unmodified.intersection(keys)."""
-
- dict_ = self.dict
-
- return set([
- key for key in keys
- if (key not in self.committed_state or
- (key in self.manager.mutable_attributes and
- not self.manager[key].impl.check_mutable_modified(self, dict_)))])
-
-
- def _is_really_none(self):
- """do a check modified/resurrect.
-
- This would be called in the extremely rare
- race condition that the weakref returned None but
- the cleanup handler had not yet established the
- __resurrect callable as its replacement.
-
- """
- if self.modified:
- self.obj = self.__resurrect
- return self.obj()
- else:
- return None
-
- def reset(self, dict_, key):
- self.mutable_dict.pop(key, None)
- InstanceState.reset(self, dict_, key)
-
- def _cleanup(self, ref):
- """weakref callback.
-
- This method may be called by an asynchronous
- gc.
-
- If the state shows pending changes, the weakref
- is replaced by the __resurrect callable which will
- re-establish an object reference on next access,
- else removes this InstanceState from the owning
- identity map, if any.
-
- """
- if self._get_modified(self.mutable_dict):
- self.obj = self.__resurrect
- else:
- instance_dict = self._instance_dict()
- if instance_dict:
- instance_dict.discard(self)
- self.dispose()
-
- def __resurrect(self):
- """A substitute for the obj() weakref function which resurrects."""
-
- # store strong ref'ed version of the object; will revert
- # to weakref when changes are persisted
- obj = self.manager.new_instance(state=self)
- self.obj = weakref.ref(obj, self._cleanup)
- self._strong_obj = obj
- obj.__dict__.update(self.mutable_dict)
-
- # re-establishes identity attributes from the key
- self.manager.dispatch.resurrect(self)
-
- return obj
-
-class PendingCollection(object):
- """A writable placeholder for an unloaded collection.
-
- Stores items appended to and removed from a collection that has not yet
- been loaded. When the collection is loaded, the changes stored in
- PendingCollection are applied to it to produce the final result.
-
- """
- def __init__(self):
- self.deleted_items = util.IdentitySet()
- self.added_items = util.OrderedIdentitySet()
-
- def append(self, value):
- if value in self.deleted_items:
- self.deleted_items.remove(value)
- else:
- self.added_items.add(value)
-
- def remove(self, value):
- if value in self.added_items:
- self.added_items.remove(value)
- else:
- self.deleted_items.add(value)
-
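
InstanceState above is the per-object bookkeeping record: committed_state holds pre-change values, modified_event() populates it on the first mutation, and commit_all() clears it after a flush. A minimal sketch that observes this lifecycle through the private attributes.instance_state() accessor (the Account model and SQLite URL are illustrative):

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, attributes
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Account(Base):
        __tablename__ = 'account'
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    acct = Account(name='initial')
    session.add(acct)
    session.flush()    # flush ends in commit_all(): committed_state cleared

    state = attributes.instance_state(acct)
    assert not state.modified

    acct.name = 'changed'    # modified_event() records the old value
    assert state.modified
    assert state.committed_state['name'] == 'initial'
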
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/strategies.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/strategies.py
deleted file mode 100755
index acdac998..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/strategies.py
+++ /dev/null
@@ -1,1300 +0,0 @@
-# orm/strategies.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""sqlalchemy.orm.interfaces.LoaderStrategy
- implementations, and related MapperOptions."""
-
-from sqlalchemy import exc as sa_exc
-from sqlalchemy import sql, util, log, event
-from sqlalchemy.sql import util as sql_util
-from sqlalchemy.sql import visitors, expression, operators
-from sqlalchemy.orm import mapper, attributes, interfaces, exc as orm_exc
-from sqlalchemy.orm.mapper import _none_set
-from sqlalchemy.orm.interfaces import (
- LoaderStrategy, StrategizedOption, MapperOption, PropertyOption,
- serialize_path, deserialize_path, StrategizedProperty
- )
-from sqlalchemy.orm import session as sessionlib, unitofwork
-from sqlalchemy.orm import util as mapperutil
-from sqlalchemy.orm.query import Query
-import itertools
-
-def _register_attribute(strategy, mapper, useobject,
- compare_function=None,
- typecallable=None,
- copy_function=None,
- mutable_scalars=False,
- uselist=False,
- callable_=None,
- proxy_property=None,
- active_history=False,
- impl_class=None,
- **kw
-):
-
- prop = strategy.parent_property
-
- attribute_ext = list(util.to_list(prop.extension, default=[]))
-
- listen_hooks = []
-
- if useobject and prop.single_parent:
- listen_hooks.append(single_parent_validator)
-
- if prop.key in prop.parent._validators:
- listen_hooks.append(
- lambda desc, prop: mapperutil._validator_events(desc,
- prop.key,
- prop.parent._validators[prop.key])
- )
-
- if useobject:
- listen_hooks.append(unitofwork.track_cascade_events)
-
- # need to assemble backref listeners
- # after the singleparentvalidator, mapper validator
- backref = kw.pop('backref', None)
- if backref:
- listen_hooks.append(
- lambda desc, prop: attributes.backref_listeners(desc,
- backref,
- uselist)
- )
-
- for m in mapper.self_and_descendants:
- if prop is m._props.get(prop.key):
-
- desc = attributes.register_attribute_impl(
- m.class_,
- prop.key,
- parent_token=prop,
- mutable_scalars=mutable_scalars,
- uselist=uselist,
- copy_function=copy_function,
- compare_function=compare_function,
- useobject=useobject,
- extension=attribute_ext,
- trackparent=useobject,
- typecallable=typecallable,
- callable_=callable_,
- active_history=active_history,
- impl_class=impl_class,
- doc=prop.doc,
- **kw
- )
-
- for hook in listen_hooks:
- hook(desc, prop)
-
-class UninstrumentedColumnLoader(LoaderStrategy):
- """Represent the a non-instrumented MapperProperty.
-
- The polymorphic_on argument of mapper() often results in this,
- if the argument is against the with_polymorphic selectable.
-
- """
- def init(self):
- self.columns = self.parent_property.columns
-
- def setup_query(self, context, entity, path, reduced_path, adapter,
- column_collection=None, **kwargs):
- for c in self.columns:
- if adapter:
- c = adapter.columns[c]
- column_collection.append(c)
-
- def create_row_processor(self, selectcontext, path, reduced_path, mapper, row, adapter):
- return None, None, None
-
-class ColumnLoader(LoaderStrategy):
- """Strategize the loading of a plain column-based MapperProperty."""
-
- def init(self):
- self.columns = self.parent_property.columns
- self.is_composite = hasattr(self.parent_property, 'composite_class')
-
- def setup_query(self, context, entity, path, reduced_path, adapter,
- column_collection=None, **kwargs):
- for c in self.columns:
- if adapter:
- c = adapter.columns[c]
- column_collection.append(c)
-
- def init_class_attribute(self, mapper):
- self.is_class_level = True
- coltype = self.columns[0].type
- # TODO: check all columns ? check for foreign key as well?
- active_history = self.parent_property.active_history or \
- self.columns[0].primary_key
-
- _register_attribute(self, mapper, useobject=False,
- compare_function=coltype.compare_values,
- copy_function=coltype.copy_value,
- mutable_scalars=self.columns[0].type.is_mutable(),
- active_history = active_history
- )
-
- def create_row_processor(self, selectcontext, path, reduced_path, mapper, row, adapter):
- key = self.key
- # look through list of columns represented here
- # to see which, if any, is present in the row.
- for col in self.columns:
- if adapter:
- col = adapter.columns[col]
- if col is not None and col in row:
- def new_execute(state, dict_, row):
- dict_[key] = row[col]
- return new_execute, None, None
- else:
- def new_execute(state, dict_, row):
- state.expire_attribute_pre_commit(dict_, key)
- return new_execute, None, None
-
-log.class_logger(ColumnLoader)
-
-class DeferredColumnLoader(LoaderStrategy):
- """Strategize the loading of a deferred column-based MapperProperty."""
-
- def create_row_processor(self, selectcontext, path, reduced_path, mapper, row, adapter):
- col = self.columns[0]
- if adapter:
- col = adapter.columns[col]
-
- key = self.key
- if col in row:
- return self.parent_property._get_strategy(ColumnLoader).\
- create_row_processor(
- selectcontext, path, reduced_path, mapper, row, adapter)
-
- elif not self.is_class_level:
- def new_execute(state, dict_, row):
- state.set_callable(dict_, key, LoadDeferredColumns(state, key))
- else:
- def new_execute(state, dict_, row):
- # reset state on the key so that deferred callables
- # fire off on next access.
- state.reset(dict_, key)
-
- return new_execute, None, None
-
- def init(self):
- if hasattr(self.parent_property, 'composite_class'):
- raise NotImplementedError("Deferred loading for composite "
- "types not implemented yet")
- self.columns = self.parent_property.columns
- self.group = self.parent_property.group
-
- def init_class_attribute(self, mapper):
- self.is_class_level = True
-
- _register_attribute(self, mapper, useobject=False,
- compare_function=self.columns[0].type.compare_values,
- copy_function=self.columns[0].type.copy_value,
- mutable_scalars=self.columns[0].type.is_mutable(),
- callable_=self._load_for_state,
- expire_missing=False
- )
-
- def setup_query(self, context, entity, path, reduced_path, adapter,
- only_load_props=None, **kwargs):
- if (
- self.group is not None and
- context.attributes.get(('undefer', self.group), False)
- ) or (only_load_props and self.key in only_load_props):
- self.parent_property._get_strategy(ColumnLoader).\
- setup_query(context, entity,
- path, reduced_path, adapter, **kwargs)
-
- def _load_for_state(self, state, passive):
- if not state.key:
- return attributes.ATTR_EMPTY
-
- if passive is attributes.PASSIVE_NO_FETCH:
- return attributes.PASSIVE_NO_RESULT
-
- prop = self.parent_property
- localparent = state.manager.mapper
-
- if self.group:
- toload = [
- p.key for p in
- localparent.iterate_properties
- if isinstance(p, StrategizedProperty) and
- isinstance(p.strategy, DeferredColumnLoader) and
- p.group==self.group
- ]
- else:
- toload = [self.key]
-
- # narrow the keys down to just those which have no history
- group = [k for k in toload if k in state.unmodified]
-
- session = sessionlib._state_session(state)
- if session is None:
- raise orm_exc.DetachedInstanceError(
- "Parent instance %s is not bound to a Session; "
- "deferred load operation of attribute '%s' cannot proceed" %
- (mapperutil.state_str(state), self.key)
- )
-
- query = session.query(localparent)
- query._load_on_ident(state.key,
- only_load_props=group, refresh_state=state)
- return attributes.ATTR_WAS_SET
-
-log.class_logger(DeferredColumnLoader)
-
-class LoadDeferredColumns(object):
- """serializable loader object used by DeferredColumnLoader"""
-
- def __init__(self, state, key):
- self.state = state
- self.key = key
-
- def __call__(self, passive=attributes.PASSIVE_OFF):
- state, key = self.state, self.key
-
- localparent = state.manager.mapper
- prop = localparent._props[key]
- strategy = prop._strategies[DeferredColumnLoader]
- return strategy._load_for_state(state, passive)
-
-class DeferredOption(StrategizedOption):
- propagate_to_loaders = True
-
- def __init__(self, key, defer=False):
- super(DeferredOption, self).__init__(key)
- self.defer = defer
-
- def get_strategy_class(self):
- if self.defer:
- return DeferredColumnLoader
- else:
- return ColumnLoader
-
-class UndeferGroupOption(MapperOption):
- propagate_to_loaders = True
-
- def __init__(self, group):
- self.group = group
-
- def process_query(self, query):
- query._attributes[('undefer', self.group)] = True
-
-class AbstractRelationshipLoader(LoaderStrategy):
- """LoaderStratgies which deal with related objects."""
-
- def init(self):
- self.mapper = self.parent_property.mapper
- self.target = self.parent_property.target
- self.table = self.parent_property.table
- self.uselist = self.parent_property.uselist
-
-class NoLoader(AbstractRelationshipLoader):
- """Strategize a relationship() that doesn't load data automatically."""
-
- def init_class_attribute(self, mapper):
- self.is_class_level = True
-
- _register_attribute(self, mapper,
- useobject=True,
- uselist=self.parent_property.uselist,
- typecallable = self.parent_property.collection_class,
- )
-
- def create_row_processor(self, selectcontext, path, reduced_path, mapper, row, adapter):
- def new_execute(state, dict_, row):
- state.initialize(self.key)
- return new_execute, None, None
-
-log.class_logger(NoLoader)
-
-class LazyLoader(AbstractRelationshipLoader):
- """Strategize a relationship() that loads when first accessed."""
-
- def init(self):
- super(LazyLoader, self).init()
- self.__lazywhere, \
- self.__bind_to_col, \
- self._equated_columns = self._create_lazy_clause(self.parent_property)
-
- self.logger.info("%s lazy loading clause %s", self, self.__lazywhere)
-
- # determine if our "lazywhere" clause is the same as the mapper's
- # get() clause. then we can just use mapper.get()
- #from sqlalchemy.orm import query
- self.use_get = not self.uselist and \
- self.mapper._get_clause[0].compare(
- self.__lazywhere,
- use_proxies=True,
- equivalents=self.mapper._equivalent_columns
- )
-
- if self.use_get:
- for col in self._equated_columns.keys():
- if col in self.mapper._equivalent_columns:
- for c in self.mapper._equivalent_columns[col]:
- self._equated_columns[c] = self._equated_columns[col]
-
- self.logger.info("%s will use query.get() to "
- "optimize instance loads" % self)
-
- def init_class_attribute(self, mapper):
- self.is_class_level = True
-
- # MANYTOONE currently only needs the
- # "old" value for delete-orphan
- # cascades. the required _SingleParentValidator
- # will enable active_history
- # in that case. otherwise we don't need the
- # "old" value during backref operations.
- _register_attribute(self,
- mapper,
- useobject=True,
- callable_=self._load_for_state,
- uselist = self.parent_property.uselist,
- backref = self.parent_property.back_populates,
- typecallable = self.parent_property.collection_class,
- active_history = \
- self.parent_property.active_history or \
- self.parent_property.direction is not \
- interfaces.MANYTOONE or \
- not self.use_get,
- )
-
- def lazy_clause(self, state, reverse_direction=False,
- alias_secondary=False,
- adapt_source=None):
- if state is None:
- return self._lazy_none_clause(
- reverse_direction,
- adapt_source=adapt_source)
-
- if not reverse_direction:
- criterion, bind_to_col, rev = \
- self.__lazywhere, \
- self.__bind_to_col, \
- self._equated_columns
- else:
- criterion, bind_to_col, rev = \
- LazyLoader._create_lazy_clause(
- self.parent_property,
- reverse_direction=reverse_direction)
-
- if reverse_direction:
- mapper = self.parent_property.mapper
- else:
- mapper = self.parent_property.parent
-
- o = state.obj() # strong ref
- dict_ = attributes.instance_dict(o)
-
- # use the "committed state" only if we're in a flush
- # for this state.
-
- sess = sessionlib._state_session(state)
- if sess is not None and sess._flushing:
- def visit_bindparam(bindparam):
- if bindparam.key in bind_to_col:
- bindparam.callable = \
- lambda: mapper._get_committed_state_attr_by_column(
- state, dict_, bind_to_col[bindparam.key])
- else:
- def visit_bindparam(bindparam):
- if bindparam.key in bind_to_col:
- bindparam.callable = lambda: mapper._get_state_attr_by_column(
- state, dict_, bind_to_col[bindparam.key])
-
-
- if self.parent_property.secondary is not None and alias_secondary:
- criterion = sql_util.ClauseAdapter(
- self.parent_property.secondary.alias()).\
- traverse(criterion)
-
- criterion = visitors.cloned_traverse(
- criterion, {}, {'bindparam':visit_bindparam})
-
- if adapt_source:
- criterion = adapt_source(criterion)
- return criterion
-
- def _lazy_none_clause(self, reverse_direction=False, adapt_source=None):
- if not reverse_direction:
- criterion, bind_to_col, rev = \
- self.__lazywhere, \
- self.__bind_to_col,\
- self._equated_columns
- else:
- criterion, bind_to_col, rev = \
- LazyLoader._create_lazy_clause(
- self.parent_property,
- reverse_direction=reverse_direction)
-
- criterion = sql_util.adapt_criterion_to_null(criterion, bind_to_col)
-
- if adapt_source:
- criterion = adapt_source(criterion)
- return criterion
-
- def _load_for_state(self, state, passive):
- if not state.key and \
- (not self.parent_property.load_on_pending or not state.session_id):
- return attributes.ATTR_EMPTY
-
- instance_mapper = state.manager.mapper
- prop = self.parent_property
- key = self.key
- prop_mapper = self.mapper
- pending = not state.key
-
- if (
- (passive is attributes.PASSIVE_NO_FETCH or \
- passive is attributes.PASSIVE_NO_FETCH_RELATED) and
- not self.use_get
- ) or (
- passive is attributes.PASSIVE_ONLY_PERSISTENT and
- pending
- ):
- return attributes.PASSIVE_NO_RESULT
-
- session = sessionlib._state_session(state)
- if not session:
- raise orm_exc.DetachedInstanceError(
- "Parent instance %s is not bound to a Session; "
- "lazy load operation of attribute '%s' cannot proceed" %
- (mapperutil.state_str(state), key)
- )
-
- # if we have a simple primary key load, check the
- # identity map without generating a Query at all
- if self.use_get:
- if session._flushing:
- get_attr = instance_mapper._get_committed_state_attr_by_column
- else:
- get_attr = instance_mapper._get_state_attr_by_column
-
- dict_ = state.dict
- if passive is attributes.PASSIVE_NO_FETCH_RELATED:
- attr_passive = attributes.PASSIVE_OFF
- else:
- attr_passive = passive
-
- ident = [
- get_attr(
- state,
- state.dict,
- self._equated_columns[pk],
- passive=attr_passive)
- for pk in prop_mapper.primary_key
- ]
- if attributes.PASSIVE_NO_RESULT in ident:
- return attributes.PASSIVE_NO_RESULT
-
- if _none_set.issuperset(ident):
- return None
-
- ident_key = prop_mapper.identity_key_from_primary_key(ident)
- instance = Query._get_from_identity(session, ident_key, passive)
- if instance is not None:
- return instance
- elif passive is attributes.PASSIVE_NO_FETCH or \
- passive is attributes.PASSIVE_NO_FETCH_RELATED:
- return attributes.PASSIVE_NO_RESULT
-
- q = session.query(prop_mapper)._adapt_all_clauses()
-
- # don't autoflush on pending
- if pending:
- q = q.autoflush(False)
-
- if state.load_path:
- q = q._with_current_path(state.load_path + (key,))
-
- if state.load_options:
- q = q._conditional_options(*state.load_options)
-
- if self.use_get:
- return q._load_on_ident(ident_key)
-
- if prop.order_by:
- q = q.order_by(*util.to_list(prop.order_by))
-
- for rev in prop._reverse_property:
- # reverse props that are MANYTOONE are loading *this*
- # object from get(), so don't need to eager out to those.
- if rev.direction is interfaces.MANYTOONE and \
- rev._use_get and \
- not isinstance(rev.strategy, LazyLoader):
- q = q.options(EagerLazyOption((rev.key,), lazy='select'))
-
- lazy_clause = self.lazy_clause(state)
-
- if pending:
- bind_values = sql_util.bind_values(lazy_clause)
- if None in bind_values:
- return None
-
- q = q.filter(lazy_clause)
-
- result = q.all()
- if self.uselist:
- return result
- else:
- l = len(result)
- if l:
- if l > 1:
- util.warn(
- "Multiple rows returned with "
- "uselist=False for lazily-loaded attribute '%s' "
- % prop)
-
- return result[0]
- else:
- return None
-
- def create_row_processor(self, selectcontext, path, reduced_path,
- mapper, row, adapter):
- key = self.key
- if not self.is_class_level:
- def new_execute(state, dict_, row):
- # we are not the primary manager for this attribute
- # on this class - set up a
- # per-instance lazyloader, which will override the
- # class-level behavior.
- # this currently only happens when using a
- # "lazyload" option on a "no load"
- # attribute - "eager" attributes always have a
- # class-level lazyloader installed.
- state.set_callable(dict_, key, LoadLazyAttribute(state, key))
- else:
- def new_execute(state, dict_, row):
- # we are the primary manager for this attribute on
- # this class - reset its
- # per-instance attribute state, so that the class-level
- # lazy loader is
- # executed when next referenced on this instance.
- # this is needed in
- # populate_existing() types of scenarios to reset
- # any existing state.
- state.reset(dict_, key)
-
- return new_execute, None, None
-
- @classmethod
- def _create_lazy_clause(cls, prop, reverse_direction=False):
- binds = util.column_dict()
- lookup = util.column_dict()
- equated_columns = util.column_dict()
-
- if reverse_direction and prop.secondaryjoin is None:
- for l, r in prop.local_remote_pairs:
- _list = lookup.setdefault(r, [])
- _list.append((r, l))
- equated_columns[l] = r
- else:
- for l, r in prop.local_remote_pairs:
- _list = lookup.setdefault(l, [])
- _list.append((l, r))
- equated_columns[r] = l
-
- def col_to_bind(col):
- if col in lookup:
- for tobind, equated in lookup[col]:
- if equated in binds:
- return None
- if col not in binds:
- binds[col] = sql.bindparam(None, None, type_=col.type)
- return binds[col]
- return None
-
- lazywhere = prop.primaryjoin
-
- if prop.secondaryjoin is None or not reverse_direction:
- lazywhere = visitors.replacement_traverse(
- lazywhere, {}, col_to_bind)
-
- if prop.secondaryjoin is not None:
- secondaryjoin = prop.secondaryjoin
- if reverse_direction:
- secondaryjoin = visitors.replacement_traverse(
- secondaryjoin, {}, col_to_bind)
- lazywhere = sql.and_(lazywhere, secondaryjoin)
-
- bind_to_col = dict((binds[col].key, col) for col in binds)
-
- return lazywhere, bind_to_col, equated_columns
-
-log.class_logger(LazyLoader)
-
-class LoadLazyAttribute(object):
- """serializable loader object used by LazyLoader"""
-
- def __init__(self, state, key):
- self.state = state
- self.key = key
-
- def __call__(self, passive=attributes.PASSIVE_OFF):
- state, key = self.state, self.key
- instance_mapper = state.manager.mapper
- prop = instance_mapper._props[key]
- strategy = prop._strategies[LazyLoader]
-
- return strategy._load_for_state(state, passive)
-
-
-class ImmediateLoader(AbstractRelationshipLoader):
- def init_class_attribute(self, mapper):
- self.parent_property.\
- _get_strategy(LazyLoader).\
- init_class_attribute(mapper)
-
- def setup_query(self, context, entity,
- path, reduced_path, adapter, column_collection=None,
- parentmapper=None, **kwargs):
- pass
-
- def create_row_processor(self, context, path, reduced_path, mapper, row, adapter):
- def execute(state, dict_, row):
- state.get_impl(self.key).get(state, dict_)
-
- return None, None, execute
-
-class SubqueryLoader(AbstractRelationshipLoader):
- def init(self):
- super(SubqueryLoader, self).init()
- self.join_depth = self.parent_property.join_depth
-
- def init_class_attribute(self, mapper):
- self.parent_property.\
- _get_strategy(LazyLoader).\
- init_class_attribute(mapper)
-
- def setup_query(self, context, entity,
- path, reduced_path, adapter, column_collection=None,
- parentmapper=None, **kwargs):
-
- if not context.query._enable_eagerloads:
- return
-
- path = path + (self.key, )
- reduced_path = reduced_path + (self.key, )
-
- # build up a path indicating the path from the leftmost
- # entity to the thing we're subquery loading.
- subq_path = context.attributes.get(('subquery_path', None), ())
-
- subq_path = subq_path + path
-
- # join-depth / recursion check
- if ("loaderstrategy", reduced_path) not in context.attributes:
- if self.join_depth:
- if len(path) / 2 > self.join_depth:
- return
- else:
- if self.mapper.base_mapper in interfaces._reduce_path(subq_path):
- return
-
- orig_query = context.attributes.get(
- ("orig_query", SubqueryLoader),
- context.query)
-
- subq_mapper = mapperutil._class_to_mapper(subq_path[0])
-
- # determine attributes of the leftmost mapper
- if self.parent.isa(subq_mapper) and self.key==subq_path[1]:
- leftmost_mapper, leftmost_prop = \
- self.parent, self.parent_property
- else:
- leftmost_mapper, leftmost_prop = \
- subq_mapper, \
- subq_mapper._props[subq_path[1]]
- leftmost_cols, remote_cols = self._local_remote_columns(leftmost_prop)
-
- leftmost_attr = [
- leftmost_mapper._columntoproperty[c].class_attribute
- for c in leftmost_cols
- ]
-
- # reformat the original query
- # to look only for significant columns
- q = orig_query._clone()
-
- # TODO: why does polymorphic etc. require hardcoding
- # into _adapt_col_list ? Does query.add_columns(...) work
- # with polymorphic loading ?
- q._set_entities(q._adapt_col_list(leftmost_attr))
-
- # don't need ORDER BY if no limit/offset
- if q._limit is None and q._offset is None:
- q._order_by = None
-
- # the original query now becomes a subquery
- # which we'll join onto.
- embed_q = q.with_labels().subquery()
- left_alias = mapperutil.AliasedClass(leftmost_mapper, embed_q)
-
- # q becomes a new query. basically doing a longhand
- # "from_self()". (from_self() itself not quite industrial
- # strength enough for all contingencies...but very close)
-
- q = q.session.query(self.mapper)
- q._attributes = {
- ("orig_query", SubqueryLoader): orig_query,
- ('subquery_path', None) : subq_path
- }
- q = q._enable_single_crit(False)
-
- # figure out what's being joined. a.k.a. the fun part
- to_join = [
- (subq_path[i], subq_path[i+1])
- for i in xrange(0, len(subq_path), 2)
- ]
-
- # determine the immediate parent class we are joining from,
- # which needs to be aliased.
-
- if len(to_join) < 2:
- # in the case of a one level eager load, this is the
- # leftmost "left_alias".
- parent_alias = left_alias
- elif subq_path[-2].isa(self.parent):
- # In the case of multiple levels, retrieve
- # it from subq_path[-2]. This is the same as self.parent
- # in the vast majority of cases, and [ticket:2014]
- # illustrates a case where sub_path[-2] is a subclass
- # of self.parent
- parent_alias = mapperutil.AliasedClass(subq_path[-2])
- else:
- # if of_type() were used leading to this relationship,
- # self.parent is more specific than subq_path[-2]
- parent_alias = mapperutil.AliasedClass(self.parent)
-
- local_cols, remote_cols = \
- self._local_remote_columns(self.parent_property)
-
- local_attr = [
- getattr(parent_alias, self.parent._columntoproperty[c].key)
- for c in local_cols
- ]
- q = q.order_by(*local_attr)
- q = q.add_columns(*local_attr)
-
- for i, (mapper, key) in enumerate(to_join):
-
- # we need to use query.join() as opposed to
- # orm.join() here because of the
- # rich behavior it brings when dealing with
- # "with_polymorphic" mappers. "aliased"
- # and "from_joinpoint" take care of most of
- # the chaining and aliasing for us.
-
- first = i == 0
- middle = i < len(to_join) - 1
- second_to_last = i == len(to_join) - 2
-
- if first:
- attr = getattr(left_alias, key)
- else:
- attr = key
-
- if second_to_last:
- q = q.join(parent_alias, attr, from_joinpoint=True)
- else:
- q = q.join(attr, aliased=middle, from_joinpoint=True)
-
- # propagate loader options etc. to the new query.
- # these will fire relative to subq_path.
- q = q._with_current_path(subq_path)
- q = q._conditional_options(*orig_query._with_options)
-
- if self.parent_property.order_by:
- # if there's an ORDER BY, alias it the same
- # way joinedloader does, but we have to pull out
- # the "eagerjoin" from the query.
- # this really only picks up the "secondary" table
- # right now.
- eagerjoin = q._from_obj[0]
- eager_order_by = \
- eagerjoin._target_adapter.\
- copy_and_process(
- util.to_list(
- self.parent_property.order_by
- )
- )
- q = q.order_by(*eager_order_by)
-
- # add new query to attributes to be picked up
- # by create_row_processor
- context.attributes[('subquery', reduced_path)] = q
-
- def _local_remote_columns(self, prop):
- if prop.secondary is None:
- return zip(*prop.local_remote_pairs)
- else:
- return \
- [p[0] for p in prop.synchronize_pairs],\
- [
- p[0] for p in prop.
- secondary_synchronize_pairs
- ]
-
- def create_row_processor(self, context, path, reduced_path,
- mapper, row, adapter):
- if not self.parent.class_manager[self.key].impl.supports_population:
- raise sa_exc.InvalidRequestError(
- "'%s' does not support object "
- "population - eager loading cannot be applied." %
- self)
-
- reduced_path = reduced_path + (self.key,)
-
- if ('subquery', reduced_path) not in context.attributes:
- return None, None, None
-
- local_cols, remote_cols = self._local_remote_columns(self.parent_property)
-
- remote_attr = [
- self.mapper._columntoproperty[c].key
- for c in remote_cols]
-
- q = context.attributes[('subquery', reduced_path)]
-
- collections = dict(
- (k, [v[0] for v in v])
- for k, v in itertools.groupby(
- q,
- lambda x:x[1:]
- ))
-
- if adapter:
- local_cols = [adapter.columns[c] for c in local_cols]
-
- if self.uselist:
- def execute(state, dict_, row):
- collection = collections.get(
- tuple([row[col] for col in local_cols]),
- ()
- )
- state.get_impl(self.key).\
- set_committed_value(state, dict_, collection)
- else:
- def execute(state, dict_, row):
- collection = collections.get(
- tuple([row[col] for col in local_cols]),
- (None,)
- )
- if len(collection) > 1:
- util.warn(
- "Multiple rows returned with "
- "uselist=False for eagerly-loaded attribute '%s' "
- % self)
-
- scalar = collection[0]
- state.get_impl(self.key).\
- set_committed_value(state, dict_, scalar)
-
- return execute, None, None
-
-log.class_logger(SubqueryLoader)
-
-class EagerLoader(AbstractRelationshipLoader):
- """Strategize a relationship() that loads within the process
- of the parent object being selected."""
-
- def init(self):
- super(EagerLoader, self).init()
- self.join_depth = self.parent_property.join_depth
-
- def init_class_attribute(self, mapper):
- self.parent_property.\
- _get_strategy(LazyLoader).init_class_attribute(mapper)
-
- def setup_query(self, context, entity, path, reduced_path, adapter, \
- column_collection=None, parentmapper=None,
- allow_innerjoin=True,
- **kwargs):
- """Add a left outer join to the statement thats being constructed."""
-
-
- if not context.query._enable_eagerloads:
- return
-
- path = path + (self.key,)
- reduced_path = reduced_path + (self.key,)
-
- # check for user-defined eager alias
- if ("user_defined_eager_row_processor", reduced_path) in\
- context.attributes:
- clauses = context.attributes[
- ("user_defined_eager_row_processor",
- reduced_path)]
-
- adapter = entity._get_entity_clauses(context.query, context)
- if adapter and clauses:
- context.attributes[
- ("user_defined_eager_row_processor",
- reduced_path)] = clauses = clauses.wrap(adapter)
- elif adapter:
- context.attributes[
- ("user_defined_eager_row_processor",
- reduced_path)] = clauses = adapter
-
- add_to_collection = context.primary_columns
-
- else:
- # check for join_depth or basic recursion,
- # if the current path was not explicitly stated as
- # a desired "loaderstrategy" (i.e. via query.options())
- if ("loaderstrategy", reduced_path) not in context.attributes:
- if self.join_depth:
- if len(path) / 2 > self.join_depth:
- return
- else:
- if self.mapper.base_mapper in reduced_path:
- return
-
- clauses = mapperutil.ORMAdapter(
- mapperutil.AliasedClass(self.mapper),
- equivalents=self.mapper._equivalent_columns,
- adapt_required=True)
-
- if self.parent_property.direction != interfaces.MANYTOONE:
- context.multi_row_eager_loaders = True
-
- innerjoin = allow_innerjoin and context.attributes.get(
- ("eager_join_type", path),
- self.parent_property.innerjoin)
- if not innerjoin:
- # if this is an outer join, all eager joins from
- # here must also be outer joins
- allow_innerjoin = False
-
- context.create_eager_joins.append(
- (self._create_eager_join, context,
- entity, path, adapter,
- parentmapper, clauses, innerjoin)
- )
-
- add_to_collection = context.secondary_columns
- context.attributes[
- ("eager_row_processor", reduced_path)
- ] = clauses
-
- path += (self.mapper,)
- reduced_path += (self.mapper.base_mapper,)
-
- for value in self.mapper._polymorphic_properties:
- value.setup(
- context,
- entity,
- path,
- reduced_path,
- clauses,
- parentmapper=self.mapper,
- column_collection=add_to_collection,
- allow_innerjoin=allow_innerjoin)
-
- def _create_eager_join(self, context, entity,
- path, adapter, parentmapper,
- clauses, innerjoin):
-
- if parentmapper is None:
- localparent = entity.mapper
- else:
- localparent = parentmapper
-
- # whether or not the Query will wrap the selectable in a subquery,
- # and then attach eager load joins to that (i.e., in the case of
- # LIMIT/OFFSET etc.)
- should_nest_selectable = context.multi_row_eager_loaders and \
- context.query._should_nest_selectable
-
- entity_key = None
- if entity not in context.eager_joins and \
- not should_nest_selectable and \
- context.from_clause:
- index, clause = \
- sql_util.find_join_source(
- context.from_clause, entity.selectable)
- if clause is not None:
- # join to an existing FROM clause on the query.
- # key it to its list index in the eager_joins dict.
- # Query._compile_context will adapt as needed and
- # append to the FROM clause of the select().
- entity_key, default_towrap = index, clause
-
- if entity_key is None:
- entity_key, default_towrap = entity, entity.selectable
-
- towrap = context.eager_joins.setdefault(entity_key, default_towrap)
-
- join_to_left = False
- if adapter:
- if getattr(adapter, 'aliased_class', None):
- onclause = getattr(
- adapter.aliased_class, self.key,
- self.parent_property)
- else:
- onclause = getattr(
- mapperutil.AliasedClass(
- self.parent,
- adapter.selectable
- ),
- self.key, self.parent_property
- )
-
- if onclause is self.parent_property:
- # TODO: this is a temporary hack to
- # account for polymorphic eager loads where
- # the eagerload is referencing via of_type().
- join_to_left = True
- else:
- onclause = self.parent_property
-
- context.eager_joins[entity_key] = eagerjoin = \
- mapperutil.join(
- towrap,
- clauses.aliased_class,
- onclause,
- join_to_left=join_to_left,
- isouter=not innerjoin
- )
-
- # send a hint to the Query as to where it may "splice" this join
- eagerjoin.stop_on = entity.selectable
-
- if self.parent_property.secondary is None and \
- not parentmapper:
- # for parentclause that is the non-eager end of the join,
- # ensure all the parent cols in the primaryjoin are actually
- # in the
- # columns clause (i.e. are not deferred), so that aliasing applied
- # by the Query propagates those columns outward.
- # This has the effect
- # of "undefering" those columns.
- for col in sql_util.find_columns(
- self.parent_property.primaryjoin):
- if localparent.mapped_table.c.contains_column(col):
- if adapter:
- col = adapter.columns[col]
- context.primary_columns.append(col)
-
- if self.parent_property.order_by:
- context.eager_order_by += \
- eagerjoin._target_adapter.\
- copy_and_process(
- util.to_list(
- self.parent_property.order_by
- )
- )
-
-
- def _create_eager_adapter(self, context, row, adapter, path, reduced_path):
- if ("user_defined_eager_row_processor", reduced_path) in \
- context.attributes:
- decorator = context.attributes[
- ("user_defined_eager_row_processor",
- reduced_path)]
- # user defined eagerloads are part of the "primary"
- # portion of the load.
- # the adapters applied to the Query should be honored.
- if context.adapter and decorator:
- decorator = decorator.wrap(context.adapter)
- elif context.adapter:
- decorator = context.adapter
- elif ("eager_row_processor", reduced_path) in context.attributes:
- decorator = context.attributes[
- ("eager_row_processor", reduced_path)]
- else:
- return False
-
- try:
- identity_key = self.mapper.identity_key_from_row(row, decorator)
- return decorator
- except KeyError, k:
- # no identity key - don't return a row
- # processor, will cause a degrade to lazy
- return False
-
- def create_row_processor(self, context, path, reduced_path, mapper, row, adapter):
- if not self.parent.class_manager[self.key].impl.supports_population:
- raise sa_exc.InvalidRequestError(
- "'%s' does not support object "
- "population - eager loading cannot be applied." %
- self)
-
- our_path = path + (self.key,)
- our_reduced_path = reduced_path + (self.key,)
-
- eager_adapter = self._create_eager_adapter(
- context,
- row,
- adapter, our_path,
- our_reduced_path)
-
- if eager_adapter is not False:
- key = self.key
- _instance = self.mapper._instance_processor(
- context,
- our_path + (self.mapper,),
- our_reduced_path + (self.mapper.base_mapper,),
- eager_adapter)
-
- if not self.uselist:
- def new_execute(state, dict_, row):
- # set a scalar object instance directly on the parent
- # object, bypassing InstrumentedAttribute event handlers.
- dict_[key] = _instance(row, None)
-
- def existing_execute(state, dict_, row):
- # call _instance on the row, even though the object has
- # been created, so that we further descend into properties
- existing = _instance(row, None)
- if existing is not None \
- and key in dict_ \
- and existing is not dict_[key]:
- util.warn(
- "Multiple rows returned with "
- "uselist=False for eagerly-loaded attribute '%s' "
- % self)
- return new_execute, existing_execute, None
- else:
- def new_execute(state, dict_, row):
- collection = attributes.init_state_collection(
- state, dict_, key)
- result_list = util.UniqueAppender(collection,
- 'append_without_event')
- context.attributes[(state, key)] = result_list
- _instance(row, result_list)
-
- def existing_execute(state, dict_, row):
- if (state, key) in context.attributes:
- result_list = context.attributes[(state, key)]
- else:
- # appender_key can be absent from context.attributes
- # with isnew=False when self-referential eager loading
- # is used; the same instance may be present in two
- # distinct sets of result columns
- collection = attributes.init_state_collection(state,
- dict_, key)
- result_list = util.UniqueAppender(
- collection,
- 'append_without_event')
- context.attributes[(state, key)] = result_list
- _instance(row, result_list)
- return new_execute, existing_execute, None
- else:
- return self.parent_property.\
- _get_strategy(LazyLoader).\
- create_row_processor(
- context, path,
- reduced_path,
- mapper, row, adapter)
-
-log.class_logger(EagerLoader)
-
-class EagerLazyOption(StrategizedOption):
- def __init__(self, key, lazy=True, chained=False,
- propagate_to_loaders=True
- ):
- super(EagerLazyOption, self).__init__(key)
- self.lazy = lazy
- self.chained = self.lazy in (False, 'joined', 'subquery') and chained
- self.propagate_to_loaders = propagate_to_loaders
- self.strategy_cls = factory(lazy)
-
- def get_strategy_class(self):
- return self.strategy_cls
-
-def factory(identifier):
- if identifier is False or identifier == 'joined':
- return EagerLoader
- elif identifier is None or identifier == 'noload':
- return NoLoader
- elif identifier is True or identifier == 'select':
- return LazyLoader
- elif identifier == 'subquery':
- return SubqueryLoader
- elif identifier == 'immediate':
- return ImmediateLoader
- else:
- return LazyLoader
-
-
-
-class EagerJoinOption(PropertyOption):
-
- def __init__(self, key, innerjoin, chained=False):
- super(EagerJoinOption, self).__init__(key)
- self.innerjoin = innerjoin
- self.chained = chained
-
- def process_query_property(self, query, paths, mappers):
- if self.chained:
- for path in paths:
- query._attributes[("eager_join_type", path)] = self.innerjoin
- else:
- query._attributes[("eager_join_type", paths[-1])] = self.innerjoin
-
-class LoadEagerFromAliasOption(PropertyOption):
-
- def __init__(self, key, alias=None, chained=False):
- super(LoadEagerFromAliasOption, self).__init__(key)
- if alias is not None:
- if not isinstance(alias, basestring):
- m, alias, is_aliased_class = mapperutil._entity_info(alias)
- self.alias = alias
- self.chained = chained
-
- def process_query_property(self, query, paths, mappers):
- if self.chained:
- for path in paths[0:-1]:
- (root_mapper, propname) = path[-2:]
- prop = root_mapper._props[propname]
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- query._attributes.setdefault(
- ("user_defined_eager_row_processor",
- interfaces._reduce_path(path)), adapter)
-
- if self.alias is not None:
- if isinstance(self.alias, basestring):
- (root_mapper, propname) = paths[-1][-2:]
- prop = root_mapper._props[propname]
- self.alias = prop.target.alias(self.alias)
- query._attributes[
- ("user_defined_eager_row_processor",
- interfaces._reduce_path(paths[-1]))
- ] = sql_util.ColumnAdapter(self.alias)
- else:
- (root_mapper, propname) = paths[-1][-2:]
- prop = root_mapper._props[propname]
- adapter = query._polymorphic_adapters.get(prop.mapper, None)
- query._attributes[
- ("user_defined_eager_row_processor",
- interfaces._reduce_path(paths[-1]))] = adapter
-
-def single_parent_validator(desc, prop):
- def _do_check(state, value, oldvalue, initiator):
- if value is not None:
- hasparent = initiator.hasparent(attributes.instance_state(value))
- if hasparent and oldvalue is not value:
- raise sa_exc.InvalidRequestError(
- "Instance %s is already associated with an instance "
- "of %s via its %s attribute, and is only allowed a "
- "single parent." %
- (mapperutil.instance_str(value), state.class_, prop)
- )
- return value
-
- def append(state, value, initiator):
- return _do_check(state, value, None, initiator)
-
- def set_(state, value, oldvalue, initiator):
- return _do_check(state, value, oldvalue, initiator)
-
- event.listen(desc, 'append', append, raw=True, retval=True, active_history=True)
- event.listen(desc, 'set', set_, raw=True, retval=True, active_history=True)
-
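
factory() above is the single switch that maps relationship(lazy=...) values onto loader strategy classes, and the per-query loader options reach it through the same EagerLazyOption path. A minimal sketch of driving it from user code with the 0.7-era API (the Parent/Child models are illustrative):

    from sqlalchemy import Column, Integer, ForeignKey, create_engine
    from sqlalchemy.orm import Session, relationship, joinedload
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Parent(Base):
        __tablename__ = 'parent'
        id = Column(Integer, primary_key=True)
        # lazy='select' installs LazyLoader as the class-level strategy
        children = relationship('Child', lazy='select')

    class Child(Base):
        __tablename__ = 'child'
        id = Column(Integer, primary_key=True)
        parent_id = Column(Integer, ForeignKey('parent.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    # per-query override; factory() resolves:
    #   joinedload(...)   -> lazy='joined'   -> EagerLoader
    #   subqueryload(...) -> lazy='subquery' -> SubqueryLoader
    #   noload(...)       -> lazy='noload'   -> NoLoader
    parents = session.query(Parent).options(
        joinedload(Parent.children)).all()
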
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/sync.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/sync.py
deleted file mode 100755
index 5ebd44fb..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/sync.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# orm/sync.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""private module containing functions used for copying data
-between instances based on join conditions.
-"""
-
-from sqlalchemy.orm import exc, util as mapperutil, attributes
-
-def populate(source, source_mapper, dest, dest_mapper,
- synchronize_pairs, uowcommit, flag_cascaded_pks):
- source_dict = source.dict
- dest_dict = dest.dict
-
- for l, r in synchronize_pairs:
- try:
- # inline of source_mapper._get_state_attr_by_column
- prop = source_mapper._columntoproperty[l]
- value = source.manager[prop.key].impl.get(source, source_dict,
- attributes.PASSIVE_OFF)
- except exc.UnmappedColumnError:
- _raise_col_to_prop(False, source_mapper, l, dest_mapper, r)
-
- try:
- # inline of dest_mapper._set_state_attr_by_column
- prop = dest_mapper._columntoproperty[r]
- dest.manager[prop.key].impl.set(dest, dest_dict, value, None)
- except exc.UnmappedColumnError:
- _raise_col_to_prop(True, source_mapper, l, dest_mapper, r)
-
- # technically the "r.primary_key" check isn't
- # needed here, but we check for this condition to limit
- # how often this logic is invoked for memory/performance
- # reasons, since we only need this info for a primary key
- # destination.
- if flag_cascaded_pks and l.primary_key and \
- r.primary_key and \
- r.references(l):
- uowcommit.attributes[("pk_cascaded", dest, r)] = True
-
-def clear(dest, dest_mapper, synchronize_pairs):
- for l, r in synchronize_pairs:
- if r.primary_key:
- raise AssertionError(
- "Dependency rule tried to blank-out primary key "
- "column '%s' on instance '%s'" %
- (r, mapperutil.state_str(dest))
- )
- try:
- dest_mapper._set_state_attr_by_column(dest, dest.dict, r, None)
- except exc.UnmappedColumnError:
- _raise_col_to_prop(True, None, l, dest_mapper, r)
-
-def update(source, source_mapper, dest, old_prefix, synchronize_pairs):
- for l, r in synchronize_pairs:
- try:
- oldvalue = source_mapper._get_committed_attr_by_column(source.obj(), l)
- value = source_mapper._get_state_attr_by_column(source, source.dict, l)
- except exc.UnmappedColumnError:
- _raise_col_to_prop(False, source_mapper, l, None, r)
- dest[r.key] = value
- dest[old_prefix + r.key] = oldvalue
-
-def populate_dict(source, source_mapper, dict_, synchronize_pairs):
- for l, r in synchronize_pairs:
- try:
- value = source_mapper._get_state_attr_by_column(source, source.dict, l)
- except exc.UnmappedColumnError:
- _raise_col_to_prop(False, source_mapper, l, None, r)
-
- dict_[r.key] = value
-
-def source_modified(uowcommit, source, source_mapper, synchronize_pairs):
- """return true if the source object has changes from an old to a
- new value on the given synchronize pairs
-
- """
- for l, r in synchronize_pairs:
- try:
- prop = source_mapper._columntoproperty[l]
- except exc.UnmappedColumnError:
- _raise_col_to_prop(False, source_mapper, l, None, r)
- history = uowcommit.get_attribute_history(source, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
- return bool(history.deleted)
- else:
- return False
-
-def _raise_col_to_prop(isdest, source_mapper, source_column, dest_mapper, dest_column):
- if isdest:
- raise exc.UnmappedColumnError(
- "Can't execute sync rule for destination column '%s'; "
- "mapper '%s' does not map this column. Try using an explicit"
- " `foreign_keys` collection which does not include this column "
- "(or use a viewonly=True relation)." % (dest_column, dest_mapper)
- )
- else:
- raise exc.UnmappedColumnError(
- "Can't execute sync rule for source column '%s'; mapper '%s' "
- "does not map this column. Try using an explicit `foreign_keys`"
- " collection which does not include destination column '%s' (or "
- "use a viewonly=True relation)." %
- (source_column, source_mapper, dest_column)
- )
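
sync.populate() and populate_dict() above are what copy values across a relationship's synchronize_pairs during flush, which is how a freshly generated parent primary key lands in the child's foreign key column. A minimal sketch of the observable effect (the Order/Item models are illustrative):

    from sqlalchemy import Column, Integer, ForeignKey, create_engine
    from sqlalchemy.orm import Session, relationship
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Order(Base):
        __tablename__ = 'orders'
        id = Column(Integer, primary_key=True)
        items = relationship('Item')

    class Item(Base):
        __tablename__ = 'items'
        id = Column(Integer, primary_key=True)
        order_id = Column(Integer, ForeignKey('orders.id'))

    engine = create_engine('sqlite://')
    Base.metadata.create_all(engine)
    session = Session(engine)

    order = Order(items=[Item()])
    session.add(order)
    session.flush()

    # during flush, populate() copied Order.id onto Item.order_id
    # along the relationship's synchronize_pairs
    assert order.items[0].order_id == order.id
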
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/unitofwork.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/unitofwork.py
deleted file mode 100755
index 5e0c9393..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/unitofwork.py
+++ /dev/null
@@ -1,583 +0,0 @@
-# orm/unitofwork.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""The internals for the unit of work system.
-
-The session's flush() process passes objects to a contextual object
-here, which assembles flush tasks based on mappers and their properties,
-organizes them in order of dependency, and executes.
-
-"""
-
-from sqlalchemy import util, event
-from sqlalchemy.util import topological
-from sqlalchemy.orm import attributes, interfaces
-from sqlalchemy.orm import util as mapperutil
-session = util.importlater("sqlalchemy.orm", "session")
-
-def track_cascade_events(descriptor, prop):
- """Establish event listeners on object attributes which handle
- cascade-on-set/append.
-
- """
- key = prop.key
-
- def append(state, item, initiator):
- # process "save_update" cascade rules for when
- # an instance is appended to the list of another instance
-
- sess = session._state_session(state)
- if sess:
- prop = state.manager.mapper._props[key]
- item_state = attributes.instance_state(item)
- if prop.cascade.save_update and \
- (prop.cascade_backrefs or key == initiator.key) and \
- not sess._contains_state(item_state):
- sess._save_or_update_state(item_state)
- return item
-
- def remove(state, item, initiator):
- sess = session._state_session(state)
- if sess:
- prop = state.manager.mapper._props[key]
- # expunge pending orphans
- item_state = attributes.instance_state(item)
- if prop.cascade.delete_orphan and \
- item_state in sess._new and \
- prop.mapper._is_orphan(item_state):
- sess.expunge(item)
-
- def set_(state, newvalue, oldvalue, initiator):
- # process "save_update" cascade rules for when an instance
- # is attached to another instance
- if oldvalue is newvalue:
- return newvalue
-
- sess = session._state_session(state)
- if sess:
- prop = state.manager.mapper._props[key]
- if newvalue is not None:
- newvalue_state = attributes.instance_state(newvalue)
- if prop.cascade.save_update and \
- (prop.cascade_backrefs or key == initiator.key) and \
- not sess._contains_state(newvalue_state):
- sess._save_or_update_state(newvalue_state)
-
- if oldvalue is not None and prop.cascade.delete_orphan:
- oldvalue_state = attributes.instance_state(oldvalue)
-
- if oldvalue_state in sess._new and \
- prop.mapper._is_orphan(oldvalue_state):
- sess.expunge(oldvalue)
- return newvalue
-
- event.listen(descriptor, 'append', append, raw=True, retval=True)
- event.listen(descriptor, 'remove', remove, raw=True, retval=True)
- event.listen(descriptor, 'set', set_, raw=True, retval=True)
-
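track_cascade_events() above is pure event wiring: the "append" and "set"
listeners pull newly associated child objects into the parent's session
(save-update cascade), while "remove" and "set" expunge pending orphans. A
toy observer sketch of the append side, with invented classes standing in
for the Session and the instrumented collection:

    class ToySession(object):
        def __init__(self):
            self.pending = set()

    class ToyParent(object):
        def __init__(self, session):
            self.session = session
            self.children = []

        def append_child(self, child):
            # the "append" hook: cascade the child into the session
            self.session.pending.add(child)
            self.children.append(child)

    sess = ToySession()
    parent = ToyParent(sess)
    child = object()
    parent.append_child(child)
    assert child in sess.pending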
-
-class UOWTransaction(object):
- def __init__(self, session):
- self.session = session
-
- # dictionary used by external actors to
- # store arbitrary state information.
- self.attributes = {}
-
- # dictionary of mappers to sets of
- # DependencyProcessors, which are also
- # set to be part of the sorted flush actions,
- # which have that mapper as a parent.
- self.deps = util.defaultdict(set)
-
- # dictionary of mappers to sets of InstanceState
- # items pending for flush which have that mapper
- # as a parent.
- self.mappers = util.defaultdict(set)
-
- # a dictionary of Preprocess objects, which gather
- # additional states impacted by the flush
- # and determine if a flush action is needed
- self.presort_actions = {}
-
- # dictionary of PostSortRec objects, each
- # one issues work during the flush within
- # a certain ordering.
- self.postsort_actions = {}
-
- # a set of 2-tuples, each containing two
- # PostSortRec objects where the second
- # is dependent on the first being executed
- # first
- self.dependencies = set()
-
- # dictionary of InstanceState-> (isdelete, listonly)
- # tuples, indicating if this state is to be deleted
- # or insert/updated, or just refreshed
- self.states = {}
-
- # tracks InstanceStates which will be receiving
- # a "post update" call. Keys are mappers,
- # values are a set of states and a set of the
- # columns which should be included in the update.
- self.post_update_states = util.defaultdict(lambda: (set(), set()))
-
- @property
- def has_work(self):
- return bool(self.states)
-
- def is_deleted(self, state):
- """return true if the given state is marked as deleted
- within this uowtransaction."""
-
- return state in self.states and self.states[state][0]
-
- def memo(self, key, callable_):
- if key in self.attributes:
- return self.attributes[key]
- else:
- self.attributes[key] = ret = callable_()
- return ret
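memo() is compute-once storage scoped to the flush: the first caller pays
for the computation, and later callers for the same key read the cached
value out of self.attributes. The same shape in isolation, with
illustrative names:

    attributes = {}

    def memo(key, callable_):
        if key in attributes:
            return attributes[key]
        attributes[key] = ret = callable_()
        return ret

    calls = []
    memo("answer", lambda: calls.append(1) or 42)
    memo("answer", lambda: calls.append(1) or 42)
    assert attributes["answer"] == 42 and len(calls) == 1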
-
- def remove_state_actions(self, state):
- """remove pending actions for a state from the uowtransaction."""
-
- isdelete = self.states[state][0]
-
- self.states[state] = (isdelete, True)
-
- def get_attribute_history(self, state, key,
- passive=attributes.PASSIVE_NO_INITIALIZE):
- """facade to attributes.get_state_history(), including caching of results."""
-
- hashkey = ("history", state, key)
-
- # cache the objects, not the states; the strong reference here
- # prevents newly loaded objects from being dereferenced during the
- # flush process
-
- if hashkey in self.attributes:
- history, state_history, cached_passive = self.attributes[hashkey]
- # if the cached lookup was "passive" and now
- # we want non-passive, do a non-passive lookup and re-cache
- if cached_passive is not attributes.PASSIVE_OFF \
- and passive is attributes.PASSIVE_OFF:
- impl = state.manager[key].impl
- history = impl.get_history(state, state.dict,
- attributes.PASSIVE_OFF)
- if history and impl.uses_objects:
- state_history = history.as_state()
- else:
- state_history = history
- self.attributes[hashkey] = (history, state_history, passive)
- else:
- impl = state.manager[key].impl
- # TODO: store the history as (state, object) tuples
- # so we don't have to keep converting here
- history = impl.get_history(state, state.dict, passive)
- if history and impl.uses_objects:
- state_history = history.as_state()
- else:
- state_history = history
- self.attributes[hashkey] = (history, state_history, passive)
-
- return state_history
-
- def has_dep(self, processor):
- return (processor, True) in self.presort_actions
-
- def register_preprocessor(self, processor, fromparent):
- key = (processor, fromparent)
- if key not in self.presort_actions:
- self.presort_actions[key] = Preprocess(processor, fromparent)
-
- def register_object(self, state, isdelete=False,
- listonly=False, cancel_delete=False,
- operation=None, prop=None):
- if not self.session._contains_state(state):
- if not state.deleted and operation is not None:
- util.warn("Object of type %s not in session, %s operation "
- "along '%s' will not proceed" %
- (mapperutil.state_class_str(state), operation, prop))
- return False
-
- if state not in self.states:
- mapper = state.manager.mapper
-
- if mapper not in self.mappers:
- mapper._per_mapper_flush_actions(self)
-
- self.mappers[mapper].add(state)
- self.states[state] = (isdelete, listonly)
- else:
- if not listonly and (isdelete or cancel_delete):
- self.states[state] = (isdelete, False)
- return True
-
- def issue_post_update(self, state, post_update_cols):
- mapper = state.manager.mapper.base_mapper
- states, cols = self.post_update_states[mapper]
- states.add(state)
- cols.update(post_update_cols)
-
- @util.memoized_property
- def _mapper_for_dep(self):
- """return a dynamic mapping of (Mapper, DependencyProcessor) to
- True or False, indicating if the DependencyProcessor operates
- on objects of that Mapper.
-
- The result is stored in the dictionary persistently once
- calculated.
-
- """
- return util.PopulateDict(
- lambda tup:tup[0]._props.get(tup[1].key) is tup[1].prop
- )
-
- def filter_states_for_dep(self, dep, states):
- """Filter the given list of InstanceStates to those relevant to the
- given DependencyProcessor.
-
- """
- mapper_for_dep = self._mapper_for_dep
- return [s for s in states if mapper_for_dep[(s.manager.mapper, dep)]]
-
- def states_for_mapper_hierarchy(self, mapper, isdelete, listonly):
- checktup = (isdelete, listonly)
- for mapper in mapper.base_mapper.self_and_descendants:
- for state in self.mappers[mapper]:
- if self.states[state] == checktup:
- yield state
-
- def _generate_actions(self):
- """Generate the full, unsorted collection of PostSortRecs as
- well as dependency pairs for this UOWTransaction.
-
- """
- # execute presort_actions, until all states
- # have been processed. a presort_action might
- # add new states to the uow.
- while True:
- ret = False
- for action in list(self.presort_actions.values()):
- if action.execute(self):
- ret = True
- if not ret:
- break
-
- # see if the graph of mapper dependencies has cycles.
- self.cycles = cycles = topological.find_cycles(
- self.dependencies,
- self.postsort_actions.values())
-
- if cycles:
- # if yes, break the per-mapper actions into
- # per-state actions
- convert = dict(
- (rec, set(rec.per_state_flush_actions(self)))
- for rec in cycles
- )
-
- # rewrite the existing dependencies to point to
- # the per-state actions for those per-mapper actions
- # that were broken up.
- for edge in list(self.dependencies):
- if None in edge or \
- edge[0].disabled or edge[1].disabled or \
- cycles.issuperset(edge):
- self.dependencies.remove(edge)
- elif edge[0] in cycles:
- self.dependencies.remove(edge)
- for dep in convert[edge[0]]:
- self.dependencies.add((dep, edge[1]))
- elif edge[1] in cycles:
- self.dependencies.remove(edge)
- for dep in convert[edge[1]]:
- self.dependencies.add((edge[0], dep))
-
- return set([a for a in self.postsort_actions.values()
- if not a.disabled
- ]
- ).difference(cycles)
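_generate_actions() hands topological.sort() a set of (before, after)
dependency pairs plus the surviving PostSortRec nodes; the sort yields an
order in which every dependency precedes its dependent, while
find_cycles() catches the cases that must first be broken into per-state
actions. A generic Kahn-style sketch of such a sort, not SQLAlchemy's own
topological module:

    from collections import defaultdict, deque

    def toposort(dependencies, nodes):
        # dependencies: set of (before, after) pairs over nodes
        indegree = dict.fromkeys(nodes, 0)
        successors = defaultdict(list)
        for before, after in dependencies:
            successors[before].append(after)
            indegree[after] += 1
        queue = deque(n for n in nodes if indegree[n] == 0)
        order = []
        while queue:
            node = queue.popleft()
            order.append(node)
            for nxt in successors[node]:
                indegree[nxt] -= 1
                if indegree[nxt] == 0:
                    queue.append(nxt)
        if len(order) != len(nodes):
            raise ValueError("cycle: break into per-state actions")
        return order

    print(toposort({("save_A", "process_AB"), ("process_AB", "save_B")},
                   ["save_A", "save_B", "process_AB"]))
    # -> ['save_A', 'process_AB', 'save_B']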
-
- def execute(self):
- postsort_actions = self._generate_actions()
-
- #sort = topological.sort(self.dependencies, postsort_actions)
- #print "--------------"
- #print self.dependencies
- #print list(sort)
- #print "COUNT OF POSTSORT ACTIONS", len(postsort_actions)
-
- # execute
- if self.cycles:
- for set_ in topological.sort_as_subsets(
- self.dependencies,
- postsort_actions):
- while set_:
- n = set_.pop()
- n.execute_aggregate(self, set_)
- else:
- for rec in topological.sort(
- self.dependencies,
- postsort_actions):
- rec.execute(self)
-
-
- def finalize_flush_changes(self):
- """mark processed objects as clean / deleted after a successful flush().
-
- this method is called within the flush() method after the
- execute() method has succeeded and the transaction has been committed.
-
- """
- for state, (isdelete, listonly) in self.states.iteritems():
- if isdelete:
- self.session._remove_newly_deleted(state)
- else:
- # if listonly:
- # debug... would like to see how many do this
- self.session._register_newly_persistent(state)
-
-class IterateMappersMixin(object):
- def _mappers(self, uow):
- if self.fromparent:
- return iter(
- m for m in self.dependency_processor.parent.self_and_descendants
- if uow._mapper_for_dep[(m, self.dependency_processor)]
- )
- else:
- return self.dependency_processor.mapper.self_and_descendants
-
-class Preprocess(IterateMappersMixin):
- def __init__(self, dependency_processor, fromparent):
- self.dependency_processor = dependency_processor
- self.fromparent = fromparent
- self.processed = set()
- self.setup_flush_actions = False
-
- def execute(self, uow):
- delete_states = set()
- save_states = set()
-
- for mapper in self._mappers(uow):
- for state in uow.mappers[mapper].difference(self.processed):
- (isdelete, listonly) = uow.states[state]
- if not listonly:
- if isdelete:
- delete_states.add(state)
- else:
- save_states.add(state)
-
- if delete_states:
- self.dependency_processor.presort_deletes(uow, delete_states)
- self.processed.update(delete_states)
- if save_states:
- self.dependency_processor.presort_saves(uow, save_states)
- self.processed.update(save_states)
-
- if (delete_states or save_states):
- if not self.setup_flush_actions and (
- self.dependency_processor.\
- prop_has_changes(uow, delete_states, True) or
- self.dependency_processor.\
- prop_has_changes(uow, save_states, False)
- ):
- self.dependency_processor.per_property_flush_actions(uow)
- self.setup_flush_actions = True
- return True
- else:
- return False
-
-class PostSortRec(object):
- disabled = False
-
- def __new__(cls, uow, *args):
- key = (cls, ) + args
- if key in uow.postsort_actions:
- return uow.postsort_actions[key]
- else:
- uow.postsort_actions[key] = \
- ret = \
- object.__new__(cls)
- return ret
-
- def execute_aggregate(self, uow, recs):
- self.execute(uow)
-
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ",".join(str(x) for x in self.__dict__.values())
- )
-
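PostSortRec.__new__ interns its instances: one record per (class, *args)
key, shared through uow.postsort_actions, so that two requests for the
same flush action resolve to the same object and dependency pairs dedupe
naturally. The same flyweight-via-__new__ pattern in isolation (note that,
as in PostSortRec, __init__ re-runs on each construction and so must be
idempotent):

    class Interned(object):
        _registry = {}

        def __new__(cls, *args):
            key = (cls,) + args
            inst = cls._registry.get(key)
            if inst is None:
                inst = cls._registry[key] = object.__new__(cls)
            return inst

    a = Interned("mapper_A", False)
    b = Interned("mapper_A", False)
    assert a is b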
-class ProcessAll(IterateMappersMixin, PostSortRec):
- def __init__(self, uow, dependency_processor, delete, fromparent):
- self.dependency_processor = dependency_processor
- self.delete = delete
- self.fromparent = fromparent
- uow.deps[dependency_processor.parent.base_mapper].add(dependency_processor)
-
- def execute(self, uow):
- states = self._elements(uow)
- if self.delete:
- self.dependency_processor.process_deletes(uow, states)
- else:
- self.dependency_processor.process_saves(uow, states)
-
- def per_state_flush_actions(self, uow):
- # this is handled by SaveUpdateAll and DeleteAll,
- # since a ProcessAll should unconditionally be pulled
- # into per-state if either the parent/child mappers
- # are part of a cycle
- return iter([])
-
- def __repr__(self):
- return "%s(%s, delete=%s)" % (
- self.__class__.__name__,
- self.dependency_processor,
- self.delete
- )
-
- def _elements(self, uow):
- for mapper in self._mappers(uow):
- for state in uow.mappers[mapper]:
- (isdelete, listonly) = uow.states[state]
- if isdelete == self.delete and not listonly:
- yield state
-
-class IssuePostUpdate(PostSortRec):
- def __init__(self, uow, mapper, isdelete):
- self.mapper = mapper
- self.isdelete = isdelete
-
- def execute(self, uow):
- states, cols = uow.post_update_states[self.mapper]
- states = [s for s in states if uow.states[s][0] == self.isdelete]
-
- self.mapper._post_update(states, uow, cols)
-
-class SaveUpdateAll(PostSortRec):
- def __init__(self, uow, mapper):
- self.mapper = mapper
- assert mapper is mapper.base_mapper
-
- def execute(self, uow):
- self.mapper._save_obj(
- uow.states_for_mapper_hierarchy(self.mapper, False, False),
- uow
- )
-
- def per_state_flush_actions(self, uow):
- states = list(uow.states_for_mapper_hierarchy(self.mapper, False, False))
- for rec in self.mapper._per_state_flush_actions(
- uow,
- states,
- False):
- yield rec
-
- for dep in uow.deps[self.mapper]:
- states_for_prop = uow.filter_states_for_dep(dep, states)
- dep.per_state_flush_actions(uow, states_for_prop, False)
-
-class DeleteAll(PostSortRec):
- def __init__(self, uow, mapper):
- self.mapper = mapper
- assert mapper is mapper.base_mapper
-
- def execute(self, uow):
- self.mapper._delete_obj(
- uow.states_for_mapper_hierarchy(self.mapper, True, False),
- uow
- )
-
- def per_state_flush_actions(self, uow):
- states = list(uow.states_for_mapper_hierarchy(self.mapper, True, False))
- for rec in self.mapper._per_state_flush_actions(
- uow,
- states,
- True):
- yield rec
-
- for dep in uow.deps[self.mapper]:
- states_for_prop = uow.filter_states_for_dep(dep, states)
- dep.per_state_flush_actions(uow, states_for_prop, True)
-
-class ProcessState(PostSortRec):
- def __init__(self, uow, dependency_processor, delete, state):
- self.dependency_processor = dependency_processor
- self.delete = delete
- self.state = state
-
- def execute_aggregate(self, uow, recs):
- cls_ = self.__class__
- dependency_processor = self.dependency_processor
- delete = self.delete
- our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.dependency_processor is dependency_processor and
- r.delete is delete]
- recs.difference_update(our_recs)
- states = [self.state] + [r.state for r in our_recs]
- if delete:
- dependency_processor.process_deletes(uow, states)
- else:
- dependency_processor.process_saves(uow, states)
-
- def __repr__(self):
- return "%s(%s, %s, delete=%s)" % (
- self.__class__.__name__,
- self.dependency_processor,
- mapperutil.state_str(self.state),
- self.delete
- )
-
-class SaveUpdateState(PostSortRec):
- def __init__(self, uow, state, mapper):
- self.state = state
- self.mapper = mapper
-
- def execute_aggregate(self, uow, recs):
- cls_ = self.__class__
- mapper = self.mapper
- our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
- recs.difference_update(our_recs)
- mapper._save_obj(
- [self.state] +
- [r.state for r in our_recs],
- uow)
-
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- mapperutil.state_str(self.state)
- )
-
-class DeleteState(PostSortRec):
- def __init__(self, uow, state, mapper):
- self.state = state
- self.mapper = mapper
-
- def execute_aggregate(self, uow, recs):
- cls_ = self.__class__
- mapper = self.mapper
- our_recs = [r for r in recs
- if r.__class__ is cls_ and
- r.mapper is mapper]
- recs.difference_update(our_recs)
- states = [self.state] + [r.state for r in our_recs]
- mapper._delete_obj(
- [s for s in states if uow.states[s][0]],
- uow)
-
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- mapperutil.state_str(self.state)
- )
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/util.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/util.py
deleted file mode 100755
index 8448b545..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/orm/util.py
+++ /dev/null
@@ -1,625 +0,0 @@
-# orm/util.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-from sqlalchemy import sql, util, event, exc as sa_exc
-from sqlalchemy.sql import expression, util as sql_util, operators
-from sqlalchemy.orm.interfaces import MapperExtension, EXT_CONTINUE,\
- PropComparator, MapperProperty
-from sqlalchemy.orm import attributes, exc
-import operator
-
-mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
-
-all_cascades = frozenset(("delete", "delete-orphan", "all", "merge",
- "expunge", "save-update", "refresh-expire",
- "none"))
-
-_INSTRUMENTOR = ('mapper', 'instrumentor')
-
-class CascadeOptions(dict):
- """Keeps track of the options sent to relationship().cascade"""
-
- def __init__(self, arg=""):
- if not arg:
- values = set()
- else:
- values = set(c.strip() for c in arg.split(','))
-
- for name in ['save-update', 'delete', 'refresh-expire',
- 'merge', 'expunge']:
- boolean = name in values or 'all' in values
- setattr(self, name.replace('-', '_'), boolean)
- if boolean:
- self[name] = True
- self.delete_orphan = "delete-orphan" in values
- if self.delete_orphan:
- self['delete-orphan'] = True
-
- if self.delete_orphan and not self.delete:
- util.warn("The 'delete-orphan' cascade option requires "
- "'delete'. This will raise an error in 0.6.")
-
- for x in values:
- if x not in all_cascades:
- raise sa_exc.ArgumentError("Invalid cascade option '%s'" % x)
-
- def __repr__(self):
- return "CascadeOptions(%s)" % repr(",".join(
- [x for x in ['delete', 'save_update', 'merge', 'expunge',
-             'delete_orphan', 'refresh_expire']
- if getattr(self, x, False) is True]))
-
-def _validator_events(desc, key, validator):
- """Runs a validation method on an attribute value to be set or appended."""
-
- def append(state, value, initiator):
- return validator(state.obj(), key, value)
-
- def set_(state, value, oldvalue, initiator):
- return validator(state.obj(), key, value)
-
- event.listen(desc, 'append', append, raw=True, retval=True)
- event.listen(desc, 'set', set_, raw=True, retval=True)
-
-def polymorphic_union(table_map, typecolname, aliasname='p_union', cast_nulls=True):
- """Create a ``UNION`` statement used by a polymorphic mapper.
-
- See :ref:`concrete_inheritance` for an example of how
- this is used.
-
- :param table_map: mapping of polymorphic identities to
- :class:`.Table` objects.
- :param typecolname: string name of a "discriminator" column, which will be
- derived from the query, producing the polymorphic identity for each row. If
- ``None``, no polymorphic discriminator is generated.
- :param aliasname: name of the :func:`~sqlalchemy.sql.expression.alias()`
- construct generated.
- :param cast_nulls: if True, non-existent columns, which are represented as labeled
- NULLs, will be passed into CAST. This is a legacy behavior that is problematic
- on some backends such as Oracle - in which case it can be set to False.
-
- """
-
- colnames = util.OrderedSet()
- colnamemaps = {}
- types = {}
- for key in table_map.keys():
- table = table_map[key]
-
-        # MySQL doesn't like selecting from a select;
- # make it an alias of the select
- if isinstance(table, sql.Select):
- table = table.alias()
- table_map[key] = table
-
- m = {}
- for c in table.c:
- colnames.add(c.key)
- m[c.key] = c
- types[c.key] = c.type
- colnamemaps[table] = m
-
- def col(name, table):
- try:
- return colnamemaps[table][name]
- except KeyError:
- if cast_nulls:
- return sql.cast(sql.null(), types[name]).label(name)
- else:
- return sql.type_coerce(sql.null(), types[name]).label(name)
-
- result = []
- for type, table in table_map.iteritems():
- if typecolname is not None:
- result.append(
- sql.select([col(name, table) for name in colnames] +
- [sql.literal_column(sql_util._quote_ddl_expr(type)).
- label(typecolname)],
- from_obj=[table]))
- else:
- result.append(sql.select([col(name, table) for name in colnames],
- from_obj=[table]))
- return sql.union_all(*result).alias(aliasname)
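A usage sketch for polymorphic_union(), assuming a SQLAlchemy install and
invented table names; no database connection is needed to build the
construct (the import path is the module shown here, orm/util.py):

    from sqlalchemy import MetaData, Table, Column, Integer, String
    from sqlalchemy.orm.util import polymorphic_union

    metadata = MetaData()
    employees = Table('employees', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)))
    engineers = Table('engineers', metadata,
        Column('id', Integer, primary_key=True),
        Column('name', String(50)),
        Column('language', String(50)))

    pjoin = polymorphic_union(
        {'employee': employees, 'engineer': engineers},
        'type', 'pjoin')
    # pjoin is an alias of a UNION ALL in which columns missing from a
    # table appear as labeled NULLs, plus a 'type' discriminator column.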
-
-def identity_key(*args, **kwargs):
- """Get an identity key.
-
- Valid call signatures:
-
- * ``identity_key(class, ident)``
-
- class
- mapped class (must be a positional argument)
-
- ident
- primary key, if the key is composite this is a tuple
-
-
- * ``identity_key(instance=instance)``
-
- instance
- object instance (must be given as a keyword arg)
-
- * ``identity_key(class, row=row)``
-
- class
- mapped class (must be a positional argument)
-
- row
- result proxy row (must be given as a keyword arg)
-
- """
- if args:
- if len(args) == 1:
- class_ = args[0]
- try:
- row = kwargs.pop("row")
- except KeyError:
- ident = kwargs.pop("ident")
- elif len(args) == 2:
- class_, ident = args
- elif len(args) == 3:
- class_, ident = args
- else:
- raise sa_exc.ArgumentError("expected up to three "
- "positional arguments, got %s" % len(args))
- if kwargs:
- raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys()))
- mapper = class_mapper(class_)
- if "ident" in locals():
- return mapper.identity_key_from_primary_key(ident)
- return mapper.identity_key_from_row(row)
- instance = kwargs.pop("instance")
- if kwargs:
- raise sa_exc.ArgumentError("unknown keyword arguments: %s"
- % ", ".join(kwargs.keys()))
- mapper = object_mapper(instance)
- return mapper.identity_key_from_instance(instance)
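A short sketch of the first call form, using an invented declarative class
(import path again taken from this tree, orm/util.py):

    from sqlalchemy import Column, Integer
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm.util import identity_key

    Base = declarative_base()

    class Widget(Base):
        __tablename__ = 'widgets'
        id = Column(Integer, primary_key=True)

    # class plus primary-key value -> the session.identity_map key
    print(identity_key(Widget, 5))   # (Widget, (5,)) in this version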
-
-class ORMAdapter(sql_util.ColumnAdapter):
- """Extends ColumnAdapter to accept ORM entities.
-
- The selectable is extracted from the given entity,
- and the AliasedClass if any is referenced.
-
- """
- def __init__(self, entity, equivalents=None,
- chain_to=None, adapt_required=False):
- self.mapper, selectable, is_aliased_class = _entity_info(entity)
- if is_aliased_class:
- self.aliased_class = entity
- else:
- self.aliased_class = None
- sql_util.ColumnAdapter.__init__(self, selectable,
- equivalents, chain_to,
- adapt_required=adapt_required)
-
- def replace(self, elem):
- entity = elem._annotations.get('parentmapper', None)
- if not entity or entity.isa(self.mapper):
- return sql_util.ColumnAdapter.replace(self, elem)
- else:
- return None
-
-class AliasedClass(object):
- """Represents an "aliased" form of a mapped class for usage with Query.
-
- The ORM equivalent of a :func:`sqlalchemy.sql.expression.alias`
- construct, this object mimics the mapped class using a
- __getattr__ scheme and maintains a reference to a
- real :class:`~sqlalchemy.sql.expression.Alias` object.
-
- Usage is via the :class:`~sqlalchemy.orm.aliased()` synonym::
-
- # find all pairs of users with the same name
- user_alias = aliased(User)
- session.query(User, user_alias).\\
- join((user_alias, User.id > user_alias.id)).\\
- filter(User.name==user_alias.name)
-
- """
- def __init__(self, cls, alias=None, name=None):
- self.__mapper = _class_to_mapper(cls)
- self.__target = self.__mapper.class_
- if alias is None:
- alias = self.__mapper._with_polymorphic_selectable.alias(name=name)
- self.__adapter = sql_util.ClauseAdapter(alias,
- equivalents=self.__mapper._equivalent_columns)
- self.__alias = alias
- # used to assign a name to the RowTuple object
- # returned by Query.
- self._sa_label_name = name
- self.__name__ = 'AliasedClass_' + str(self.__target)
-
- def __getstate__(self):
- return {
- 'mapper':self.__mapper,
- 'alias':self.__alias,
- 'name':self._sa_label_name
- }
-
- def __setstate__(self, state):
- self.__mapper = state['mapper']
- self.__target = self.__mapper.class_
- alias = state['alias']
- self.__adapter = sql_util.ClauseAdapter(alias,
- equivalents=self.__mapper._equivalent_columns)
- self.__alias = alias
- name = state['name']
- self._sa_label_name = name
- self.__name__ = 'AliasedClass_' + str(self.__target)
-
- def __adapt_element(self, elem):
- return self.__adapter.traverse(elem).\
- _annotate({
- 'parententity': self,
- 'parentmapper':self.__mapper}
- )
-
- def __adapt_prop(self, existing, key):
- comparator = existing.comparator.adapted(self.__adapt_element)
-
- queryattr = attributes.QueryableAttribute(self, key,
- impl=existing.impl, parententity=self, comparator=comparator)
- setattr(self, key, queryattr)
- return queryattr
-
- def __getattr__(self, key):
- for base in self.__target.__mro__:
- try:
- attr = object.__getattribute__(base, key)
- except AttributeError:
- continue
- else:
- break
- else:
- raise AttributeError(key)
-
- if isinstance(attr, attributes.QueryableAttribute):
- return self.__adapt_prop(attr, key)
- elif hasattr(attr, 'func_code'):
- is_method = getattr(self.__target, key, None)
- if is_method and is_method.im_self is not None:
- return util.types.MethodType(attr.im_func, self, self)
- else:
- return None
- elif hasattr(attr, '__get__'):
- ret = attr.__get__(None, self)
- if isinstance(ret, PropComparator):
- return ret.adapted(self.__adapt_element)
- return ret
- else:
- return attr
-
- def __repr__(self):
- return '<AliasedClass at 0x%x; %s>' % (
- id(self), self.__target.__name__)
-
-def aliased(element, alias=None, name=None):
- if isinstance(element, expression.FromClause):
- return element.alias(name)
- else:
- return AliasedClass(element, alias=alias, name=name)
-
-def _orm_annotate(element, exclude=None):
- """Deep copy the given ClauseElement, annotating each element with the
- "_orm_adapt" flag.
-
- Elements within the exclude collection will be cloned but not annotated.
-
- """
- return sql_util._deep_annotate(element, {'_orm_adapt':True}, exclude)
-
-_orm_deannotate = sql_util._deep_deannotate
-
-class _ORMJoin(expression.Join):
- """Extend Join to support ORM constructs as input."""
-
- __visit_name__ = expression.Join.__visit_name__
-
- def __init__(self, left, right, onclause=None,
- isouter=False, join_to_left=True):
- adapt_from = None
-
- if hasattr(left, '_orm_mappers'):
- left_mapper = left._orm_mappers[1]
- if join_to_left:
- adapt_from = left.right
- else:
- left_mapper, left, left_is_aliased = _entity_info(left)
- if join_to_left and (left_is_aliased or not left_mapper):
- adapt_from = left
-
- right_mapper, right, right_is_aliased = _entity_info(right)
- if right_is_aliased:
- adapt_to = right
- else:
- adapt_to = None
-
- if left_mapper or right_mapper:
- self._orm_mappers = (left_mapper, right_mapper)
-
- if isinstance(onclause, basestring):
- prop = left_mapper.get_property(onclause)
- elif isinstance(onclause, attributes.QueryableAttribute):
- if adapt_from is None:
- adapt_from = onclause.__clause_element__()
- prop = onclause.property
- elif isinstance(onclause, MapperProperty):
- prop = onclause
- else:
- prop = None
-
- if prop:
- pj, sj, source, dest, \
- secondary, target_adapter = prop._create_joins(
- source_selectable=adapt_from,
- dest_selectable=adapt_to,
- source_polymorphic=True,
- dest_polymorphic=True,
- of_type=right_mapper)
-
- if sj is not None:
- left = sql.join(left, secondary, pj, isouter)
- onclause = sj
- else:
- onclause = pj
- self._target_adapter = target_adapter
-
- expression.Join.__init__(self, left, right, onclause, isouter)
-
- def join(self, right, onclause=None, isouter=False, join_to_left=True):
- return _ORMJoin(self, right, onclause, isouter, join_to_left)
-
- def outerjoin(self, right, onclause=None, join_to_left=True):
- return _ORMJoin(self, right, onclause, True, join_to_left)
-
-def join(left, right, onclause=None, isouter=False, join_to_left=True):
- """Produce an inner join between left and right clauses.
-
- In addition to the interface provided by
- :func:`~sqlalchemy.sql.expression.join()`, left and right may be mapped
- classes or AliasedClass instances. The onclause may be a
- string name of a relationship(), or a class-bound descriptor
- representing a relationship.
-
-    join_to_left indicates that the ON clause, in whatever form it is
-    passed, should be aliased against the selectable passed as the
-    left side. If False, the onclause is used as-is.
-
- """
- return _ORMJoin(left, right, onclause, isouter, join_to_left)
-
-def outerjoin(left, right, onclause=None, join_to_left=True):
- """Produce a left outer join between left and right clauses.
-
- In addition to the interface provided by
- :func:`~sqlalchemy.sql.expression.outerjoin()`, left and right may be
- mapped classes or AliasedClass instances. The onclause may be a string
- name of a relationship(), or a class-bound descriptor representing a
- relationship.
-
- """
- return _ORMJoin(left, right, onclause, True, join_to_left)
-
-def with_parent(instance, prop):
- """Create filtering criterion that relates this query's primary entity
- to the given related instance, using established :func:`.relationship()`
- configuration.
-
- The SQL rendered is the same as that rendered when a lazy loader
- would fire off from the given parent on that attribute, meaning
- that the appropriate state is taken from the parent object in
- Python without the need to render joins to the parent table
- in the rendered statement.
-
- As of 0.6.4, this method accepts parent instances in all
- persistence states, including transient, persistent, and detached.
- Only the requisite primary key/foreign key attributes need to
- be populated. Previous versions didn't work with transient
- instances.
-
- :param instance:
- An instance which has some :func:`.relationship`.
-
- :param property:
- String property name, or class-bound attribute, which indicates
- what relationship from the instance should be used to reconcile the
- parent/child relationship.
-
- """
- if isinstance(prop, basestring):
- mapper = object_mapper(instance)
- prop = mapper.get_property(prop)
- elif isinstance(prop, attributes.QueryableAttribute):
- prop = prop.property
-
- return prop.compare(operators.eq,
- instance,
- value_is_parent=True)
-
-
-def _entity_info(entity, compile=True):
- """Return mapping information given a class, mapper, or AliasedClass.
-
- Returns 3-tuple of: mapper, mapped selectable, boolean indicating if this
- is an aliased() construct.
-
- If the given entity is not a mapper, mapped class, or aliased construct,
- returns None, the entity, False. This is typically used to allow
- unmapped selectables through.
-
- """
- if isinstance(entity, AliasedClass):
- return entity._AliasedClass__mapper, entity._AliasedClass__alias, True
-
- if isinstance(entity, mapperlib.Mapper):
- mapper = entity
-
- elif isinstance(entity, type):
- class_manager = attributes.manager_of_class(entity)
-
- if class_manager is None:
- return None, entity, False
-
- mapper = class_manager.mapper
- else:
- return None, entity, False
-
- if compile and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper, mapper._with_polymorphic_selectable, False
-
-def _entity_descriptor(entity, key):
- """Return a class attribute given an entity and string name.
-
- May return :class:`.InstrumentedAttribute` or user-defined
- attribute.
-
- """
- if not isinstance(entity, (AliasedClass, type)):
- entity = entity.class_
-
- try:
- return getattr(entity, key)
- except AttributeError:
- raise sa_exc.InvalidRequestError(
- "Entity '%s' has no property '%s'" %
- (entity, key)
- )
-
-def _orm_columns(entity):
- mapper, selectable, is_aliased_class = _entity_info(entity)
- if isinstance(selectable, expression.Selectable):
- return [c for c in selectable.c]
- else:
- return [selectable]
-
-def _orm_selectable(entity):
- mapper, selectable, is_aliased_class = _entity_info(entity)
- return selectable
-
-def _attr_as_key(attr):
- if hasattr(attr, 'key'):
- return attr.key
- else:
- return expression._column_as_key(attr)
-
-def _is_aliased_class(entity):
- return isinstance(entity, AliasedClass)
-
-_state_mapper = util.dottedgetter('manager.mapper')
-
-def object_mapper(instance):
- """Given an object, return the primary Mapper associated with the object
- instance.
-
- Raises UnmappedInstanceError if no mapping is configured.
-
- """
- try:
- state = attributes.instance_state(instance)
- return state.manager.mapper
- except exc.UnmappedClassError:
- raise exc.UnmappedInstanceError(instance)
- except exc.NO_STATE:
- raise exc.UnmappedInstanceError(instance)
-
-def class_mapper(class_, compile=True):
- """Given a class, return the primary Mapper associated with the key.
-
- Raises UnmappedClassError if no mapping is configured.
-
- """
-
- try:
- class_manager = attributes.manager_of_class(class_)
- mapper = class_manager.mapper
-
- except exc.NO_STATE:
- raise exc.UnmappedClassError(class_)
-
- if compile and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper
-
-def _class_to_mapper(class_or_mapper, compile=True):
- if _is_aliased_class(class_or_mapper):
- return class_or_mapper._AliasedClass__mapper
-
- elif isinstance(class_or_mapper, type):
- try:
- class_manager = attributes.manager_of_class(class_or_mapper)
- mapper = class_manager.mapper
- except exc.NO_STATE:
- raise exc.UnmappedClassError(class_or_mapper)
- elif isinstance(class_or_mapper, mapperlib.Mapper):
- mapper = class_or_mapper
- else:
- raise exc.UnmappedClassError(class_or_mapper)
-
- if compile and mapperlib.module._new_mappers:
- mapperlib.configure_mappers()
- return mapper
-
-def has_identity(object):
- state = attributes.instance_state(object)
- return state.has_identity
-
-def _is_mapped_class(cls):
- if isinstance(cls, (AliasedClass, mapperlib.Mapper)):
- return True
- if isinstance(cls, expression.ClauseElement):
- return False
- if isinstance(cls, type):
- manager = attributes.manager_of_class(cls)
- return manager and _INSTRUMENTOR in manager.info
- return False
-
-def instance_str(instance):
- """Return a string describing an instance."""
-
- return state_str(attributes.instance_state(instance))
-
-def state_str(state):
- """Return a string describing an instance via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s at 0x%x>' % (state.class_.__name__, id(state.obj()))
-
-def state_class_str(state):
- """Return a string describing an instance's class via its InstanceState."""
-
- if state is None:
- return "None"
- else:
- return '<%s>' % (state.class_.__name__, )
-
-def attribute_str(instance, attribute):
- return instance_str(instance) + "." + attribute
-
-def state_attribute_str(state, attribute):
- return state_str(state) + "." + attribute
-
-def identity_equal(a, b):
- if a is b:
- return True
- if a is None or b is None:
- return False
- try:
- state_a = attributes.instance_state(a)
- state_b = attributes.instance_state(b)
- except exc.NO_STATE:
- return False
- if state_a.key is None or state_b.key is None:
- return False
- return state_a.key == state_b.key
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/pool.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/pool.py
deleted file mode 100755
index 2edafbf3..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/pool.py
+++ /dev/null
@@ -1,958 +0,0 @@
-# sqlalchemy/pool.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-
-"""Connection pooling for DB-API connections.
-
-Provides a number of connection pool implementations for a variety of
-usage scenarios and thread behavior requirements imposed by the
-application, DB-API or database itself.
-
-Also provides a DB-API 2.0 connection proxying mechanism allowing
-regular DB-API connect() methods to be transparently managed by a
-SQLAlchemy connection pool.
-"""
-
-import weakref, time, traceback
-
-from sqlalchemy import exc, log, event, events, interfaces, util
-from sqlalchemy.util import queue as sqla_queue
-from sqlalchemy.util import threading, memoized_property, \
- chop_traceback
-
-proxies = {}
-
-def manage(module, **params):
- """Return a proxy for a DB-API module that automatically
- pools connections.
-
- Given a DB-API 2.0 module and pool management parameters, returns
- a proxy for the module that will automatically pool connections,
- creating new connection pools for each distinct set of connection
- arguments sent to the decorated module's connect() function.
-
- :param module: a DB-API 2.0 database module
-
- :param poolclass: the class used by the pool module to provide
- pooling. Defaults to :class:`.QueuePool`.
-
- :param \*\*params: will be passed through to *poolclass*
-
- """
- try:
- return proxies[module]
- except KeyError:
- return proxies.setdefault(module, _DBProxy(module, **params))
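A usage sketch for manage(), following its docstring; sqlite3 is the
stdlib DB-API module, and the connect arguments are illustrative:

    import sqlite3
    from sqlalchemy import pool

    sqlite3_pooled = pool.manage(sqlite3)      # proxy for the module
    conn = sqlite3_pooled.connect(':memory:')  # pooled per connect() args
    conn.close()                               # returns it to its pool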
-
-def clear_managers():
- """Remove all current DB-API 2.0 managers.
-
- All pools and connections are disposed.
- """
-
- for manager in proxies.itervalues():
- manager.close()
- proxies.clear()
-
-
-class Pool(log.Identified):
- """Abstract base class for connection pools."""
-
- def __init__(self,
- creator, recycle=-1, echo=None,
- use_threadlocal=False,
- logging_name=None,
- reset_on_return=True,
- listeners=None,
- events=None,
- _dispatch=None):
- """
- Construct a Pool.
-
- :param creator: a callable function that returns a DB-API
-          connection object. The function will be called with no
-          arguments.
-
-        :param recycle: If set to a value other than -1, the number of
-          seconds between connection recycling: upon checkout, if this
-          timeout has been surpassed the connection will be closed and
-          replaced with a newly opened connection. Defaults to -1.
-
- :param logging_name: String identifier which will be used within
- the "name" field of logging records generated within the
- "sqlalchemy.pool" logger. Defaults to a hexstring of the object's
- id.
-
- :param echo: If True, connections being pulled and retrieved
- from the pool will be logged to the standard output, as well
- as pool sizing information. Echoing can also be achieved by
- enabling logging for the "sqlalchemy.pool"
- namespace. Defaults to False.
-
- :param use_threadlocal: If set to True, repeated calls to
- :meth:`connect` within the same application thread will be
- guaranteed to return the same connection object, if one has
- already been retrieved from the pool and has not been
- returned yet. Offers a slight performance advantage at the
- cost of individual transactions by default. The
- :meth:`unique_connection` method is provided to bypass the
- threadlocal behavior installed into :meth:`connect`.
-
- :param reset_on_return: If true, reset the database state of
- connections returned to the pool. This is typically a
- ROLLBACK to release locks and transaction resources.
- Disable at your own peril. Defaults to True.
-
- :param events: a list of 2-tuples, each of the form
- ``(callable, target)`` which will be passed to event.listen()
- upon construction. Provided here so that event listeners
- can be assigned via ``create_engine`` before dialect-level
- listeners are applied.
-
- :param listeners: Deprecated. A list of
- :class:`~sqlalchemy.interfaces.PoolListener`-like objects or
- dictionaries of callables that receive events when DB-API
- connections are created, checked out and checked in to the
- pool. This has been superseded by
- :func:`~sqlalchemy.event.listen`.
-
- """
- if logging_name:
- self.logging_name = self._orig_logging_name = logging_name
- else:
- self._orig_logging_name = None
-
- log.instance_logger(self, echoflag=echo)
- self._threadconns = threading.local()
- self._creator = creator
- self._recycle = recycle
- self._use_threadlocal = use_threadlocal
- self._reset_on_return = reset_on_return
- self.echo = echo
- if _dispatch:
- self.dispatch._update(_dispatch, only_propagate=False)
- if events:
- for fn, target in events:
- event.listen(self, target, fn)
- if listeners:
- util.warn_deprecated(
- "The 'listeners' argument to Pool (and "
- "create_engine()) is deprecated. Use event.listen().")
- for l in listeners:
- self.add_listener(l)
-
- dispatch = event.dispatcher(events.PoolEvents)
-
- @util.deprecated(2.7, "Pool.add_listener is deprecated. Use event.listen()")
- def add_listener(self, listener):
- """Add a :class:`.PoolListener`-like object to this pool.
-
- ``listener`` may be an object that implements some or all of
- PoolListener, or a dictionary of callables containing implementations
- of some or all of the named methods in PoolListener.
-
- """
- interfaces.PoolListener._adapt_listener(self, listener)
-
- def unique_connection(self):
- """Produce a DBAPI connection that is not referenced by any
- thread-local context.
-
- This method is different from :meth:`.Pool.connect` only if the
- ``use_threadlocal`` flag has been set to ``True``.
-
- """
-
- return _ConnectionFairy(self).checkout()
-
- def _create_connection(self):
- """Called by subclasses to create a new ConnectionRecord."""
-
- return _ConnectionRecord(self)
-
- def recreate(self):
- """Return a new :class:`.Pool`, of the same class as this one
- and configured with identical creation arguments.
-
-        This method is used in conjunction with :meth:`dispose`
- to close out an entire :class:`.Pool` and create a new one in
- its place.
-
- """
-
- raise NotImplementedError()
-
- def dispose(self):
- """Dispose of this pool.
-
- This method leaves the possibility of checked-out connections
-        remaining open. It is advised to not reuse the pool once dispose()
- is called, and to instead use a new pool constructed by the
- recreate() method.
-
- """
-
- raise NotImplementedError()
-
- def connect(self):
- """Return a DBAPI connection from the pool.
-
- The connection is instrumented such that when its
- ``close()`` method is called, the connection will be returned to
- the pool.
-
- """
- if not self._use_threadlocal:
- return _ConnectionFairy(self).checkout()
-
- try:
- rec = self._threadconns.current()
- if rec:
- return rec.checkout()
- except AttributeError:
- pass
-
- agent = _ConnectionFairy(self)
- self._threadconns.current = weakref.ref(agent)
- return agent.checkout()
-
- def _return_conn(self, record):
- """Given a _ConnectionRecord, return it to the :class:`.Pool`.
-
- This method is called when an instrumented DBAPI connection
- has its ``close()`` method called.
-
- """
- if self._use_threadlocal:
- try:
- del self._threadconns.current
- except AttributeError:
- pass
- self._do_return_conn(record)
-
- def _do_get(self):
- """Implementation for :meth:`get`, supplied by subclasses."""
-
- raise NotImplementedError()
-
- def _do_return_conn(self, conn):
- """Implementation for :meth:`return_conn`, supplied by subclasses."""
-
- raise NotImplementedError()
-
- def status(self):
- raise NotImplementedError()
-
-
-class _ConnectionRecord(object):
- finalize_callback = None
-
- def __init__(self, pool):
- self.__pool = pool
- self.connection = self.__connect()
- self.info = {}
-
- pool.dispatch.first_connect.exec_once(self.connection, self)
- pool.dispatch.connect(self.connection, self)
-
- def close(self):
- if self.connection is not None:
- self.__pool.logger.debug("Closing connection %r", self.connection)
- try:
- self.connection.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
- self.__pool.logger.debug("Exception closing connection %r",
- self.connection)
-
- def invalidate(self, e=None):
- if e is not None:
- self.__pool.logger.info(
- "Invalidate connection %r (reason: %s:%s)",
- self.connection, e.__class__.__name__, e)
- else:
- self.__pool.logger.info(
- "Invalidate connection %r", self.connection)
- self.__close()
- self.connection = None
-
- def get_connection(self):
- if self.connection is None:
- self.connection = self.__connect()
- self.info.clear()
- if self.__pool.dispatch.connect:
- self.__pool.dispatch.connect(self.connection, self)
- elif self.__pool._recycle > -1 and \
- time.time() - self.starttime > self.__pool._recycle:
- self.__pool.logger.info(
- "Connection %r exceeded timeout; recycling",
- self.connection)
- self.__close()
- self.connection = self.__connect()
- self.info.clear()
- if self.__pool.dispatch.connect:
- self.__pool.dispatch.connect(self.connection, self)
- return self.connection
-
- def __close(self):
- try:
- self.__pool.logger.debug("Closing connection %r", self.connection)
- self.connection.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except Exception, e:
- self.__pool.logger.debug(
- "Connection %r threw an error on close: %s",
- self.connection, e)
-
- def __connect(self):
- try:
- self.starttime = time.time()
- connection = self.__pool._creator()
- self.__pool.logger.debug("Created new connection %r", connection)
- return connection
- except Exception, e:
- self.__pool.logger.debug("Error on connect(): %s", e)
- raise
-
-
-def _finalize_fairy(connection, connection_record, pool, ref, echo):
- _refs.discard(connection_record)
-
- if ref is not None and \
- connection_record.fairy is not ref:
- return
-
- if connection is not None:
- try:
- if pool._reset_on_return:
- connection.rollback()
- # Immediately close detached instances
- if connection_record is None:
- connection.close()
- except Exception, e:
- if connection_record is not None:
- connection_record.invalidate(e=e)
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
- raise
-
- if connection_record is not None:
- connection_record.fairy = None
- if echo:
- pool.logger.debug("Connection %r being returned to pool",
- connection)
- if connection_record.finalize_callback:
- connection_record.finalize_callback(connection)
- del connection_record.finalize_callback
- if pool.dispatch.checkin:
- pool.dispatch.checkin(connection, connection_record)
- pool._return_conn(connection_record)
-
-_refs = set()
-
-class _ConnectionFairy(object):
- """Proxies a DB-API connection and provides return-on-dereference
- support."""
-
- __slots__ = '_pool', '__counter', 'connection', \
- '_connection_record', '__weakref__', \
- '_detached_info', '_echo'
-
- def __init__(self, pool):
- self._pool = pool
- self.__counter = 0
- self._echo = _echo = pool._should_log_debug()
- try:
- rec = self._connection_record = pool._do_get()
- conn = self.connection = self._connection_record.get_connection()
- rec.fairy = weakref.ref(
- self,
- lambda ref:_finalize_fairy(conn, rec, pool, ref, _echo)
- )
- _refs.add(rec)
- except:
- # helps with endless __getattr__ loops later on
- self.connection = None
- self._connection_record = None
- raise
- if self._echo:
- self._pool.logger.debug("Connection %r checked out from pool" %
- self.connection)
-
- @property
- def _logger(self):
- return self._pool.logger
-
- @property
- def is_valid(self):
- return self.connection is not None
-
- @property
- def info(self):
- """An info collection unique to this DB-API connection."""
-
- try:
- return self._connection_record.info
- except AttributeError:
- if self.connection is None:
- raise exc.InvalidRequestError("This connection is closed")
- try:
- return self._detached_info
- except AttributeError:
- self._detached_info = value = {}
- return value
-
- def invalidate(self, e=None):
- """Mark this connection as invalidated.
-
- The connection will be immediately closed. The containing
- ConnectionRecord will create a new connection when next used.
- """
-
- if self.connection is None:
- raise exc.InvalidRequestError("This connection is closed")
- if self._connection_record is not None:
- self._connection_record.invalidate(e=e)
- self.connection = None
- self._close()
-
- def cursor(self, *args, **kwargs):
- return self.connection.cursor(*args, **kwargs)
-
- def __getattr__(self, key):
- return getattr(self.connection, key)
-
- def checkout(self):
- if self.connection is None:
- raise exc.InvalidRequestError("This connection is closed")
- self.__counter += 1
-
- if not self._pool.dispatch.checkout or self.__counter != 1:
- return self
-
- # Pool listeners can trigger a reconnection on checkout
- attempts = 2
- while attempts > 0:
- try:
- self._pool.dispatch.checkout(self.connection,
- self._connection_record,
- self)
- return self
- except exc.DisconnectionError, e:
- self._pool.logger.info(
- "Disconnection detected on checkout: %s", e)
- self._connection_record.invalidate(e)
- self.connection = self._connection_record.get_connection()
- attempts -= 1
-
- self._pool.logger.info("Reconnection attempts exhausted on checkout")
- self.invalidate()
- raise exc.InvalidRequestError("This connection is closed")
-
- def detach(self):
- """Separate this connection from its Pool.
-
- This means that the connection will no longer be returned to the
- pool when closed, and will instead be literally closed. The
- containing ConnectionRecord is separated from the DB-API connection,
- and will create a new connection when next used.
-
- Note that any overall connection limiting constraints imposed by a
- Pool implementation may be violated after a detach, as the detached
- connection is removed from the pool's knowledge and control.
- """
-
- if self._connection_record is not None:
- _refs.remove(self._connection_record)
- self._connection_record.fairy = None
- self._connection_record.connection = None
- self._pool._do_return_conn(self._connection_record)
- self._detached_info = \
- self._connection_record.info.copy()
- self._connection_record = None
-
- def close(self):
- self.__counter -= 1
- if self.__counter == 0:
- self._close()
-
- def _close(self):
- _finalize_fairy(self.connection, self._connection_record,
- self._pool, None, self._echo)
- self.connection = None
- self._connection_record = None
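The return-on-dereference behavior comes from the weakref set up in
__init__ above: the record holds a weak reference to its fairy, and when
the last strong reference to the fairy goes away the callback runs
_finalize_fairy(), checking the connection back in. A generic sketch of
that trick with toy names (CPython's reference counting makes the
callback fire immediately on del):

    import weakref

    checked_in = []

    class Fairy(object):
        pass

    def checkout(record):
        fairy = Fairy()
        # the pool-side weakref; its callback fires when the fairy dies
        record['fairy'] = weakref.ref(
            fairy, lambda ref: checked_in.append(record['name']))
        return fairy

    rec = {'name': 'record-1'}
    fairy = checkout(rec)
    del fairy                 # dereference: the callback "checks in"
    assert checked_in == ['record-1']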
-
-class SingletonThreadPool(Pool):
- """A Pool that maintains one connection per thread.
-
-    Maintains one connection per thread, never moving a connection to a
-    thread other than the one in which it was created.
-
- Options are the same as those of :class:`.Pool`, as well as:
-
- :param pool_size: The number of threads in which to maintain connections
- at once. Defaults to five.
-
- :class:`.SingletonThreadPool` is used by the SQLite dialect
- automatically when a memory-based database is used.
- See :ref:`sqlite_toplevel`.
-
- """
-
- def __init__(self, creator, pool_size=5, **kw):
- kw['use_threadlocal'] = True
- Pool.__init__(self, creator, **kw)
- self._conn = threading.local()
- self._all_conns = set()
- self.size = pool_size
-
- def recreate(self):
- self.logger.info("Pool recreating")
- return SingletonThreadPool(self._creator,
- pool_size=self.size,
- recycle=self._recycle,
- echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- _dispatch=self.dispatch)
-
- def dispose(self):
- """Dispose of this pool."""
-
- for conn in self._all_conns:
- try:
- conn.close()
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
- # pysqlite won't even let you close a conn from a thread
- # that didn't create it
- pass
-
- self._all_conns.clear()
-
- def _cleanup(self):
- while len(self._all_conns) > self.size:
- c = self._all_conns.pop()
- c.close()
-
- def status(self):
- return "SingletonThreadPool id:%d size: %d" % \
- (id(self), len(self._all_conns))
-
- def _do_return_conn(self, conn):
- pass
-
- def _do_get(self):
- try:
- c = self._conn.current()
- if c:
- return c
- except AttributeError:
- pass
- c = self._create_connection()
- self._conn.current = weakref.ref(c)
- self._all_conns.add(c)
- if len(self._all_conns) > self.size:
- self._cleanup()
- return c
-
-class QueuePool(Pool):
- """A :class:`.Pool` that imposes a limit on the number of open connections.
-
- :class:`.QueuePool` is the default pooling implementation used for
- all :class:`.Engine` objects, unless the SQLite dialect is in use.
-
- """
-
- def __init__(self, creator, pool_size=5, max_overflow=10, timeout=30,
- **kw):
- """
- Construct a QueuePool.
-
- :param creator: a callable function that returns a DB-API
-          connection object. The function will be called with no
-          arguments.
-
- :param pool_size: The size of the pool to be maintained,
- defaults to 5. This is the largest number of connections that
- will be kept persistently in the pool. Note that the pool
- begins with no connections; once this number of connections
- is requested, that number of connections will remain.
- ``pool_size`` can be set to 0 to indicate no size limit; to
- disable pooling, use a :class:`~sqlalchemy.pool.NullPool`
- instead.
-
- :param max_overflow: The maximum overflow size of the
- pool. When the number of checked-out connections reaches the
- size set in pool_size, additional connections will be
- returned up to this limit. When those additional connections
- are returned to the pool, they are disconnected and
- discarded. It follows then that the total number of
- simultaneous connections the pool will allow is pool_size +
- `max_overflow`, and the total number of "sleeping"
- connections the pool will allow is pool_size. `max_overflow`
- can be set to -1 to indicate no overflow limit; no limit
- will be placed on the total number of concurrent
- connections. Defaults to 10.
-
- :param timeout: The number of seconds to wait before giving up
- on returning a connection. Defaults to 30.
-
-        :param recycle: If set to a value other than -1, the number of
-          seconds between connection recycling: upon checkout, if this
-          timeout has been surpassed the connection will be closed and
-          replaced with a newly opened connection. Defaults to -1.
-
- :param echo: If True, connections being pulled and retrieved
- from the pool will be logged to the standard output, as well
- as pool sizing information. Echoing can also be achieved by
- enabling logging for the "sqlalchemy.pool"
- namespace. Defaults to False.
-
- :param use_threadlocal: If set to True, repeated calls to
- :meth:`connect` within the same application thread will be
- guaranteed to return the same connection object, if one has
- already been retrieved from the pool and has not been
- returned yet. Offers a slight performance advantage at the
- cost of individual transactions by default. The
- :meth:`unique_connection` method is provided to bypass the
- threadlocal behavior installed into :meth:`connect`.
-
- :param reset_on_return: If true, reset the database state of
- connections returned to the pool. This is typically a
- ROLLBACK to release locks and transaction resources.
- Disable at your own peril. Defaults to True.
-
- :param listeners: A list of
- :class:`~sqlalchemy.interfaces.PoolListener`-like objects or
- dictionaries of callables that receive events when DB-API
- connections are created, checked out and checked in to the
- pool.
-
- """
- Pool.__init__(self, creator, **kw)
- self._pool = sqla_queue.Queue(pool_size)
- self._overflow = 0 - pool_size
- self._max_overflow = max_overflow
- self._timeout = timeout
- self._overflow_lock = self._max_overflow > -1 and \
- threading.Lock() or None
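Constructing a QueuePool directly (create_engine() normally does this on
your behalf); sqlite3 and the sizes here are illustrative:

    import sqlite3
    from sqlalchemy.pool import QueuePool

    p = QueuePool(lambda: sqlite3.connect(':memory:'),
                  pool_size=5, max_overflow=10, timeout=30)
    # at most pool_size + max_overflow = 15 connections may be checked
    # out simultaneously; only pool_size = 5 are retained once idle.
    conn = p.connect()
    print(p.status())
    conn.close()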
-
- def recreate(self):
- self.logger.info("Pool recreating")
- return QueuePool(self._creator, pool_size=self._pool.maxsize,
- max_overflow=self._max_overflow,
- timeout=self._timeout,
- recycle=self._recycle, echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- _dispatch=self.dispatch)
-
- def _do_return_conn(self, conn):
- try:
- self._pool.put(conn, False)
- except sqla_queue.Full:
- conn.close()
- if self._overflow_lock is None:
- self._overflow -= 1
- else:
- self._overflow_lock.acquire()
- try:
- self._overflow -= 1
- finally:
- self._overflow_lock.release()
-
- def _do_get(self):
- try:
- wait = self._max_overflow > -1 and \
- self._overflow >= self._max_overflow
- return self._pool.get(wait, self._timeout)
- except sqla_queue.Empty:
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
- if not wait:
- return self._do_get()
- else:
- raise exc.TimeoutError(
- "QueuePool limit of size %d overflow %d reached, "
- "connection timed out, timeout %d" %
- (self.size(), self.overflow(), self._timeout))
-
- if self._overflow_lock is not None:
- self._overflow_lock.acquire()
-
- if self._max_overflow > -1 and \
- self._overflow >= self._max_overflow:
- if self._overflow_lock is not None:
- self._overflow_lock.release()
- return self._do_get()
-
- try:
- con = self._create_connection()
- self._overflow += 1
- finally:
- if self._overflow_lock is not None:
- self._overflow_lock.release()
- return con
-
- def dispose(self):
- while True:
- try:
- conn = self._pool.get(False)
- conn.close()
- except sqla_queue.Empty:
- break
-
- self._overflow = 0 - self.size()
- self.logger.info("Pool disposed. %s", self.status())
-
- def status(self):
- return "Pool size: %d Connections in pool: %d "\
- "Current Overflow: %d Current Checked out "\
- "connections: %d" % (self.size(),
- self.checkedin(),
- self.overflow(),
- self.checkedout())
-
- def size(self):
- return self._pool.maxsize
-
- def checkedin(self):
- return self._pool.qsize()
-
- def overflow(self):
- return self._overflow
-
- def checkedout(self):
- return self._pool.maxsize - self._pool.qsize() + self._overflow
-
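A worked example of the accounting above, assuming pool_size=5, three connections idle in the queue, and an overflow count of 2:

    # checkedout() == size() - checkedin() + overflow()
    #              ==   5    -      3      +     2      == 4 in use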
-class NullPool(Pool):
- """A Pool which does not pool connections.
-
- Instead it literally opens and closes the underlying DB-API connection
- for each connection open/close.
-
- Reconnect-related functions such as ``recycle`` and connection
- invalidation are not supported by this Pool implementation, since
- no connections are held persistently.
-
- :class:`.NullPool` is used by the SQLite dialect automatically
- when a file-based database is used (as of SQLAlchemy 0.7).
- See :ref:`sqlite_toplevel`.
-
- """
-
- def status(self):
- return "NullPool"
-
- def _do_return_conn(self, conn):
- conn.close()
-
- def _do_get(self):
- return self._create_connection()
-
- def recreate(self):
- self.logger.info("Pool recreating")
-
- return NullPool(self._creator,
- recycle=self._recycle,
- echo=self.echo,
- logging_name=self._orig_logging_name,
- use_threadlocal=self._use_threadlocal,
- _dispatch=self.dispatch)
-
- def dispose(self):
- pass
-
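A brief sketch of selecting this pool explicitly; the engine URL is illustrative (for file-based SQLite, 0.7 already selects NullPool by default, per the docstring above):

    from sqlalchemy import create_engine
    from sqlalchemy.pool import NullPool

    engine = create_engine("sqlite:///some.db", poolclass=NullPool)
    conn = engine.connect()   # opens a fresh DB-API connection
    conn.close()              # really closes it; nothing is held in a pool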
-
-class StaticPool(Pool):
- """A Pool of exactly one connection, used for all requests.
-
- Reconnect-related functions such as ``recycle`` and connection
- invalidation (which is also used to support auto-reconnect) are not
- currently supported by this Pool implementation but may be implemented
- in a future release.
-
- """
-
- @memoized_property
- def _conn(self):
- return self._creator()
-
- @memoized_property
- def connection(self):
- return _ConnectionRecord(self)
-
- def status(self):
- return "StaticPool"
-
- def dispose(self):
- if '_conn' in self.__dict__:
- self._conn.close()
- self._conn = None
-
- def recreate(self):
- self.logger.info("Pool recreating")
- return self.__class__(creator=self._creator,
- recycle=self._recycle,
- use_threadlocal=self._use_threadlocal,
- reset_on_return=self._reset_on_return,
- echo=self.echo,
- logging_name=self._orig_logging_name,
- _dispatch=self.dispatch)
-
- def _create_connection(self):
- return self._conn
-
- def _do_return_conn(self, conn):
- pass
-
- def _do_get(self):
- return self.connection
-
-class AssertionPool(Pool):
- """A :class:`.Pool` that allows at most one checked out connection at any given
- time.
-
- This will raise an exception if more than one connection is checked out
- at a time. Useful for debugging code that is using more connections
- than desired.
-
- :class:`.AssertionPool` also logs a traceback of where
- the original connection was checked out, and reports
- this in the assertion error raised (new in 0.7).
-
- """
- def __init__(self, *args, **kw):
- self._conn = None
- self._checked_out = False
- self._store_traceback = kw.pop('store_traceback', True)
- self._checkout_traceback = None
- Pool.__init__(self, *args, **kw)
-
- def status(self):
- return "AssertionPool"
-
- def _do_return_conn(self, conn):
- if not self._checked_out:
- raise AssertionError("connection is not checked out")
- self._checked_out = False
- assert conn is self._conn
-
- def dispose(self):
- self._checked_out = False
- if self._conn:
- self._conn.close()
-
- def recreate(self):
- self.logger.info("Pool recreating")
- return AssertionPool(self._creator, echo=self.echo,
- logging_name=self._orig_logging_name,
- _dispatch=self.dispatch)
-
- def _do_get(self):
- if self._checked_out:
- if self._checkout_traceback:
- suffix = ' at:\n%s' % ''.join(
- chop_traceback(self._checkout_traceback))
- else:
- suffix = ''
- raise AssertionError("connection is already checked out" + suffix)
-
- if not self._conn:
- self._conn = self._create_connection()
-
- self._checked_out = True
- if self._store_traceback:
- self._checkout_traceback = traceback.format_stack()
- return self._conn
-
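A small sketch of the single-checkout behavior described above; the in-memory SQLite URL is an assumption for illustration:

    from sqlalchemy import create_engine
    from sqlalchemy.pool import AssertionPool

    engine = create_engine("sqlite://", poolclass=AssertionPool)
    c1 = engine.connect()
    # a second engine.connect() here would raise AssertionError,
    # including the stored traceback of where c1 was checked out
    c1.close()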
-class _DBProxy(object):
- """Layers connection pooling behavior on top of a standard DB-API module.
-
- Proxies a DB-API 2.0 connect() call to a connection pool keyed to the
- specific connect parameters. Other functions and attributes are delegated
- to the underlying DB-API module.
- """
-
- def __init__(self, module, poolclass=QueuePool, **kw):
- """Initializes a new proxy.
-
- module
- a DB-API 2.0 module
-
- poolclass
- a Pool class, defaulting to QueuePool
-
- Other parameters are sent to the Pool object's constructor.
-
- """
-
- self.module = module
- self.kw = kw
- self.poolclass = poolclass
- self.pools = {}
- self._create_pool_mutex = threading.Lock()
-
- def close(self):
- for key in self.pools.keys():
- del self.pools[key]
-
- def __del__(self):
- self.close()
-
- def __getattr__(self, key):
- return getattr(self.module, key)
-
- def get_pool(self, *args, **kw):
- key = self._serialize(*args, **kw)
- try:
- return self.pools[key]
- except KeyError:
- self._create_pool_mutex.acquire()
- try:
- if key not in self.pools:
- pool = self.poolclass(lambda:
- self.module.connect(*args, **kw), **self.kw)
- self.pools[key] = pool
- return pool
- else:
- return self.pools[key]
- finally:
- self._create_pool_mutex.release()
-
- def connect(self, *args, **kw):
- """Activate a connection to the database.
-
- Connect to the database using this DBProxy's module and the given
- connect arguments. If the arguments match an existing pool, the
- connection will be returned from the pool's current thread-local
- connection instance, or if there is no thread-local connection
- instance it will be checked out from the set of pooled connections.
-
- If the pool has no available connections and allows new connections
- to be created, a new database connection will be made.
-
- """
-
- return self.get_pool(*args, **kw).connect()
-
- def dispose(self, *args, **kw):
- """Dispose the pool referenced by the given connect arguments."""
-
- key = self._serialize(*args, **kw)
- try:
- del self.pools[key]
- except KeyError:
- pass
-
- def _serialize(self, *args, **kw):
- return tuple(
- list(args) +
- [(k, kw[k]) for k in sorted(kw)]
- )
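The public wrapper around _DBProxy is sqlalchemy.pool.manage(), which wraps a DB-API module so its connect() calls are pooled per distinct connect arguments; sqlite3 is used here purely as an example module:

    import sqlite3
    from sqlalchemy import pool

    sqlite3_pooled = pool.manage(sqlite3, poolclass=pool.QueuePool)
    conn = sqlite3_pooled.connect("example.db")  # one pool per distinct args
    conn.close()                                 # returned to that pool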
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/processors.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/processors.py
deleted file mode 100755
index cb5f00bd..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/processors.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# sqlalchemy/processors.py
-# Copyright (C) 2010-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""defines generic type conversion functions, as used in bind and result
-processors.
-
-They all share one common characteristic: None is passed through unchanged.
-
-"""
-
-import codecs
-import re
-import datetime
-
-def str_to_datetime_processor_factory(regexp, type_):
- rmatch = regexp.match
- # Even on python2.6, datetime.strptime is both slower than this code
- # and does not support microseconds.
- def process(value):
- if value is None:
- return None
- else:
- return type_(*map(int, rmatch(value).groups(0)))
- return process
-
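An illustrative check of the factory above, using a regexp equivalent to the DATETIME_RE defined later in this module:

    import datetime, re

    DT_RE = re.compile(r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
    parse = str_to_datetime_processor_factory(DT_RE, datetime.datetime)
    parse("2011-05-01 12:30:45.123456")
    # -> datetime.datetime(2011, 5, 1, 12, 30, 45, 123456)
    parse(None)   # -> None, passed through unchanged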
-def boolean_to_int(value):
- if value is None:
- return None
- else:
- return int(value)
-
-try:
- from sqlalchemy.cprocessors import UnicodeResultProcessor, \
- DecimalResultProcessor, \
- to_float, to_str, int_to_boolean, \
- str_to_datetime, str_to_time, \
- str_to_date
-
- def to_unicode_processor_factory(encoding, errors=None):
- # this is cumbersome but it would be even more so on the C side
- if errors is not None:
- return UnicodeResultProcessor(encoding, errors).process
- else:
- return UnicodeResultProcessor(encoding).process
-
- def to_decimal_processor_factory(target_class, scale=10):
- # Note that the scale argument is not taken into account for integer
- # values in the C implementation while it is in the Python one.
- # For example, the Python implementation might return
- # Decimal('5.00000') whereas the C implementation will
- # return Decimal('5'). These are equivalent of course.
- return DecimalResultProcessor(target_class, "%%.%df" % scale).process
-
-except ImportError:
- def to_unicode_processor_factory(encoding, errors=None):
- decoder = codecs.getdecoder(encoding)
-
- def process(value):
- if value is None:
- return None
- else:
- # decoder returns a tuple: (value, len). Simply dropping the
- # len part is safe: it is done that way in the normal
- # 'xx'.decode(encoding) code path.
- return decoder(value, errors)[0]
- return process
-
- def to_decimal_processor_factory(target_class, scale=10):
- fstring = "%%.%df" % scale
-
- def process(value):
- if value is None:
- return None
- else:
- return target_class(fstring % value)
- return process
-
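A quick sketch of the Python fallback above; note the scale-dependent formatting difference from the C version mentioned earlier:

    from decimal import Decimal

    to_dec = to_decimal_processor_factory(Decimal, scale=2)
    to_dec(5)      # -> Decimal('5.00'); the C version would give Decimal('5')
    to_dec(None)   # -> None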
- def to_float(value):
- if value is None:
- return None
- else:
- return float(value)
-
- def to_str(value):
- if value is None:
- return None
- else:
- return str(value)
-
- def int_to_boolean(value):
- if value is None:
- return None
- else:
- return value and True or False
-
- DATETIME_RE = re.compile(
- "(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)(?:\.(\d+))?")
- TIME_RE = re.compile("(\d+):(\d+):(\d+)(?:\.(\d+))?")
- DATE_RE = re.compile("(\d+)-(\d+)-(\d+)")
-
- str_to_datetime = str_to_datetime_processor_factory(DATETIME_RE,
- datetime.datetime)
- str_to_time = str_to_datetime_processor_factory(TIME_RE, datetime.time)
- str_to_date = str_to_datetime_processor_factory(DATE_RE, datetime.date)
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/schema.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/schema.py
deleted file mode 100755
index e85c82ad..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/schema.py
+++ /dev/null
@@ -1,2950 +0,0 @@
-# sqlalchemy/schema.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""The schema module provides the building blocks for database metadata.
-
-Each element within this module describes a database entity which can be
-created and dropped, or is otherwise part of such an entity. Examples include
-tables, columns, sequences, and indexes.
-
-All entities are subclasses of :class:`~sqlalchemy.schema.SchemaItem`, and as
-defined in this module they are intended to be agnostic of any vendor-specific
-constructs.
-
-A collection of entities are grouped into a unit called
-:class:`~sqlalchemy.schema.MetaData`. MetaData serves as a logical grouping of
-schema elements, and can also be associated with an actual database connection
-such that operations involving the contained elements can contact the database
-as needed.
-
-Two of the elements here also build upon their "syntactic" counterparts, which
- are defined in :mod:`~sqlalchemy.sql.expression`, specifically
-:class:`~sqlalchemy.schema.Table` and :class:`~sqlalchemy.schema.Column`.
-Since these objects are part of the SQL expression language, they are usable
-as components in SQL expressions.
-
-"""
-import re, inspect
-from sqlalchemy import exc, util, dialects
-from sqlalchemy.sql import expression, visitors
-from sqlalchemy import event, events
-
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-url = util.importlater("sqlalchemy.engine", "url")
-sqltypes = util.importlater("sqlalchemy", "types")
-
-__all__ = ['SchemaItem', 'Table', 'Column', 'ForeignKey', 'Sequence', 'Index',
- 'ForeignKeyConstraint', 'PrimaryKeyConstraint', 'CheckConstraint',
- 'UniqueConstraint', 'DefaultGenerator', 'Constraint', 'MetaData',
- 'ThreadLocalMetaData', 'SchemaVisitor', 'PassiveDefault',
- 'DefaultClause', 'FetchedValue', 'ColumnDefault', 'DDL',
- 'CreateTable', 'DropTable', 'CreateSequence', 'DropSequence',
- 'AddConstraint', 'DropConstraint',
- ]
-__all__.sort()
-
-RETAIN_SCHEMA = util.symbol('retain_schema')
-
-class SchemaItem(events.SchemaEventTarget, visitors.Visitable):
- """Base class for items that define a database schema."""
-
- __visit_name__ = 'schema_item'
- quote = None
-
- def _init_items(self, *args):
- """Initialize the list of child items for this SchemaItem."""
-
- for item in args:
- if item is not None:
- item._set_parent_with_dispatch(self)
-
- def get_children(self, **kwargs):
- """used to allow SchemaVisitor access"""
- return []
-
- def __repr__(self):
- return "%s()" % self.__class__.__name__
-
- @util.memoized_property
- def info(self):
- return {}
-
-def _get_table_key(name, schema):
- if schema is None:
- return name
- else:
- return schema + "." + name
-
-
-class Table(SchemaItem, expression.TableClause):
- """Represent a table in a database.
-
- e.g.::
-
- mytable = Table("mytable", metadata,
- Column('mytable_id', Integer, primary_key=True),
- Column('value', String(50))
- )
-
- The :class:`.Table` object constructs a unique instance of itself based on its
- name and optional schema name within the given :class:`.MetaData` object.
- Calling the :class:`.Table`
- constructor with the same name and same :class:`.MetaData` argument
- a second time will return the *same* :class:`.Table` object - in this way
- the :class:`.Table` constructor acts as a registry function.
-
- Constructor arguments are as follows:
-
- :param name: The name of this table as represented in the database.
-
- This property, along with the *schema*, indicates the *singleton
- identity* of this table in relation to its parent :class:`.MetaData`.
- Additional calls to :class:`.Table` with the same name, metadata,
- and schema name will return the same :class:`.Table` object.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- :param metadata: a :class:`.MetaData` object which will contain this
- table. The metadata is used as a point of association of this table
- with other tables which are referenced via foreign key. It also
- may be used to associate this table with a particular
- :class:`~sqlalchemy.engine.base.Connectable`.
-
- :param \*args: Additional positional arguments are used primarily
- to add the list of :class:`.Column` objects contained within this
- table. Similar to the style of a CREATE TABLE statement, other
- :class:`.SchemaItem` constructs may be added here, including
- :class:`.PrimaryKeyConstraint`, and :class:`.ForeignKeyConstraint`.
-
- :param autoload: Defaults to False: the Columns for this table should
- be reflected from the database. Usually there will be no Column
- objects in the constructor if this property is set.
-
- :param autoload_with: If autoload==True, this is an optional Engine
- or Connection instance to be used for the table reflection. If
- ``None``, the underlying MetaData's bound connectable will be used.
-
- :param extend_existing: When ``True``, indicates that if this Table is already
- present in the given :class:`.MetaData`, apply further arguments within
- the constructor to the existing :class:`.Table`.
-
- If extend_existing or keep_existing are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- :param implicit_returning: True by default - indicates that
- RETURNING can be used by default to fetch newly inserted primary key
- values, for backends which support this. Note that
- create_engine() also provides an implicit_returning flag.
-
- :param include_columns: A list of strings indicating a subset of
- columns to be loaded via the ``autoload`` operation; table columns that
- aren't present in this list will not be represented on the resulting
- ``Table`` object. Defaults to ``None`` which indicates all columns
- should be reflected.
-
- :param info: A dictionary which defaults to ``{}``. A space to store
- application specific data. This must be a dictionary.
-
- :param keep_existing: When ``True``, indicates that if this Table
- is already present in the given :class:`.MetaData`, ignore
- further arguments within the constructor to the existing
- :class:`.Table`, and return the :class:`.Table` object as
- originally created. This is to allow a function that wishes
- to define a new :class:`.Table` on first call, but on
- subsequent calls will return the same :class:`.Table`,
- without any of the declarations (particularly constraints)
- being applied a second time. Also see extend_existing.
-
- If extend_existing or keep_existing are not set, an error is
- raised if additional table modifiers are specified when
- the given :class:`.Table` is already present in the :class:`.MetaData`.
-
- :param listeners: A list of tuples of the form ``(<eventname>, <fn>)``
- which will be passed to :func:`.event.listen` upon construction.
- This alternate hook to :func:`.event.listen` allows the establishment
- of a listener function specific to this :class:`.Table` before
- the "autoload" process begins. Particularly useful for
- the :meth:`.events.column_reflect` event::
-
- def listen_for_reflect(table, column_info):
- "handle the column reflection event"
- # ...
-
- t = Table(
- 'sometable',
- autoload=True,
- listeners=[
- ('column_reflect', listen_for_reflect)
- ])
-
- :param mustexist: When ``True``, indicates that this Table must already
- be present in the given :class:`.MetaData` collection, else
- an exception is raised.
-
- :param prefixes:
- A list of strings to insert after CREATE in the CREATE TABLE
- statement. They will be separated by spaces.
-
- :param quote: Force quoting of this table's name on or off, corresponding
- to ``True`` or ``False``. When left at its default of ``None``,
- the column identifier will be quoted according to whether the name is
- case sensitive (identifiers with at least one upper case character are
- treated as case sensitive), or if it's a reserved word. This flag
- is only needed to force quoting of a reserved word which is not known
- by the SQLAlchemy dialect.
-
- :param quote_schema: same as 'quote' but applies to the schema identifier.
-
- :param schema: The *schema name* for this table, which is required if
- the table resides in a schema other than the default selected schema
- for the engine's database connection. Defaults to ``None``.
-
- :param useexisting: Deprecated. Use extend_existing.
-
- """
-
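A short sketch of the registry behavior described above: calling Table again with the same name and MetaData returns the identical object:

    from sqlalchemy import MetaData, Table, Column, Integer

    md = MetaData()
    t1 = Table('users', md, Column('id', Integer, primary_key=True))
    t2 = Table('users', md)   # no new arguments: the existing Table returns
    assert t1 is t2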
- __visit_name__ = 'table'
-
- def __new__(cls, *args, **kw):
- if not args:
- # python3k pickle seems to call this
- return object.__new__(cls)
-
- try:
- name, metadata, args = args[0], args[1], args[2:]
- except IndexError:
- raise TypeError("Table() takes at least two arguments")
-
- schema = kw.get('schema', None)
- keep_existing = kw.pop('keep_existing', False)
- extend_existing = kw.pop('extend_existing', False)
- if 'useexisting' in kw:
- util.warn_deprecated("useexisting is deprecated. Use extend_existing.")
- if extend_existing:
- raise exc.ArgumentError("useexisting is synonymous "
- "with extend_existing.")
- extend_existing = kw.pop('useexisting', False)
-
- if keep_existing and extend_existing:
- raise exc.ArgumentError("keep_existing and extend_existing "
- "are mutually exclusive.")
-
- mustexist = kw.pop('mustexist', False)
- key = _get_table_key(name, schema)
- if key in metadata.tables:
- if not keep_existing and not extend_existing and bool(args):
- raise exc.InvalidRequestError(
- "Table '%s' is already defined for this MetaData "
- "instance. Specify 'extend_existing=True' "
- "to redefine "
- "options and columns on an "
- "existing Table object." % key)
- table = metadata.tables[key]
- if extend_existing:
- table._init_existing(*args, **kw)
- return table
- else:
- if mustexist:
- raise exc.InvalidRequestError(
- "Table '%s' not defined" % (key))
- table = object.__new__(cls)
- table.dispatch.before_parent_attach(table, metadata)
- metadata._add_table(name, schema, table)
- try:
- table._init(name, metadata, *args, **kw)
- table.dispatch.after_parent_attach(table, metadata)
- return table
- except:
- metadata._remove_table(name, schema)
- raise
-
- def __init__(self, *args, **kw):
- """Constructor for :class:`~.schema.Table`.
-
- This method is a no-op. See the top-level
- documentation for :class:`~.schema.Table`
- for constructor arguments.
-
- """
- # __init__ is overridden to prevent __new__ from
- # calling the superclass constructor.
-
- def _init(self, name, metadata, *args, **kwargs):
- super(Table, self).__init__(name)
- self.metadata = metadata
- self.schema = kwargs.pop('schema', None)
- self.indexes = set()
- self.constraints = set()
- self._columns = expression.ColumnCollection()
- PrimaryKeyConstraint()._set_parent_with_dispatch(self)
- self.foreign_keys = set()
- self._extra_dependencies = set()
- self.kwargs = {}
- if self.schema is not None:
- self.fullname = "%s.%s" % (self.schema, self.name)
- else:
- self.fullname = self.name
-
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- include_columns = kwargs.pop('include_columns', None)
-
- self.implicit_returning = kwargs.pop('implicit_returning', True)
- self.quote = kwargs.pop('quote', None)
- self.quote_schema = kwargs.pop('quote_schema', None)
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
- if 'listeners' in kwargs:
- listeners = kwargs.pop('listeners')
- for evt, fn in listeners:
- event.listen(self, evt, fn)
-
- self._prefixes = kwargs.pop('prefixes', [])
-
- self._extra_kwargs(**kwargs)
-
- # load column definitions from the database if 'autoload' is defined
- # we do it after the table is in the singleton dictionary to support
- # circular foreign keys
- if autoload:
- if autoload_with:
- autoload_with.reflecttable(self,
- include_columns=include_columns)
- else:
- _bind_or_error(metadata,
- msg="No engine is bound to this Table's MetaData. "
- "Pass an engine to the Table via "
- "autoload_with=<someengine>, "
- "or associate the MetaData with an engine via "
- "metadata.bind=<someengine>").\
- reflecttable(self, include_columns=include_columns)
-
- # initialize all the column, etc. objects. done after reflection to
- # allow user-overrides
- self._init_items(*args)
-
- @property
- def _sorted_constraints(self):
- """Return the set of constraints as a list, sorted by creation order."""
-
- return sorted(self.constraints, key=lambda c:c._creation_order)
-
- def _init_existing(self, *args, **kwargs):
- autoload = kwargs.pop('autoload', False)
- autoload_with = kwargs.pop('autoload_with', None)
- schema = kwargs.pop('schema', None)
- if schema and schema != self.schema:
- raise exc.ArgumentError(
- "Can't change schema of existing table from '%s' to '%s'",
- (self.schema, schema))
-
- include_columns = kwargs.pop('include_columns', None)
- if include_columns:
- for c in self.c:
- if c.name not in include_columns:
- self._columns.remove(c)
-
- for key in ('quote', 'quote_schema'):
- if key in kwargs:
- setattr(self, key, kwargs.pop(key))
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- self._extra_kwargs(**kwargs)
- self._init_items(*args)
-
- def _extra_kwargs(self, **kwargs):
- # validate remaining kwargs that they all specify DB prefixes
- if len([k for k in kwargs
- if not re.match(
- r'^(?:%s)_' %
- '|'.join(dialects.__all__), k
- )
- ]):
- raise TypeError(
- "Invalid argument(s) for Table: %r" % kwargs.keys())
- self.kwargs.update(kwargs)
-
- def _init_collections(self):
- pass
-
-
- @util.memoized_property
- def _autoincrement_column(self):
- for col in self.primary_key:
- if col.autoincrement and \
- issubclass(col.type._type_affinity, sqltypes.Integer) and \
- not col.foreign_keys and \
- isinstance(col.default, (type(None), Sequence)) and \
- (col.server_default is None or col.server_default.reflected):
- return col
-
- @property
- def key(self):
- return _get_table_key(self.name, self.schema)
-
- def __repr__(self):
- return "Table(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.metadata)] +
- [repr(x) for x in self.columns] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in ['schema']])
-
- def __str__(self):
- return _get_table_key(self.description, self.schema)
-
- @property
- def bind(self):
- """Return the connectable associated with this Table."""
-
- return self.metadata and self.metadata.bind or None
-
- def add_is_dependent_on(self, table):
- """Add a 'dependency' for this Table.
-
- This is another Table object which must be created
- first before this one can, or dropped after this one.
-
- Usually, dependencies between tables are determined via
- ForeignKey objects. However, for other situations that
- create dependencies outside of foreign keys (rules, inheriting),
- this method can manually establish such a link.
-
- """
- self._extra_dependencies.add(table)
-
- def append_column(self, column):
- """Append a :class:`~.schema.Column` to this :class:`~.schema.Table`.
-
- The "key" of the newly added :class:`~.schema.Column`, i.e. the
- value of its ``.key`` attribute, will then be available
- in the ``.c`` collection of this :class:`~.schema.Table`, and the
- column definition will be included in any CREATE TABLE, SELECT,
- UPDATE, etc. statements generated from this :class:`~.schema.Table`
- construct.
-
- Note that this does **not** change the definition of the table
- as it exists within any underlying database, assuming that
- table has already been created in the database. Relational
- databases support the addition of columns to existing tables
- using the SQL ALTER command, which would need to be
- emitted for an already-existing table that doesn't contain
- the newly added column.
-
- """
-
- column._set_parent_with_dispatch(self)
-
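A minimal sketch of append_column per the docstring above; table and column names are illustrative:

    from sqlalchemy import MetaData, Table, Column, Integer, String

    md = MetaData()
    t = Table('t', md, Column('id', Integer, primary_key=True))
    t.append_column(Column('name', String(50)))
    assert 'name' in t.c   # now present in the .c collection
    # note: no ALTER TABLE is emitted for an already-created database table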
- def append_constraint(self, constraint):
- """Append a :class:`~.schema.Constraint` to this :class:`~.schema.Table`.
-
- This has the effect of the constraint being included in any
- future CREATE TABLE statement, assuming specific DDL creation
- events have not been associated with the given :class:`~.schema.Constraint`
- object.
-
- Note that this does **not** produce the constraint within the
- relational database automatically, for a table that already exists
- in the database. To add a constraint to an
- existing relational database table, the SQL ALTER command must
- be used. SQLAlchemy also provides the :class:`.AddConstraint` construct
- which can produce this SQL when invoked as an executable clause.
-
- """
-
- constraint._set_parent_with_dispatch(self)
-
- def append_ddl_listener(self, event_name, listener):
- """Append a DDL event listener to this ``Table``.
-
- Deprecated. See :class:`.DDLEvents`.
-
- """
-
- def adapt_listener(target, connection, **kw):
- listener(event_name, target, connection, **kw)
-
- event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
-
- def _set_parent(self, metadata):
- metadata._add_table(self.name, self.schema, self)
- self.metadata = metadata
-
- def get_children(self, column_collections=True,
- schema_visitor=False, **kw):
- if not schema_visitor:
- return expression.TableClause.get_children(
- self, column_collections=column_collections, **kw)
- else:
- if column_collections:
- return list(self.columns)
- else:
- return []
-
- def exists(self, bind=None):
- """Return True if this table exists."""
-
- if bind is None:
- bind = _bind_or_error(self)
-
- return bind.run_callable(bind.dialect.has_table,
- self.name, schema=self.schema)
-
- def create(self, bind=None, checkfirst=False):
- """Issue a ``CREATE`` statement for this table.
-
- See also ``metadata.create_all()``.
-
- """
-
- if bind is None:
- bind = _bind_or_error(self)
- bind.create(self, checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue a ``DROP`` statement for this table.
-
- See also ``metadata.drop_all()``.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind.drop(self, checkfirst=checkfirst)
-
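An illustrative round trip through the three methods above, assuming an in-memory SQLite engine:

    from sqlalchemy import create_engine, MetaData, Table, Column, Integer

    engine = create_engine("sqlite://")
    md = MetaData()
    t = Table('demo', md, Column('id', Integer, primary_key=True))
    t.create(bind=engine, checkfirst=True)
    assert t.exists(bind=engine)
    t.drop(bind=engine)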
-
- def tometadata(self, metadata, schema=RETAIN_SCHEMA):
- """Return a copy of this :class:`.Table` associated with a different
- :class:`.MetaData`.
-
- E.g.::
-
- # create two metadata
- meta1 = MetaData('sqlite:///querytest.db')
- meta2 = MetaData()
-
- # load 'users' from the sqlite engine
- users_table = Table('users', meta1, autoload=True)
-
- # create the same Table object for the plain metadata
- users_table_2 = users_table.tometadata(meta2)
-
- """
-
- if schema is RETAIN_SCHEMA:
- schema = self.schema
- key = _get_table_key(self.name, schema)
- if key in metadata.tables:
- util.warn("Table '%s' already exists within the given "
- "MetaData - not copying." % self.description)
- return metadata.tables[key]
-
- args = []
- for c in self.columns:
- args.append(c.copy(schema=schema))
- for c in self.constraints:
- args.append(c.copy(schema=schema))
- table = Table(
- self.name, metadata, schema=schema,
- *args, **self.kwargs
- )
- for index in self.indexes:
- # skip indexes that would be generated
- # by the 'index' flag on Column
- if len(index.columns) == 1 and \
- list(index.columns)[0].index:
- continue
- Index(index.name,
- unique=index.unique,
- *[table.c[col] for col in index.columns.keys()],
- **index.kwargs)
- table.dispatch._update(self.dispatch)
- return table
-
-class Column(SchemaItem, expression.ColumnClause):
- """Represents a column in a database table."""
-
- __visit_name__ = 'column'
-
- def __init__(self, *args, **kwargs):
- """
- Construct a new ``Column`` object.
-
- :param name: The name of this column as represented in the database.
- This argument may be the first positional argument, or specified
- via keyword.
-
- Names which contain no upper case characters
- will be treated as case insensitive names, and will not be quoted
- unless they are a reserved word. Names with any number of upper
- case characters will be quoted and sent exactly. Note that this
- behavior applies even for databases which standardize upper
- case names as case insensitive such as Oracle.
-
- The name field may be omitted at construction time and applied
- later, at any time before the Column is associated with a
- :class:`.Table`. This is to support convenient
- usage within the :mod:`~sqlalchemy.ext.declarative` extension.
-
- :param type\_: The column's type, indicated using an instance which
- subclasses :class:`~sqlalchemy.types.TypeEngine`. If no arguments
- are required for the type, the class of the type can be sent
- as well, e.g.::
-
- # use a type with arguments
- Column('data', String(50))
-
- # use no arguments
- Column('level', Integer)
-
- The ``type`` argument may be the second positional argument
- or specified by keyword.
-
- There is partial support for automatic detection of the
- type based on that of a :class:`.ForeignKey` associated
- with this column, if the type is specified as ``None``.
- However, this feature is not fully implemented and
- may not function in all cases.
-
- :param \*args: Additional positional arguments include various
- :class:`.SchemaItem` derived constructs which will be applied
- as options to the column. These include instances of
- :class:`.Constraint`, :class:`.ForeignKey`, :class:`.ColumnDefault`,
- and :class:`.Sequence`. In some cases an equivalent keyword
- argument is available such as ``server_default``, ``default``
- and ``unique``.
-
- :param autoincrement: This flag may be set to ``False`` to
- indicate an integer primary key column that should not be
- considered to be the "autoincrement" column, that is
- the integer primary key column which generates values
- implicitly upon INSERT and whose value is usually returned
- via the DBAPI cursor.lastrowid attribute. It defaults
- to ``True`` to satisfy the common use case of a table
- with a single integer primary key column. If the table
- has a composite primary key consisting of more than one
- integer column, set this flag to True only on the
- column that should be considered "autoincrement".
-
- The setting *only* has an effect for columns which are:
-
- * Integer derived (i.e. INT, SMALLINT, BIGINT).
-
- * Part of the primary key
-
- * Are not referenced by any foreign keys
-
- * have no server side or client side defaults (with the exception
- of Postgresql SERIAL).
-
- The setting has these two effects on columns that meet the
- above criteria:
-
- * DDL issued for the column will include database-specific
- keywords intended to signify this column as an
- "autoincrement" column, such as AUTO INCREMENT on MySQL,
- SERIAL on Postgresql, and IDENTITY on MS-SQL. It does
- *not* issue AUTOINCREMENT for SQLite since this is a
- special SQLite flag that is not required for autoincrementing
- behavior. See the SQLite dialect documentation for
- information on SQLite's AUTOINCREMENT.
-
- * The column will be considered to be available as
- cursor.lastrowid or equivalent, for those dialects which
- "post fetch" newly inserted identifiers after a row has
- been inserted (SQLite, MySQL, MS-SQL). It does not have
- any effect in this regard for databases that use sequences
- to generate primary key identifiers (i.e. Firebird, Postgresql,
- Oracle).
-
- :param default: A scalar, Python callable, or
- :class:`~sqlalchemy.sql.expression.ClauseElement` representing the
- *default value* for this column, which will be invoked upon insert
- if this column is otherwise not specified in the VALUES clause of
- the insert. This is a shortcut to using :class:`.ColumnDefault` as
- a positional argument.
-
- Contrast this argument to ``server_default`` which creates a
- default generator on the database side.
-
- :param doc: optional String that can be used by the ORM or similar
- to document attributes. This attribute does not render SQL
- comments (a future attribute 'comment' will achieve that).
-
- :param key: An optional string identifier which will identify this
- ``Column`` object on the :class:`.Table`. When a key is provided,
- this is the only identifier referencing the ``Column`` within the
- application, including ORM attribute mapping; the ``name`` field
- is used only when rendering SQL.
-
- :param index: When ``True``, indicates that the column is indexed.
- This is a shortcut for using a :class:`.Index` construct on the
- table. To specify indexes with explicit names or indexes that
- contain multiple columns, use the :class:`.Index` construct
- instead.
-
- :param info: A dictionary which defaults to ``{}``. A space to store
- application specific data. This must be a dictionary.
-
- :param nullable: If set to the default of ``True``, indicates the
- column will be rendered as allowing NULL, else it's rendered as
- NOT NULL. This parameter is only used when issuing CREATE TABLE
- statements.
-
- :param onupdate: A scalar, Python callable, or
- :class:`~sqlalchemy.sql.expression.ClauseElement` representing a
- default value to be applied to the column within UPDATE
- statements, which will be invoked upon update if this column is not
- present in the SET clause of the update. This is a shortcut to
- using :class:`.ColumnDefault` as a positional argument with
- ``for_update=True``.
-
- :param primary_key: If ``True``, marks this column as a primary key
- column. Multiple columns can have this flag set to specify
- composite primary keys. As an alternative, the primary key of a
- :class:`.Table` can be specified via an explicit
- :class:`.PrimaryKeyConstraint` object.
-
- :param server_default: A :class:`.FetchedValue` instance, str, Unicode
- or :func:`~sqlalchemy.sql.expression.text` construct representing
- the DDL DEFAULT value for the column.
-
- String types will be emitted as-is, surrounded by single quotes::
-
- Column('x', Text, server_default="val")
-
- x TEXT DEFAULT 'val'
-
- A :func:`~sqlalchemy.sql.expression.text` expression will be
- rendered as-is, without quotes::
-
- Column('y', DateTime, server_default=text('NOW()'))
-
- y DATETIME DEFAULT NOW()
-
- Strings and text() will be converted into a :class:`.DefaultClause`
- object upon initialization.
-
- Use :class:`.FetchedValue` to indicate that an already-existing
- column will generate a default value on the database side which
- will be available to SQLAlchemy for post-fetch after inserts. This
- construct does not specify any DDL and the implementation is left
- to the database, such as via a trigger.
-
- :param server_onupdate: A :class:`.FetchedValue` instance
- representing a database-side default generation function. This
- indicates to SQLAlchemy that a newly generated value will be
- available after updates. This construct does not specify any DDL
- and the implementation is left to the database, such as via a
- trigger.
-
- :param quote: Force quoting of this column's name on or off,
- corresponding to ``True`` or ``False``. When left at its default
- of ``None``, the column identifier will be quoted according to
- whether the name is case sensitive (identifiers with at least one
- upper case character are treated as case sensitive), or if it's a
- reserved word. This flag is only needed to force quoting of a
- reserved word which is not known by the SQLAlchemy dialect.
-
- :param unique: When ``True``, indicates that this column contains a
- unique constraint, or if ``index`` is ``True`` as well, indicates
- that the :class:`.Index` should be created with the unique flag.
- To specify multiple columns in the constraint/index or to specify
- an explicit name, use the :class:`.UniqueConstraint` or
- :class:`.Index` constructs explicitly.
-
- """
-
- name = kwargs.pop('name', None)
- type_ = kwargs.pop('type_', None)
- args = list(args)
- if args:
- if isinstance(args[0], basestring):
- if name is not None:
- raise exc.ArgumentError(
- "May not pass name positionally and as a keyword.")
- name = args.pop(0)
- if args:
- coltype = args[0]
-
- if (isinstance(coltype, sqltypes.TypeEngine) or
- (isinstance(coltype, type) and
- issubclass(coltype, sqltypes.TypeEngine))):
- if type_ is not None:
- raise exc.ArgumentError(
- "May not pass type_ positionally and as a keyword.")
- type_ = args.pop(0)
-
- no_type = type_ is None
-
- super(Column, self).__init__(name, None, type_)
- self.key = kwargs.pop('key', name)
- self.primary_key = kwargs.pop('primary_key', False)
- self.nullable = kwargs.pop('nullable', not self.primary_key)
- self.default = kwargs.pop('default', None)
- self.server_default = kwargs.pop('server_default', None)
- self.server_onupdate = kwargs.pop('server_onupdate', None)
- self.index = kwargs.pop('index', None)
- self.unique = kwargs.pop('unique', None)
- self.quote = kwargs.pop('quote', None)
- self.doc = kwargs.pop('doc', None)
- self.onupdate = kwargs.pop('onupdate', None)
- self.autoincrement = kwargs.pop('autoincrement', True)
- self.constraints = set()
- self.foreign_keys = set()
-
- # check if this Column is proxying another column
- if '_proxies' in kwargs:
- self.proxies = kwargs.pop('_proxies')
- # otherwise, add DDL-related events
- elif isinstance(self.type, sqltypes.SchemaType):
- self.type._set_parent_with_dispatch(self)
-
- if self.default is not None:
- if isinstance(self.default, (ColumnDefault, Sequence)):
- args.append(self.default)
- else:
- if getattr(self.type, '_warn_on_bytestring', False):
- # Py3K
- #if isinstance(self.default, bytes):
- # Py2K
- if isinstance(self.default, str):
- # end Py2K
- util.warn("Unicode column received non-unicode "
- "default value.")
- args.append(ColumnDefault(self.default))
-
- if self.server_default is not None:
- if isinstance(self.server_default, FetchedValue):
- args.append(self.server_default)
- else:
- args.append(DefaultClause(self.server_default))
-
- if self.onupdate is not None:
- if isinstance(self.onupdate, (ColumnDefault, Sequence)):
- args.append(self.onupdate)
- else:
- args.append(ColumnDefault(self.onupdate, for_update=True))
-
- if self.server_onupdate is not None:
- if isinstance(self.server_onupdate, FetchedValue):
- args.append(self.server_onupdate)
- else:
- args.append(DefaultClause(self.server_onupdate,
- for_update=True))
- self._init_items(*args)
-
- if not self.foreign_keys and no_type:
- raise exc.ArgumentError("'type' is required on Column objects "
- "which have no foreign keys.")
- util.set_creation_order(self)
-
- if 'info' in kwargs:
- self.info = kwargs.pop('info')
-
- if kwargs:
- raise exc.ArgumentError(
- "Unknown arguments passed to Column: " + repr(kwargs.keys()))
-
- def __str__(self):
- if self.name is None:
- return "(no name)"
- elif self.table is not None:
- if self.table.named_with_column:
- return (self.table.description + "." + self.description)
- else:
- return self.description
- else:
- return self.description
-
- def references(self, column):
- """Return True if this Column references the given column via foreign
- key."""
-
- for fk in self.foreign_keys:
- if fk.column.proxy_set.intersection(column.proxy_set):
- return True
- else:
- return False
-
- def append_foreign_key(self, fk):
- fk._set_parent_with_dispatch(self)
-
- def __repr__(self):
- kwarg = []
- if self.key != self.name:
- kwarg.append('key')
- if self.primary_key:
- kwarg.append('primary_key')
- if not self.nullable:
- kwarg.append('nullable')
- if self.onupdate:
- kwarg.append('onupdate')
- if self.default:
- kwarg.append('default')
- if self.server_default:
- kwarg.append('server_default')
- return "Column(%s)" % ', '.join(
- [repr(self.name)] + [repr(self.type)] +
- [repr(x) for x in self.foreign_keys if x is not None] +
- [repr(x) for x in self.constraints] +
- [(self.table is not None and "table=<%s>" %
- self.table.description or "")] +
- ["%s=%s" % (k, repr(getattr(self, k))) for k in kwarg])
-
- def _set_parent(self, table):
- if not self.name:
- raise exc.ArgumentError(
- "Column must be constructed with a non-blank name or "
- "assign a non-blank .name before adding to a Table.")
- if self.key is None:
- self.key = self.name
-
- if getattr(self, 'table', None) is not None:
- raise exc.ArgumentError(
- "Column object already assigned to Table '%s'" %
- self.table.description)
-
- if self.key in table._columns:
- col = table._columns.get(self.key)
- for fk in list(col.foreign_keys):
- col.foreign_keys.remove(fk)
- table.foreign_keys.remove(fk)
- if fk.constraint in table.constraints:
- # this might have been removed
- # already, if it's a composite constraint
- # and more than one col being replaced
- table.constraints.remove(fk.constraint)
-
- table._columns.replace(self)
-
- if self.primary_key:
- table.primary_key._replace(self)
- elif self.key in table.primary_key:
- raise exc.ArgumentError(
- "Trying to redefine primary-key column '%s' as a "
- "non-primary-key column on table '%s'" % (
- self.key, table.fullname))
- self.table = table
-
- if self.index:
- if isinstance(self.index, basestring):
- raise exc.ArgumentError(
- "The 'index' keyword argument on Column is boolean only. "
- "To create indexes with a specific name, create an "
- "explicit Index object external to the Table.")
- Index(expression._generated_label('ix_%s' % self._label), self, unique=self.unique)
- elif self.unique:
- if isinstance(self.unique, basestring):
- raise exc.ArgumentError(
- "The 'unique' keyword argument on Column is boolean "
- "only. To create unique constraints or indexes with a "
- "specific name, append an explicit UniqueConstraint to "
- "the Table's list of elements, or create an explicit "
- "Index object external to the Table.")
- table.append_constraint(UniqueConstraint(self.key))
-
- def _on_table_attach(self, fn):
- if self.table is not None:
- fn(self, self.table)
- event.listen(self, 'after_parent_attach', fn)
-
- def copy(self, **kw):
- """Create a copy of this ``Column``, unitialized.
-
- This is used in ``Table.tometadata``.
-
- """
-
- # Constraint objects plus non-constraint-bound ForeignKey objects
- args = \
- [c.copy(**kw) for c in self.constraints] + \
- [c.copy(**kw) for c in self.foreign_keys if not c.constraint]
-
- c = Column(
- name=self.name,
- type_=self.type,
- key = self.key,
- primary_key = self.primary_key,
- nullable = self.nullable,
- unique = self.unique,
- quote=self.quote,
- index=self.index,
- autoincrement=self.autoincrement,
- default=self.default,
- server_default=self.server_default,
- onupdate=self.onupdate,
- server_onupdate=self.server_onupdate,
- info=self.info,
- doc=self.doc,
- *args
- )
- c.dispatch._update(self.dispatch)
- return c
-
- def _make_proxy(self, selectable, name=None):
- """Create a *proxy* for this column.
-
- This is a copy of this ``Column`` referenced by a different parent
- (such as an alias or select statement). The column should
- be used only in select scenarios, as its full DDL/default
- information is not transferred.
-
- """
- fk = [ForeignKey(f.column) for f in self.foreign_keys]
- if name is None and self.name is None:
- raise exc.InvalidRequestError("Cannot initialize a sub-selectable"
- " with this Column object until it's 'name' has "
- "been assigned.")
- try:
- c = self._constructor(
- name or self.name,
- self.type,
- key = name or self.key,
- primary_key = self.primary_key,
- nullable = self.nullable,
- quote=self.quote, _proxies=[self], *fk)
- except TypeError, e:
- # Py3K
- #raise TypeError(
- # "Could not create a copy of this %r object. "
- # "Ensure the class includes a _constructor() "
- # "attribute or method which accepts the "
- # "standard Column constructor arguments, or "
- # "references the Column class itself." % self.__class__) from e
- # Py2K
- raise TypeError(
- "Could not create a copy of this %r object. "
- "Ensure the class includes a _constructor() "
- "attribute or method which accepts the "
- "standard Column constructor arguments, or "
- "references the Column class itself. "
- "Original error: %s" % (self.__class__, e))
- # end Py2K
-
- c.table = selectable
- selectable._columns.add(c)
- if self.primary_key:
- selectable.primary_key.add(c)
- c.dispatch.after_parent_attach(c, selectable)
- return c
-
- def get_children(self, schema_visitor=False, **kwargs):
- if schema_visitor:
- return [x for x in (self.default, self.onupdate)
- if x is not None] + \
- list(self.foreign_keys) + list(self.constraints)
- else:
- return expression.ColumnClause.get_children(self, **kwargs)
-
-
-class ForeignKey(SchemaItem):
- """Defines a dependency between two columns.
-
- ``ForeignKey`` is specified as an argument to a :class:`.Column` object,
- e.g.::
-
- t = Table("remote_table", metadata,
- Column("remote_id", ForeignKey("main_table.id"))
- )
-
- Note that ``ForeignKey`` is only a marker object that defines
- a dependency between two columns. The actual constraint
- is in all cases represented by the :class:`.ForeignKeyConstraint`
- object. This object will be generated automatically when
- a ``ForeignKey`` is associated with a :class:`.Column` which
- in turn is associated with a :class:`.Table`. Conversely,
- when :class:`.ForeignKeyConstraint` is applied to a :class:`.Table`,
- ``ForeignKey`` markers are automatically generated to be
- present on each associated :class:`.Column`, which are also
- associated with the constraint object.
-
- Note that you cannot define a "composite" foreign key constraint,
- that is a constraint between a grouping of multiple parent/child
- columns, using ``ForeignKey`` objects. To define this grouping,
- the :class:`.ForeignKeyConstraint` object must be used, and applied
- to the :class:`.Table`. The associated ``ForeignKey`` objects
- are created automatically.
-
- The ``ForeignKey`` objects associated with an individual
- :class:`.Column` object are available in the `foreign_keys` collection
- of that column.
-
- Further examples of foreign key configuration are in
- :ref:`metadata_foreignkeys`.
-
- """
-
- __visit_name__ = 'foreign_key'
-
- def __init__(self, column, _constraint=None, use_alter=False, name=None,
- onupdate=None, ondelete=None, deferrable=None,
- initially=None, link_to_name=False):
- """
- Construct a column-level FOREIGN KEY.
-
- The :class:`.ForeignKey` object when constructed generates a
- :class:`.ForeignKeyConstraint` which is associated with the parent
- :class:`.Table` object's collection of constraints.
-
- :param column: A single target column for the key relationship. A
- :class:`.Column` object or a column name as a string:
- ``tablename.columnkey`` or ``schema.tablename.columnkey``.
- ``columnkey`` is the ``key`` which has been assigned to the column
- (defaults to the column name itself), unless ``link_to_name`` is
- ``True`` in which case the rendered name of the column is used.
-
- :param name: Optional string. An in-database name for the key if
- `constraint` is not provided.
-
- :param onupdate: Optional string. If set, emit ON UPDATE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param ondelete: Optional string. If set, emit ON DELETE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
- DEFERRABLE when issuing DDL for this constraint.
-
- :param initially: Optional string. If set, emit INITIALLY <value> when
- issuing DDL for this constraint.
-
- :param link_to_name: if True, the string name given in ``column`` is
- the rendered name of the referenced column, not its locally
- assigned ``key``.
-
- :param use_alter: passed to the underlying
- :class:`.ForeignKeyConstraint` to indicate the constraint should be
- generated/dropped externally from the CREATE TABLE / DROP TABLE
- statement. See that class's constructor for details.
-
- """
-
- self._colspec = column
-
- # the linked ForeignKeyConstraint.
- # ForeignKey will create this when parent Column
- # is attached to a Table, *or* ForeignKeyConstraint
- # object passes itself in when creating ForeignKey
- # markers.
- self.constraint = _constraint
-
-
- self.use_alter = use_alter
- self.name = name
- self.onupdate = onupdate
- self.ondelete = ondelete
- self.deferrable = deferrable
- self.initially = initially
- self.link_to_name = link_to_name
-
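A sketch of the composite case called out in the class docstring: ForeignKeyConstraint applied to the Table generates the per-column ForeignKey markers automatically:

    from sqlalchemy import MetaData, Table, Column, Integer, \
        ForeignKeyConstraint

    md = MetaData()
    parent = Table('parent', md,
                   Column('x', Integer, primary_key=True),
                   Column('y', Integer, primary_key=True))
    child = Table('child', md,
                  Column('px', Integer),
                  Column('py', Integer),
                  ForeignKeyConstraint(['px', 'py'],
                                       ['parent.x', 'parent.y']))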
- def __repr__(self):
- return "ForeignKey(%r)" % self._get_colspec()
-
- def copy(self, schema=None):
- """Produce a copy of this :class:`.ForeignKey` object.
-
- The new :class:`.ForeignKey` will not be bound
- to any :class:`.Column`.
-
- This method is usually used by the internal
- copy procedures of :class:`.Column`, :class:`.Table`,
- and :class:`.MetaData`.
-
- :param schema: The returned :class:`.ForeignKey` will
- reference the original table and column name, qualified
- by the given string schema name.
-
- """
-
- fk = ForeignKey(
- self._get_colspec(schema=schema),
- use_alter=self.use_alter,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name
- )
- fk.dispatch._update(self.dispatch)
- return fk
-
- def _get_colspec(self, schema=None):
- """Return a string based 'column specification' for this :class:`.ForeignKey`.
-
- This is usually the equivalent of the string-based "tablename.colname"
- argument first passed to the object's constructor.
-
- """
- if schema:
- return schema + "." + self.column.table.name + \
- "." + self.column.key
- elif isinstance(self._colspec, basestring):
- return self._colspec
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- else:
- _column = self._colspec
-
- return "%s.%s" % (_column.table.fullname, _column.key)
-
- target_fullname = property(_get_colspec)
-
- def references(self, table):
- """Return True if the given :class:`.Table` is referenced by this :class:`.ForeignKey`."""
-
- return table.corresponding_column(self.column) is not None
-
- def get_referent(self, table):
- """Return the :class:`.Column` in the given :class:`.Table`
- referenced by this :class:`.ForeignKey`.
-
- Returns None if this :class:`.ForeignKey` does not reference the given
- :class:`.Table`.
-
- """
-
- return table.corresponding_column(self.column)
-
- @util.memoized_property
- def column(self):
- """Return the target :class:`.Column` referenced by this :class:`.ForeignKey`.
-
- If this :class:`.ForeignKey` was created using a
- string-based target column specification, this
- attribute will on first access initiate a resolution
- process to locate the referenced remote
- :class:`.Column`. The resolution process traverses
- to the parent :class:`.Column`, :class:`.Table`, and
- :class:`.MetaData` to proceed - if any of these aren't
- yet present, an error is raised.
-
- """
- # ForeignKey inits its remote column as late as possible, so tables
- # can be defined without dependencies
- if isinstance(self._colspec, basestring):
- # locate the parent table this foreign key is attached to. we
- # use the "original" column which our parent column represents
- # (its a list of columns/other ColumnElements if the parent
- # table is a UNION)
- for c in self.parent.base_columns:
- if isinstance(c, Column):
- parenttable = c.table
- break
- else:
- raise exc.ArgumentError(
- "Parent column '%s' does not descend from a "
- "table-attached Column" % str(self.parent))
-
- m = self._colspec.split('.')
-
- if m is None:
- raise exc.ArgumentError(
- "Invalid foreign key column specification: %s" %
- self._colspec)
-
- # A FK between column 'bar' and table 'foo' can be
- # specified as 'foo', 'foo.bar', 'dbo.foo.bar',
- # 'otherdb.dbo.foo.bar'. Once we have the column name and
- # the table name, treat everything else as the schema
- # name. Some databases (e.g. Sybase) support
- inter-database foreign keys. See tickets #1341 and --
- # indirectly related -- Ticket #594. This assumes that '.'
- # will never appear *within* any component of the FK.
-
- (schema, tname, colname) = (None, None, None)
- if (len(m) == 1):
- tname = m.pop()
- else:
- colname = m.pop()
- tname = m.pop()
-
- if (len(m) > 0):
- schema = '.'.join(m)
-
- if _get_table_key(tname, schema) not in parenttable.metadata:
- raise exc.NoReferencedTableError(
- "Foreign key associated with column '%s' could not find "
- "table '%s' with which to generate a "
- "foreign key to target column '%s'" % (self.parent, tname, colname),
- tname)
- table = Table(tname, parenttable.metadata,
- mustexist=True, schema=schema)
-
- _column = None
- if colname is None:
- # colname is None in the case that ForeignKey argument
- # was specified as table name only, in which case we
- # match the column name to the same column on the
- # parent.
- key = self.parent
- _column = table.c.get(self.parent.key, None)
- elif self.link_to_name:
- key = colname
- for c in table.c:
- if c.name == colname:
- _column = c
- else:
- key = colname
- _column = table.c.get(colname, None)
-
- if _column is None:
- raise exc.NoReferencedColumnError(
- "Could not create ForeignKey '%s' on table '%s': "
- "table '%s' has no column named '%s'" % (
- self._colspec, parenttable.name, table.name, key),
- table.name, key)
-
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
- else:
- _column = self._colspec
-
- # propagate TypeEngine to parent if it didn't have one
- if isinstance(self.parent.type, sqltypes.NullType):
- self.parent.type = _column.type
- return _column
-
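A short example of the lazy string-based resolution described above; the target column is located through the parent Table's MetaData on first access:

    from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey

    md = MetaData()
    parent = Table('parent', md, Column('id', Integer, primary_key=True))
    child = Table('child', md,
                  Column('pid', Integer, ForeignKey('parent.id')))
    fk = list(child.c.pid.foreign_keys)[0]
    assert fk.column is parent.c.id   # resolved via md when first accessed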
- def _set_parent(self, column):
- if hasattr(self, 'parent'):
- if self.parent is column:
- return
- raise exc.InvalidRequestError(
- "This ForeignKey already has a parent !")
- self.parent = column
- self.parent.foreign_keys.add(self)
- self.parent._on_table_attach(self._set_table)
-
- def _set_table(self, column, table):
- # standalone ForeignKey - create ForeignKeyConstraint
- # on the hosting Table when attached to the Table.
- if self.constraint is None and isinstance(table, Table):
- self.constraint = ForeignKeyConstraint(
- [], [], use_alter=self.use_alter, name=self.name,
- onupdate=self.onupdate, ondelete=self.ondelete,
- deferrable=self.deferrable, initially=self.initially,
- )
- self.constraint._elements[self.parent] = self
- self.constraint._set_parent_with_dispatch(table)
- table.foreign_keys.add(self)
-
-class _NotAColumnExpr(object):
- def _not_a_column_expr(self):
- raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression." % self.__class__.__name__)
-
- __clause_element__ = self_group = lambda self: self._not_a_column_expr()
- _from_objects = property(lambda self: self._not_a_column_expr())
-
-class DefaultGenerator(_NotAColumnExpr, SchemaItem):
- """Base class for column *default* values."""
-
- __visit_name__ = 'default_generator'
-
- is_sequence = False
- is_server_default = False
- column = None
-
- def __init__(self, for_update=False):
- self.for_update = for_update
-
- def _set_parent(self, column):
- self.column = column
- if self.for_update:
- self.column.onupdate = self
- else:
- self.column.default = self
-
- def execute(self, bind=None, **kwargs):
- if bind is None:
- bind = _bind_or_error(self)
- return bind._execute_default(self, **kwargs)
-
- @property
- def bind(self):
- """Return the connectable associated with this default."""
- if getattr(self, 'column', None) is not None:
- return self.column.table.bind
- else:
- return None
-
- def __repr__(self):
- return "DefaultGenerator()"
-
-
-class ColumnDefault(DefaultGenerator):
- """A plain default value on a column.
-
- This could correspond to a constant, a callable function,
- or a SQL clause.
-
- :class:`.ColumnDefault` is generated automatically
- whenever the ``default`` or ``onupdate`` arguments of
- :class:`.Column` are used. A :class:`.ColumnDefault`
- can be passed positionally as well.
-
- For example, the following::
-
- Column('foo', Integer, default=50)
-
- Is equivalent to::
-
- Column('foo', Integer, ColumnDefault(50))
-
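- A callable or SQL expression works the same way (a sketch; ``func``
- is ``sqlalchemy.func``, and a zero-argument callable is wrapped
- automatically to accept the execution context)::
-
- Column('created', DateTime, ColumnDefault(func.now()))
- Column('counter', Integer, ColumnDefault(lambda: 12))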
-
- """
-
- def __init__(self, arg, **kwargs):
- super(ColumnDefault, self).__init__(**kwargs)
- if isinstance(arg, FetchedValue):
- raise exc.ArgumentError(
- "ColumnDefault may not be a server-side default type.")
- if util.callable(arg):
- arg = self._maybe_wrap_callable(arg)
- self.arg = arg
-
- @util.memoized_property
- def is_callable(self):
- return util.callable(self.arg)
-
- @util.memoized_property
- def is_clause_element(self):
- return isinstance(self.arg, expression.ClauseElement)
-
- @util.memoized_property
- def is_scalar(self):
- return not self.is_callable and \
- not self.is_clause_element and \
- not self.is_sequence
-
- def _maybe_wrap_callable(self, fn):
- """Backward compat: Wrap callables that don't accept a context."""
-
- if inspect.isfunction(fn):
- inspectable = fn
- elif inspect.isclass(fn):
- inspectable = fn.__init__
- elif hasattr(fn, '__call__'):
- inspectable = fn.__call__
- else:
- # probably not inspectable; try anyway.
- inspectable = fn
- try:
- argspec = inspect.getargspec(inspectable)
- except TypeError:
- return lambda ctx: fn()
-
- positionals = len(argspec[0])
-
- # Py3K compat - no unbound methods
- if inspect.ismethod(inspectable) or inspect.isclass(fn):
- positionals -= 1
-
- if positionals == 0:
- return lambda ctx: fn()
-
- defaulted = argspec[3] is not None and len(argspec[3]) or 0
- if positionals - defaulted > 1:
- raise exc.ArgumentError(
- "ColumnDefault Python function takes zero or one "
- "positional arguments")
- return fn
-
- def _visit_name(self):
- if self.for_update:
- return "column_onupdate"
- else:
- return "column_default"
- __visit_name__ = property(_visit_name)
-
- def __repr__(self):
- return "ColumnDefault(%r)" % self.arg
-
-class Sequence(DefaultGenerator):
- """Represents a named database sequence.
-
- The :class:`.Sequence` object represents the name and configurational
- parameters of a database sequence. It also represents
- a construct that can be "executed" by a SQLAlchemy :class:`.Engine`
- or :class:`.Connection`, rendering the appropriate "next value" function
- for the target database and returning a result.
-
- The :class:`.Sequence` is typically associated with a primary key column::
-
- some_table = Table('some_table', metadata,
- Column('id', Integer, Sequence('some_table_seq'), primary_key=True)
- )
-
- When CREATE TABLE is emitted for the above :class:`.Table`, if the
- target platform supports sequences, a CREATE SEQUENCE statement will
- be emitted as well. For platforms that don't support sequences,
- the :class:`.Sequence` construct is ignored.
-
- See also: :class:`.CreateSequence`, :class:`.DropSequence`
-
- """
-
- __visit_name__ = 'sequence'
-
- is_sequence = True
-
- def __init__(self, name, start=None, increment=None, schema=None,
- optional=False, quote=None, metadata=None,
- for_update=False):
- """Construct a :class:`.Sequence` object.
-
- :param name: The name of the sequence.
- :param start: the starting index of the sequence. This value is
- used when the CREATE SEQUENCE command is emitted to the database
- as the value of the "START WITH" clause. If ``None``, the
- clause is omitted, which on most platforms indicates a starting
- value of 1.
- :param increment: the increment value of the sequence. This
- value is used when the CREATE SEQUENCE command is emitted to
- the database as the value of the "INCREMENT BY" clause. If ``None``,
- the clause is omitted, which on most platforms indicates an
- increment of 1.
- :param schema: Optional schema name for the sequence, if located
- in a schema other than the default.
- :param optional: boolean value, when ``True``, indicates that this
- :class:`.Sequence` object only needs to be explicitly generated
- on backends that don't provide another way to generate primary
- key identifiers. Currently, it essentially means, "don't create
- this sequence on the Postgresql backend, where the SERIAL keyword
- creates a sequence for us automatically".
- :param quote: boolean value, when ``True`` or ``False``, explicitly
- forces quoting of the schema name on or off. When left at its
- default of ``None``, normal quoting rules based on casing and reserved
- words take place.
- :param metadata: optional :class:`.MetaData` object which will be
- associated with this :class:`.Sequence`. A :class:`.Sequence`
- that is associated with a :class:`.MetaData` gains access to the
- ``bind`` of that :class:`.MetaData`, meaning the :meth:`.Sequence.create`
- and :meth:`.Sequence.drop` methods will make usage of that engine
- automatically. Additionally, the appropriate CREATE SEQUENCE/
- DROP SEQUENCE DDL commands will be emitted corresponding to this
- :class:`.Sequence` when :meth:`.MetaData.create_all` and
- :meth:`.MetaData.drop_all` are invoked (new in 0.7).
-
- Note that when a :class:`.Sequence` is applied to a :class:`.Column`,
- the :class:`.Sequence` is automatically associated with the
- :class:`.MetaData` object of that column's parent :class:`.Table`,
- when that association is made. The :class:`.Sequence` will then
- be subject to automatic CREATE SEQUENCE/DROP SEQUENCE corresponding
- to when the :class:`.Table` object itself is created or dropped,
- rather than that of the :class:`.MetaData` object overall.
- :param for_update: Indicates this :class:`.Sequence`, when associated
- with a :class:`.Column`, should be invoked for UPDATE statements
- on that column's table, rather than for INSERT statements, when
- no value is otherwise present for that column in the statement.
-
- """
- super(Sequence, self).__init__(for_update=for_update)
- self.name = name
- self.start = start
- self.increment = increment
- self.optional = optional
- self.quote = quote
- self.schema = schema
- self.metadata = metadata
- self._key = _get_table_key(name, schema)
- if metadata:
- self._set_metadata(metadata)
-
- @util.memoized_property
- def is_callable(self):
- return False
-
- @util.memoized_property
- def is_clause_element(self):
- return False
-
- def next_value(self):
- """Return a :class:`.next_value` function element
- which will render the appropriate increment function
- for this :class:`.Sequence` within any SQL expression.
-
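- E.g., a minimal sketch (``widgets`` is an assumed :class:`.Table`)::
-
- seq = Sequence('widget_id_seq')
- stmt = widgets.insert().values(id=seq.next_value())
-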
- """
- return expression.func.next_value(self, bind=self.bind)
-
- def __repr__(self):
- return "Sequence(%s)" % ', '.join(
- [repr(self.name)] +
- ["%s=%s" % (k, repr(getattr(self, k)))
- for k in ['start', 'increment', 'optional']])
-
- def _set_parent(self, column):
- super(Sequence, self)._set_parent(column)
- column._on_table_attach(self._set_table)
-
- def _set_table(self, column, table):
- self._set_metadata(table.metadata)
-
- def _set_metadata(self, metadata):
- self.metadata = metadata
- self.metadata._sequences[self._key] = self
-
- @property
- def bind(self):
- if self.metadata:
- return self.metadata.bind
- else:
- return None
-
- def create(self, bind=None, checkfirst=True):
- """Creates this sequence in the database."""
-
- if bind is None:
- bind = _bind_or_error(self)
- bind.create(self, checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=True):
- """Drops this sequence from the database."""
-
- if bind is None:
- bind = _bind_or_error(self)
- bind.drop(self, checkfirst=checkfirst)
-
- def _not_a_column_expr(self):
- raise exc.InvalidRequestError(
- "This %s cannot be used directly "
- "as a column expression. Use func.next_value(sequence) "
- "to produce a 'next value' function that's usable "
- "as a column element."
- % self.__class__.__name__)
-
-
-class FetchedValue(_NotAColumnExpr, events.SchemaEventTarget):
- """A marker for a transparent database-side default.
-
- Use :class:`.FetchedValue` when the database is configured
- to provide some automatic default for a column.
-
- E.g.::
-
- Column('foo', Integer, FetchedValue())
-
- Would indicate that some trigger or default generator
- will create a new value for the ``foo`` column during an
- INSERT.
-
- """
- is_server_default = True
- reflected = False
- has_argument = False
-
- def __init__(self, for_update=False):
- self.for_update = for_update
-
- def _set_parent(self, column):
- self.column = column
- if self.for_update:
- self.column.server_onupdate = self
- else:
- self.column.server_default = self
-
- def __repr__(self):
- return 'FetchedValue(for_update=%r)' % self.for_update
-
-
-class DefaultClause(FetchedValue):
- """A DDL-specified DEFAULT column value.
-
- :class:`.DefaultClause` is a :class:`.FetchedValue`
- that also generates a "DEFAULT" clause when
- "CREATE TABLE" is emitted.
-
- :class:`.DefaultClause` is generated automatically
- whenever the ``server_default`` or ``server_onupdate`` arguments of
- :class:`.Column` are used. A :class:`.DefaultClause`
- can be passed positionally as well.
-
- For example, the following::
-
- Column('foo', Integer, server_default="50")
-
- Is equivalent to::
-
- Column('foo', Integer, DefaultClause("50"))
-
- """
-
- has_argument = True
-
- def __init__(self, arg, for_update=False, _reflected=False):
- util.assert_arg_type(arg, (basestring,
- expression.ClauseElement,
- expression._TextClause), 'arg')
- super(DefaultClause, self).__init__(for_update)
- self.arg = arg
- self.reflected = _reflected
-
- def __repr__(self):
- return "DefaultClause(%r, for_update=%r)" % \
- (self.arg, self.for_update)
-
-class PassiveDefault(DefaultClause):
- """A DDL-specified DEFAULT column value.
-
- .. deprecated:: 0.6 :class:`.PassiveDefault` is deprecated.
- Use :class:`.DefaultClause`.
- """
- @util.deprecated("0.6",
- ":class:`.PassiveDefault` is deprecated. "
- "Use :class:`.DefaultClause`.",
- False)
- def __init__(self, *arg, **kw):
- DefaultClause.__init__(self, *arg, **kw)
-
-class Constraint(SchemaItem):
- """A table-level SQL constraint."""
-
- __visit_name__ = 'constraint'
-
- def __init__(self, name=None, deferrable=None, initially=None,
- _create_rule=None):
- """Create a SQL constraint.
-
- :param name:
- Optional, the in-database name of this ``Constraint``.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- :param _create_rule:
- a callable which is passed the DDLCompiler object during
- compilation. Returns True or False to signal inline generation of
- this Constraint.
-
- The AddConstraint and DropConstraint DDL constructs provide
- DDLElement's more comprehensive "conditional DDL" approach that is
- passed a database connection when DDL is being issued. _create_rule
- is instead called during any CREATE TABLE compilation, where there
- may not be any transaction/connection in progress. However, it
- allows conditional compilation of the constraint even for backends
- which do not support addition of constraints through ALTER TABLE,
- which currently includes SQLite.
-
- _create_rule is used by some types to create constraints.
- Currently, its call signature is subject to change at any time.
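-
- E.g., a deferrable constraint sketch (names are illustrative)::
-
- CheckConstraint('value > 0', name='positive_value',
- deferrable=True, initially='DEFERRED')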
-
- """
-
- self.name = name
- self.deferrable = deferrable
- self.initially = initially
- self._create_rule = _create_rule
- util.set_creation_order(self)
-
- @property
- def table(self):
- try:
- if isinstance(self.parent, Table):
- return self.parent
- except AttributeError:
- pass
- raise exc.InvalidRequestError(
- "This constraint is not bound to a table. Did you "
- "mean to call table.add_constraint(constraint) ?")
-
- def _set_parent(self, parent):
- self.parent = parent
- parent.constraints.add(self)
-
- def copy(self, **kw):
- raise NotImplementedError()
-
-class ColumnCollectionMixin(object):
- def __init__(self, *columns):
- self.columns = expression.ColumnCollection()
- self._pending_colargs = [_to_schema_column_or_string(c)
- for c in columns]
- if self._pending_colargs and \
- isinstance(self._pending_colargs[0], Column) and \
- self._pending_colargs[0].table is not None:
- self._set_parent_with_dispatch(self._pending_colargs[0].table)
-
- def _set_parent(self, table):
- for col in self._pending_colargs:
- if isinstance(col, basestring):
- col = table.c[col]
- self.columns.add(col)
-
-class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint):
- """A constraint that proxies a ColumnCollection."""
-
- def __init__(self, *columns, **kw):
- """
- :param \*columns:
- A sequence of column names or Column objects.
-
- :param name:
- Optional, the in-database name of this constraint.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
- ColumnCollectionMixin.__init__(self, *columns)
- Constraint.__init__(self, **kw)
-
- def _set_parent(self, table):
- ColumnCollectionMixin._set_parent(self, table)
- Constraint._set_parent(self, table)
-
- def __contains__(self, x):
- return x in self.columns
-
- def copy(self, **kw):
- c = self.__class__(name=self.name, deferrable=self.deferrable,
- initially=self.initially, *self.columns.keys())
- c.dispatch._update(self.dispatch)
- return c
-
- def contains_column(self, col):
- return self.columns.contains_column(col)
-
- def __iter__(self):
- # inlining of
- # return iter(self.columns)
- # ColumnCollection->OrderedProperties->OrderedDict
- ordered_dict = self.columns._data
- return (ordered_dict[key] for key in ordered_dict._list)
-
- def __len__(self):
- return len(self.columns._data)
-
-
-class CheckConstraint(Constraint):
- """A table- or column-level CHECK constraint.
-
- Can be included in the definition of a Table or Column.
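-
- E.g., a sketch of both forms (names are illustrative)::
-
- Table('data', metadata,
- Column('value', Integer),
- CheckConstraint('value > 5', name='value_gt_five')
- )
-
- Column('flag', Integer, CheckConstraint('flag IN (0, 1)'))
-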
- """
-
- def __init__(self, sqltext, name=None, deferrable=None,
- initially=None, table=None, _create_rule=None):
- """Construct a CHECK constraint.
-
- :param sqltext:
- A string containing the constraint definition, which will be used
- verbatim, or a SQL expression construct.
-
- :param name:
- Optional, the in-database name of the constraint.
-
- :param deferrable:
- Optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when
- issuing DDL for this constraint.
-
- :param initially:
- Optional string. If set, emit INITIALLY <value> when issuing DDL
- for this constraint.
-
- """
-
- super(CheckConstraint, self).\
- __init__(name, deferrable, initially, _create_rule)
- self.sqltext = expression._literal_as_text(sqltext)
- if table is not None:
- self._set_parent_with_dispatch(table)
-
- def __visit_name__(self):
- if isinstance(self.parent, Table):
- return "check_constraint"
- else:
- return "column_check_constraint"
- __visit_name__ = property(__visit_name__)
-
- def copy(self, **kw):
- c = CheckConstraint(self.sqltext,
- name=self.name,
- initially=self.initially,
- deferrable=self.deferrable,
- _create_rule=self._create_rule)
- c.dispatch._update(self.dispatch)
- return c
-
-class ForeignKeyConstraint(Constraint):
- """A table-level FOREIGN KEY constraint.
-
- Defines a single column or composite FOREIGN KEY ... REFERENCES
- constraint. For a no-frills, single column foreign key, adding a
- :class:`.ForeignKey` to the definition of a :class:`.Column` is a shorthand
- equivalent for an unnamed, single column :class:`.ForeignKeyConstraint`.
-
- Examples of foreign key configuration are in :ref:`metadata_foreignkeys`.
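-
- E.g., a composite key sketch (table and column names are
- illustrative)::
-
- Table('invoice_item', metadata,
- Column('invoice_id', Integer),
- Column('ref_num', Integer),
- ForeignKeyConstraint(
- ['invoice_id', 'ref_num'],
- ['invoice.invoice_id', 'invoice.ref_num']
- )
- )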
-
- """
- __visit_name__ = 'foreign_key_constraint'
-
- def __init__(self, columns, refcolumns, name=None, onupdate=None,
- ondelete=None, deferrable=None, initially=None, use_alter=False,
- link_to_name=False, table=None):
- """Construct a composite-capable FOREIGN KEY.
-
- :param columns: A sequence of local column names. The named columns
- must be defined and present in the parent Table. The names should
- match the ``key`` given to each column (defaults to the name) unless
- ``link_to_name`` is True.
-
- :param refcolumns: A sequence of foreign column names or Column
- objects. The columns must all be located within the same Table.
-
- :param name: Optional, the in-database name of the key.
-
- :param onupdate: Optional string. If set, emit ON UPDATE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param ondelete: Optional string. If set, emit ON DELETE <value> when
- issuing DDL for this constraint. Typical values include CASCADE,
- SET NULL and RESTRICT.
-
- :param deferrable: Optional bool. If set, emit DEFERRABLE or NOT
- DEFERRABLE when issuing DDL for this constraint.
-
- :param initially: Optional string. If set, emit INITIALLY <value> when
- issuing DDL for this constraint.
-
- :param link_to_name: if True, the string name given in ``column`` is
- the rendered name of the referenced column, not its locally assigned
- ``key``.
-
- :param use_alter: If True, do not emit the DDL for this constraint as
- part of the CREATE TABLE definition. Instead, generate it via an
- ALTER TABLE statement issued after the full collection of tables
- has been created, and drop it via an ALTER TABLE statement before
- the full collection of tables is dropped. This is shorthand for the
- usage of :class:`.AddConstraint` and :class:`.DropConstraint` applied
- as "after-create" and "before-drop" events on the MetaData object.
- This is normally used to generate/drop constraints on objects that
- are mutually dependent on each other.
-
- """
- super(ForeignKeyConstraint, self).\
- __init__(name, deferrable, initially)
-
- self.onupdate = onupdate
- self.ondelete = ondelete
- self.link_to_name = link_to_name
- if self.name is None and use_alter:
- raise exc.ArgumentError("Alterable Constraint requires a name")
- self.use_alter = use_alter
-
- self._elements = util.OrderedDict()
-
- # standalone ForeignKeyConstraint - create
- # associated ForeignKey objects which will be applied to hosted
- # Column objects (in col.foreign_keys), either now or when attached
- # to the Table for string-specified names
- for col, refcol in zip(columns, refcolumns):
- self._elements[col] = ForeignKey(
- refcol,
- _constraint=self,
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- link_to_name=self.link_to_name
- )
-
- if table is not None:
- self._set_parent_with_dispatch(table)
-
- @property
- def columns(self):
- return self._elements.keys()
-
- @property
- def elements(self):
- return self._elements.values()
-
- def _set_parent(self, table):
- super(ForeignKeyConstraint, self)._set_parent(table)
- for col, fk in self._elements.iteritems():
- # string-specified column names now get
- # resolved to Column objects
- if isinstance(col, basestring):
- col = table.c[col]
-
- if not hasattr(fk, 'parent') or \
- fk.parent is not col:
- fk._set_parent_with_dispatch(col)
-
- if self.use_alter:
- def supports_alter(ddl, event, schema_item, bind, **kw):
- return table in set(kw['tables']) and \
- bind.dialect.supports_alter
-
- event.listen(table.metadata, "after_create", AddConstraint(self, on=supports_alter))
- event.listen(table.metadata, "before_drop", DropConstraint(self, on=supports_alter))
-
-
- def copy(self, **kw):
- fkc = ForeignKeyConstraint(
- [x.parent.name for x in self._elements.values()],
- [x._get_colspec(**kw) for x in self._elements.values()],
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
- use_alter=self.use_alter,
- deferrable=self.deferrable,
- initially=self.initially,
- link_to_name=self.link_to_name
- )
- fkc.dispatch._update(self.dispatch)
- return fkc
-
-class PrimaryKeyConstraint(ColumnCollectionConstraint):
- """A table-level PRIMARY KEY constraint.
-
- Defines a single column or composite PRIMARY KEY constraint. For a
- no-frills primary key, adding ``primary_key=True`` to one or more
- ``Column`` definitions is a shorthand equivalent for an unnamed single- or
- multiple-column PrimaryKeyConstraint.
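-
- E.g., the explicit composite form (names are illustrative)::
-
- Table('version_info', metadata,
- Column('id', Integer),
- Column('version', Integer),
- PrimaryKeyConstraint('id', 'version')
- )
-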
- """
-
- __visit_name__ = 'primary_key_constraint'
-
- def _set_parent(self, table):
- super(PrimaryKeyConstraint, self)._set_parent(table)
-
- if table.primary_key in table.constraints:
- table.constraints.remove(table.primary_key)
- table.primary_key = self
- table.constraints.add(self)
-
- for c in self.columns:
- c.primary_key = True
-
- def _replace(self, col):
- self.columns.replace(col)
-
-class UniqueConstraint(ColumnCollectionConstraint):
- """A table-level UNIQUE constraint.
-
- Defines a single column or composite UNIQUE constraint. For a no-frills,
- single column constraint, adding ``unique=True`` to the ``Column``
- definition is a shorthand equivalent for an unnamed, single column
- UniqueConstraint.
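-
- E.g., a composite constraint sketch (names are illustrative)::
-
- Table('user', metadata,
- Column('name', String(30)),
- Column('email', String(50)),
- UniqueConstraint('name', 'email', name='uq_name_email')
- )
-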
- """
-
- __visit_name__ = 'unique_constraint'
-
-class Index(ColumnCollectionMixin, SchemaItem):
- """A table-level INDEX.
-
- Defines a composite (one or more column) INDEX. For a no-frills, single
- column index, adding ``index=True`` to the ``Column`` definition is
- a shorthand equivalent for an unnamed, single column Index.
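-
- E.g., a composite, unique index sketch (``user_table`` is an
- assumed :class:`.Table`)::
-
- Index('ix_user_name_email',
- user_table.c.name, user_table.c.email, unique=True)
-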
- """
-
- __visit_name__ = 'index'
-
- def __init__(self, name, *columns, **kw):
- """Construct an index object.
-
- :param name:
- The name of the index
-
- :param \*columns:
- Columns to include in the index. All columns must belong to the same
- table.
-
- :param unique:
- If True, create a unique index. Defaults to False.
-
- :param \**kw:
- Other keyword arguments may be interpreted by specific dialects.
-
- """
- self.table = None
- # will call _set_parent() if table-bound column
- # objects are present
- ColumnCollectionMixin.__init__(self, *columns)
- self.name = name
- self.unique = kw.pop('unique', False)
- self.kwargs = kw
-
- def _set_parent(self, table):
- ColumnCollectionMixin._set_parent(self, table)
-
- if self.table is not None and table is not self.table:
- raise exc.ArgumentError(
- "Index '%s' is against table '%s', and "
- "cannot be associated with table '%s'." % (
- self.name,
- self.table.description,
- table.description
- )
- )
- self.table = table
- for c in self.columns:
- if c.table != self.table:
- raise exc.ArgumentError(
- "Column '%s' is not part of table '%s'." %
- (c, self.table.description)
- )
- table.indexes.add(self)
-
- @property
- def bind(self):
- """Return the connectable associated with this Index."""
-
- return self.table.bind
-
- def create(self, bind=None):
- if bind is None:
- bind = _bind_or_error(self)
- bind.create(self)
- return self
-
- def drop(self, bind=None):
- if bind is None:
- bind = _bind_or_error(self)
- bind.drop(self)
-
- def __repr__(self):
- return 'Index("%s", %s%s)' % (
- self.name,
- ', '.join(repr(c) for c in self.columns),
- (self.unique and ', unique=True') or '')
-
-class MetaData(SchemaItem):
- """A collection of Tables and their associated schema constructs.
-
- Holds a collection of Tables and an optional binding to an ``Engine`` or
- ``Connection``. If bound, the :class:`~sqlalchemy.schema.Table` objects
- in the collection and their columns may participate in implicit SQL
- execution.
-
- The `Table` objects themselves are stored in the `metadata.tables`
- dictionary.
-
- The ``bind`` property may be assigned to dynamically. A common pattern is
- to start unbound and then bind later when an engine is available::
-
- metadata = MetaData()
- # define tables
- Table('mytable', metadata, ...)
- # connect to an engine later, perhaps after loading a URL from a
- # configuration file
- metadata.bind = an_engine
-
- MetaData is a thread-safe object after tables have been explicitly defined
- or loaded via reflection.
-
- .. index::
- single: thread safety; MetaData
-
- """
-
- __visit_name__ = 'metadata'
-
- def __init__(self, bind=None, reflect=False):
- """Create a new MetaData object.
-
- :param bind:
- An Engine or Connection to bind to. May also be a string or URL
- instance; these are passed to create_engine() and this MetaData will
- be bound to the resulting engine.
-
- :param reflect:
- Optional, automatically load all tables from the bound database.
- Defaults to False. ``bind`` is required when this option is set.
- For finer control over loaded tables, use the ``reflect`` method of
- ``MetaData``.
-
- """
- self.tables = util.immutabledict()
- self._schemas = set()
- self._sequences = {}
- self.bind = bind
- if reflect:
- if not bind:
- raise exc.ArgumentError(
- "A bind must be supplied in conjunction "
- "with reflect=True")
- self.reflect()
-
- def __repr__(self):
- return 'MetaData(%r)' % self.bind
-
- def __contains__(self, table_or_key):
- if not isinstance(table_or_key, basestring):
- table_or_key = table_or_key.key
- return table_or_key in self.tables
-
- def _add_table(self, name, schema, table):
- key = _get_table_key(name, schema)
- dict.__setitem__(self.tables, key, table)
- if schema:
- self._schemas.add(schema)
-
- def _remove_table(self, name, schema):
- key = _get_table_key(name, schema)
- dict.pop(self.tables, key, None)
- if self._schemas:
- self._schemas = set([t.schema
- for t in self.tables.values()
- if t.schema is not None])
-
- def __getstate__(self):
- return {'tables': self.tables, 'schemas':self._schemas,
- 'sequences':self._sequences}
-
- def __setstate__(self, state):
- self.tables = state['tables']
- self._bind = None
- self._sequences = state['sequences']
- self._schemas = state['schemas']
-
- def is_bound(self):
- """True if this MetaData is bound to an Engine or Connection."""
-
- return self._bind is not None
-
- def bind(self):
- """An Engine or Connection to which this MetaData is bound.
-
- This property may be assigned an ``Engine`` or ``Connection``, or
- assigned a string or URL to automatically create a basic ``Engine``
- for this bind with ``create_engine()``.
-
- """
- return self._bind
-
- def _bind_to(self, bind):
- """Bind this MetaData to an Engine, Connection, string or URL."""
-
- if isinstance(bind, (basestring, url.URL)):
- from sqlalchemy import create_engine
- self._bind = create_engine(bind)
- else:
- self._bind = bind
- bind = property(bind, _bind_to)
-
- def clear(self):
- """Clear all Table objects from this MetaData."""
-
- dict.clear(self.tables)
- self._schemas.clear()
-
- def remove(self, table):
- """Remove the given Table object from this MetaData."""
-
- self._remove_table(table.name, table.schema)
-
- @property
- def sorted_tables(self):
- """Returns a list of ``Table`` objects sorted in order of
- dependency.
- """
- return sqlutil.sort_tables(self.tables.itervalues())
-
- def reflect(self, bind=None, schema=None, views=False, only=None):
- """Load all available table definitions from the database.
-
- Automatically creates ``Table`` entries in this ``MetaData`` for any
- table available in the database but not yet present in the
- ``MetaData``. May be called multiple times to pick up tables recently
- added to the database; however, no special action is taken if a table
- in this ``MetaData`` no longer exists in the database.
-
- :param bind:
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the
- database; if None, uses the existing bind on this ``MetaData``, if
- any.
-
- :param schema:
- Optional, query and reflect tables from an alternate schema.
-
- :param views:
- If True, also reflect views.
-
- :param only:
- Optional. Load only a subset of available named tables. May be
- specified as a sequence of names or a callable.
-
- If a sequence of names is provided, only those tables will be
- reflected. An error is raised if a table is requested but not
- available. Named tables already present in this ``MetaData`` are
- ignored.
-
- If a callable is provided, it will be used as a boolean predicate to
- filter the list of potential table names. The callable is called
- with a table name and this ``MetaData`` instance as positional
- arguments and should return a true value for any table to reflect.
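-
- E.g., reflecting only a subset of tables (a sketch; ``engine`` is
- an assumed :class:`.Engine`)::
-
- meta = MetaData()
- meta.reflect(bind=engine, only=['user', 'address'])
- meta.reflect(bind=engine,
- only=lambda name, meta: name.startswith('tmp_'))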
-
- """
- reflect_opts = {'autoload': True}
- if bind is None:
- bind = _bind_or_error(self)
- conn = None
- else:
- reflect_opts['autoload_with'] = bind
- conn = bind.contextual_connect()
-
- if schema is not None:
- reflect_opts['schema'] = schema
-
- try:
- available = util.OrderedSet(bind.engine.table_names(schema,
- connection=conn))
- if views:
- available.update(
- bind.dialect.get_view_names(conn or bind, schema)
- )
-
- current = set(self.tables.iterkeys())
-
- if only is None:
- load = [name for name in available if name not in current]
- elif util.callable(only):
- load = [name for name in available
- if name not in current and only(name, self)]
- else:
- missing = [name for name in only if name not in available]
- if missing:
- s = schema and (" schema '%s'" % schema) or ''
- raise exc.InvalidRequestError(
- 'Could not reflect: requested table(s) not available '
- 'in %s%s: (%s)' %
- (bind.engine.url, s, ', '.join(missing)))
- load = [name for name in only if name not in current]
-
- for name in load:
- Table(name, self, **reflect_opts)
- finally:
- if conn is not None:
- conn.close()
-
- def append_ddl_listener(self, event_name, listener):
- """Append a DDL event listener to this ``MetaData``.
-
- Deprecated. See :class:`.DDLEvents`.
-
- """
- def adapt_listener(target, connection, **kw):
- listener(event, target, connection, **kw)
-
- event.listen(self, "" + event_name.replace('-', '_'), adapt_listener)
-
- def create_all(self, bind=None, tables=None, checkfirst=True):
- """Create all tables stored in this metadata.
-
- Conditional by default, will not attempt to recreate tables already
- present in the target database.
-
- :param bind:
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the
- database; if None, uses the existing bind on this ``MetaData``, if
- any.
-
- :param tables:
- Optional list of ``Table`` objects, which is a subset of the total
- tables in the ``MetaData`` (others are ignored).
-
- :param checkfirst:
- Defaults to True; don't issue CREATEs for tables already present
- in the target database.
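-
- E.g., a minimal sketch (``engine`` is an assumed :class:`.Engine`)::
-
- metadata.create_all(bind=engine)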
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind.create(self, checkfirst=checkfirst, tables=tables)
-
- def drop_all(self, bind=None, tables=None, checkfirst=True):
- """Drop all tables stored in this metadata.
-
- Conditional by default, will not attempt to drop tables not present in
- the target database.
-
- :param bind:
- A :class:`~sqlalchemy.engine.base.Connectable` used to access the
- database; if None, uses the existing bind on this ``MetaData``, if
- any.
-
- :param tables:
- Optional list of ``Table`` objects, which is a subset of the
- total tables in the ``MetaData`` (others are ignored).
-
- :param checkfirst:
- Defaults to True; only issue DROPs for tables confirmed to be
- present in the target database.
-
- """
- if bind is None:
- bind = _bind_or_error(self)
- bind.drop(self, checkfirst=checkfirst, tables=tables)
-
-class ThreadLocalMetaData(MetaData):
- """A MetaData variant that presents a different ``bind`` in every thread.
-
- Makes the ``bind`` property of the MetaData a thread-local value, allowing
- this collection of tables to be bound to different ``Engine``
- implementations or connections in each thread.
-
- The ThreadLocalMetaData starts off bound to None in each thread. Binds
- must be made explicitly by assigning to the ``bind`` property or using
- ``connect()``. You can also re-bind dynamically multiple times per
- thread, just like a regular ``MetaData``.
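-
- E.g., a minimal sketch (the URL is illustrative)::
-
- meta = ThreadLocalMetaData()
- meta.bind = 'sqlite:///app.db' # binds the current thread only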
-
- """
-
- __visit_name__ = 'metadata'
-
- def __init__(self):
- """Construct a ThreadLocalMetaData."""
-
- self.context = util.threading.local()
- self.__engines = {}
- super(ThreadLocalMetaData, self).__init__()
-
- def bind(self):
- """The bound Engine or Connection for this thread.
-
- This property may be assigned an Engine or Connection, or assigned a
- string or URL to automatically create a basic Engine for this bind
- with ``create_engine()``."""
-
- return getattr(self.context, '_engine', None)
-
- def _bind_to(self, bind):
- """Bind to a Connectable in the caller's thread."""
-
- if isinstance(bind, (basestring, url.URL)):
- try:
- self.context._engine = self.__engines[bind]
- except KeyError:
- from sqlalchemy import create_engine
- e = create_engine(bind)
- self.__engines[bind] = e
- self.context._engine = e
- else:
- # TODO: this is squirrelly; we shouldn't have to hold onto engines
- # in a case like this
- if bind not in self.__engines:
- self.__engines[bind] = bind
- self.context._engine = bind
-
- bind = property(bind, _bind_to)
-
- def is_bound(self):
- """True if there is a bind for this thread."""
- return (hasattr(self.context, '_engine') and
- self.context._engine is not None)
-
- def dispose(self):
- """Dispose all bound engines, in all thread contexts."""
-
- for e in self.__engines.itervalues():
- if hasattr(e, 'dispose'):
- e.dispose()
-
-class SchemaVisitor(visitors.ClauseVisitor):
- """Define the visiting for ``SchemaItem`` objects."""
-
- __traverse_options__ = {'schema_visitor':True}
-
-
-class DDLElement(expression.Executable, expression.ClauseElement):
- """Base class for DDL expression constructs.
-
- This class is the base for the general purpose :class:`.DDL` class,
- as well as the various create/drop clause constructs such as
- :class:`.CreateTable`, :class:`.DropTable`, :class:`.AddConstraint`,
- etc.
-
- :class:`.DDLElement` integrates closely with SQLAlchemy events,
- introduced in :ref:`event_toplevel`. An instance of one is
- itself an event receiving callable::
-
- event.listen(
- users,
- 'after_create',
- AddConstraint(constraint).execute_if(dialect='postgresql')
- )
-
- See also:
-
- :class:`.DDL`
-
- :class:`.DDLEvents`
-
- :ref:`event_toplevel`
-
- :ref:`schema_ddl_sequences`
-
- """
-
- _execution_options = expression.Executable.\
- _execution_options.union({'autocommit':True})
-
- target = None
- on = None
- dialect = None
- callable_ = None
-
- def execute(self, bind=None, target=None):
- """Execute this DDL immediately.
-
- Executes the DDL statement in isolation using the supplied
- :class:`~sqlalchemy.engine.base.Connectable`, or the
- :class:`~sqlalchemy.engine.base.Connectable` assigned to the ``.bind``
- property if none is supplied. If the DDL has conditional ``on``
- criteria, it will be invoked with None as the event.
-
- :param bind:
- Optional, an ``Engine`` or ``Connection``. If not supplied, a valid
- :class:`~sqlalchemy.engine.base.Connectable` must be present in the
- ``.bind`` property.
-
- :param target:
- Optional, defaults to None. The target SchemaItem for the
- execute call. Will be passed to the ``on`` callable if any,
- and may also provide string expansion data for the
- statement. See ``execute_at`` for more information.
-
- """
-
- if bind is None:
- bind = _bind_or_error(self)
-
- if self._should_execute(target, bind):
- return bind.execute(self.against(target))
- else:
- bind.engine.logger.info(
- "DDL execution skipped, criteria not met.")
-
- @util.deprecated("0.7", "See :class:`.DDLEvents`, as well as "
- ":meth:`.DDLElement.execute_if`.")
- def execute_at(self, event_name, target):
- """Link execution of this DDL to the DDL lifecycle of a SchemaItem.
-
- Links this ``DDLElement`` to a ``Table`` or ``MetaData`` instance,
- executing it when that schema item is created or dropped. The DDL
- statement will be executed using the same Connection and transactional
- context as the Table create/drop itself. The ``.bind`` property of
- this statement is ignored.
-
- :param event_name:
- One of the events defined in the schema item's ``.ddl_events``;
- e.g. 'before-create', 'after-create', 'before-drop' or 'after-drop'
-
- :param target:
- The Table or MetaData instance with which this DDLElement will
- be associated.
-
- A DDLElement instance can be linked to any number of schema items.
-
- ``execute_at`` builds on the ``append_ddl_listener`` interface of
- :class:`.MetaData` and :class:`.Table` objects.
-
- Caveat: Creating or dropping a Table in isolation will also trigger
- any DDL set to ``execute_at`` on that Table's MetaData. This may change
- in a future release.
-
- """
-
- def call_event(target, connection, **kw):
- if self._should_execute_deprecated(event_name,
- target, connection, **kw):
- return connection.execute(self.against(target))
-
- event.listen(target, "" + event_name.replace('-', '_'), call_event)
-
- @expression._generative
- def against(self, target):
- """Return a copy of this DDL against a specific schema item."""
-
- self.target = target
-
- @expression._generative
- def execute_if(self, dialect=None, callable_=None, state=None):
- """Return a callable that will execute this
- DDLElement conditionally.
-
- Used to provide a wrapper for event listening::
-
- event.listen(
- metadata,
- 'before_create',
- DDL("my_ddl").execute_if(dialect='postgresql')
- )
-
- :param dialect: May be a string, tuple or a callable
- predicate. If a string, it will be compared to the name of the
- executing database dialect::
-
- DDL('something').execute_if(dialect='postgresql')
-
- If a tuple, specifies multiple dialect names::
-
- DDL('something').execute_if(dialect=('postgresql', 'mysql'))
-
- :param callable_: A callable, which will be invoked with
- four positional arguments as well as optional keyword
- arguments:
-
- :ddl:
- This DDL element.
-
- :target:
- The :class:`.Table` or :class:`.MetaData` object which is the target of
- this event. May be None if the DDL is executed explicitly.
-
- :bind:
- The :class:`.Connection` being used for DDL execution
-
- :tables:
- Optional keyword argument - a list of Table objects which are to
- be created/dropped within a MetaData.create_all() or drop_all()
- method call.
-
- :state:
- Optional keyword argument - will be the ``state`` argument
- passed to this function.
-
- :checkfirst:
- Keyword argument, will be True if the 'checkfirst' flag was
- set during the call to ``create()``, ``create_all()``,
- ``drop()``, ``drop_all()``.
-
- If the callable returns a true value, the DDL statement will be
- executed.
-
- :param state: any value which will be passed to the callable_
- as the ``state`` keyword argument.
-
- See also:
-
- :class:`.DDLEvents`
-
- :ref:`event_toplevel`
-
- """
- self.dialect = dialect
- self.callable_ = callable_
- self.state = state
-
- def _should_execute(self, target, bind, **kw):
- if self.on is not None and \
- not self._should_execute_deprecated(None, target, bind, **kw):
- return False
-
- if isinstance(self.dialect, basestring):
- if self.dialect != bind.engine.name:
- return False
- elif isinstance(self.dialect, (tuple, list, set)):
- if bind.engine.name not in self.dialect:
- return False
- if self.callable_ is not None and \
- not self.callable_(self, target, bind, state=self.state, **kw):
- return False
-
- return True
-
- def _should_execute_deprecated(self, event, target, bind, **kw):
- if self.on is None:
- return True
- elif isinstance(self.on, basestring):
- return self.on == bind.engine.name
- elif isinstance(self.on, (tuple, list, set)):
- return bind.engine.name in self.on
- else:
- return self.on(self, event, target, bind, **kw)
-
- def __call__(self, target, bind, **kw):
- """Execute the DDL as a ddl_listener."""
-
- if self._should_execute(target, bind, **kw):
- return bind.execute(self.against(target))
-
- def _check_ddl_on(self, on):
- if (on is not None and
- (not isinstance(on, (basestring, tuple, list, set)) and
- not util.callable(on))):
- raise exc.ArgumentError(
- "Expected the name of a database dialect, a tuple "
- "of names, or a callable for "
- "'on' criteria, got type '%s'." % type(on).__name__)
-
- def bind(self):
- if self._bind:
- return self._bind
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
- def _generate(self):
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- return s
-
- def _compiler(self, dialect, **kw):
- """Return a compiler appropriate for this ClauseElement, given a
- Dialect."""
-
- return dialect.ddl_compiler(dialect, self, **kw)
-
-class DDL(DDLElement):
- """A literal DDL statement.
-
- Specifies literal SQL DDL to be executed by the database. DDL objects
- function as DDL event listeners, and can be subscribed to those events
- listed in :class:`.DDLEvents`, using either :class:`.Table` or :class:`.MetaData`
- objects as targets. Basic templating support allows a single DDL instance
- to handle repetitive tasks for multiple tables.
-
- Examples::
-
- from sqlalchemy import event, DDL
-
- tbl = Table('users', metadata, Column('uid', Integer))
- event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger'))
-
- spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE')
- event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb'))
-
- drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE')
- connection.execute(drop_spow)
-
- When operating on Table events, the following ``statement``
- string substitutions are available::
-
- %(table)s - the Table name, with any required quoting applied
- %(schema)s - the schema name, with any required quoting applied
- %(fullname)s - the Table name including schema, quoted if needed
-
- The DDL's "context", if any, will be combined with the standard
- substutions noted above. Keys present in the context will override
- the standard substitutions.
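-
- E.g., a sketch combining built-in and context-supplied keys (the
- column name is illustrative)::
-
- DDL('ALTER TABLE %(table)s ADD COLUMN %(col)s INTEGER',
- context={'col': 'extra_data'})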
-
- """
-
- __visit_name__ = "ddl"
-
- def __init__(self, statement, on=None, context=None, bind=None):
- """Create a DDL statement.
-
- :param statement:
- A string or unicode string to be executed. Statements will be
- processed with Python's string formatting operator. See the
- ``context`` argument and the ``execute_at`` method.
-
- A literal '%' in a statement must be escaped as '%%'.
-
- SQL bind parameters are not available in DDL statements.
-
- :param on:
- Deprecated. See :meth:`.DDLElement.execute_if`.
-
- Optional filtering criteria. May be a string, tuple or a callable
- predicate. If a string, it will be compared to the name of the
- executing database dialect::
-
- DDL('something', on='postgresql')
-
- If a tuple, specifies multiple dialect names::
-
- DDL('something', on=('postgresql', 'mysql'))
-
- If a callable, it will be invoked with four positional arguments
- as well as optional keyword arguments:
-
- :ddl:
- This DDL element.
-
- :event:
- The name of the event that has triggered this DDL, such as
- 'after-create'. Will be None if the DDL is executed explicitly.
-
- :target:
- The ``Table`` or ``MetaData`` object which is the target of
- this event. May be None if the DDL is executed explicitly.
-
- :connection:
- The ``Connection`` being used for DDL execution
-
- :tables:
- Optional keyword argument - a list of Table objects which are to
- be created/dropped within a MetaData.create_all() or drop_all()
- method call.
-
-
- If the callable returns a true value, the DDL statement will be
- executed.
-
- :param context:
- Optional dictionary, defaults to None. These values will be
- available for use in string substitutions on the DDL statement.
-
- :param bind:
- Optional. A :class:`~sqlalchemy.engine.base.Connectable`, used by
- default when ``execute()`` is invoked without a bind argument.
-
-
- See also:
-
- :class:`.DDLEvents`
- :mod:`sqlalchemy.event`
-
- """
-
- if not isinstance(statement, basestring):
- raise exc.ArgumentError(
- "Expected a string or unicode SQL statement, got '%r'" %
- statement)
-
- self.statement = statement
- self.context = context or {}
-
- self._check_ddl_on(on)
- self.on = on
- self._bind = bind
-
-
- def __repr__(self):
- return '<%s@%s; %s>' % (
- type(self).__name__, id(self),
- ', '.join([repr(self.statement)] +
- ['%s=%r' % (key, getattr(self, key))
- for key in ('on', 'context')
- if getattr(self, key)]))
-
-def _to_schema_column(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, Column):
- raise exc.ArgumentError("schema.Column object expected")
- return element
-
-def _to_schema_column_or_string(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- return element
-
-class _CreateDropBase(DDLElement):
- """Base class for DDL constucts that represent CREATE and DROP or
- equivalents.
-
- The common theme of _CreateDropBase is a single
- ``element`` attribute which refers to the element
- to be created or dropped.
-
- """
-
- def __init__(self, element, on=None, bind=None):
- self.element = element
- self._check_ddl_on(on)
- self.on = on
- self.bind = bind
-
- def _create_rule_disable(self, compiler):
- """Allow disable of _create_rule using a callable.
-
- Pass to _create_rule using
- util.portable_instancemethod(self._create_rule_disable)
- to retain serializability.
-
- """
- return False
-
-class CreateTable(_CreateDropBase):
- """Represent a CREATE TABLE statement."""
-
- __visit_name__ = "create_table"
-
-class DropTable(_CreateDropBase):
- """Represent a DROP TABLE statement."""
-
- __visit_name__ = "drop_table"
-
-class CreateSequence(_CreateDropBase):
- """Represent a CREATE SEQUENCE statement."""
-
- __visit_name__ = "create_sequence"
-
-class DropSequence(_CreateDropBase):
- """Represent a DROP SEQUENCE statement."""
-
- __visit_name__ = "drop_sequence"
-
-class CreateIndex(_CreateDropBase):
- """Represent a CREATE INDEX statement."""
-
- __visit_name__ = "create_index"
-
-class DropIndex(_CreateDropBase):
- """Represent a DROP INDEX statement."""
-
- __visit_name__ = "drop_index"
-
-class AddConstraint(_CreateDropBase):
- """Represent an ALTER TABLE ADD CONSTRAINT statement."""
-
- __visit_name__ = "add_constraint"
-
- def __init__(self, element, *args, **kw):
- super(AddConstraint, self).__init__(element, *args, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-class DropConstraint(_CreateDropBase):
- """Represent an ALTER TABLE DROP CONSTRAINT statement."""
-
- __visit_name__ = "drop_constraint"
-
- def __init__(self, element, cascade=False, **kw):
- self.cascade = cascade
- super(DropConstraint, self).__init__(element, **kw)
- element._create_rule = util.portable_instancemethod(
- self._create_rule_disable)
-
-def _bind_or_error(schemaitem, msg=None):
- bind = schemaitem.bind
- if not bind:
- name = schemaitem.__class__.__name__
- label = getattr(schemaitem, 'fullname',
- getattr(schemaitem, 'name', None))
- if label:
- item = '%s %r' % (name, label)
- else:
- item = name
- if isinstance(schemaitem, (MetaData, DDL)):
- bindable = "the %s's .bind" % name
- else:
- bindable = "this %s's .metadata.bind" % name
-
- if msg is None:
- msg = "The %s is not bound to an Engine or Connection. "\
- "Execution can not proceed without a database to execute "\
- "against. Either execute with an explicit connection or "\
- "assign %s to enable implicit execution." % \
- (item, bindable)
- raise exc.UnboundExecutionError(msg)
- return bind
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/__init__.py
deleted file mode 100755
index c591e680..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/__init__.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# sql/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy.sql.expression import (
- Alias,
- ClauseElement,
- ColumnCollection,
- ColumnElement,
- CompoundSelect,
- Delete,
- FromClause,
- Insert,
- Join,
- Select,
- Selectable,
- TableClause,
- Update,
- alias,
- and_,
- asc,
- between,
- bindparam,
- case,
- cast,
- collate,
- column,
- delete,
- desc,
- distinct,
- except_,
- except_all,
- exists,
- extract,
- func,
- insert,
- intersect,
- intersect_all,
- join,
- label,
- literal,
- literal_column,
- modifier,
- not_,
- null,
- or_,
- outerjoin,
- outparam,
- over,
- select,
- subquery,
- table,
- text,
- tuple_,
- type_coerce,
- union,
- union_all,
- update,
- )
-
-from sqlalchemy.sql.visitors import ClauseVisitor
-
-__tmp = locals().keys()
-__all__ = sorted([i for i in __tmp if not i.startswith('__')])
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/compiler.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/compiler.py
deleted file mode 100755
index 829adeba..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/compiler.py
+++ /dev/null
@@ -1,1793 +0,0 @@
-# sql/compiler.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Base SQL and DDL compiler implementations.
-
-Classes provided include:
-
-:class:`~sqlalchemy.sql.compiler.SQLCompiler` - renders SQL
-strings
-
-:class:`~sqlalchemy.sql.compiler.DDLCompiler` - renders DDL
-(data definition language) strings
-
-:class:`~sqlalchemy.sql.compiler.GenericTypeCompiler` - renders
-type specification strings.
-
-To generate user-defined SQL strings, see
- :mod:`~sqlalchemy.ext.compiler`.
-
-"""
-
-import re
-from sqlalchemy import schema, engine, util, exc
-from sqlalchemy.sql import operators, functions, util as sql_util, \
- visitors
-from sqlalchemy.sql import expression as sql
-import decimal
-
-RESERVED_WORDS = set([
- 'all', 'analyse', 'analyze', 'and', 'any', 'array',
- 'as', 'asc', 'asymmetric', 'authorization', 'between',
- 'binary', 'both', 'case', 'cast', 'check', 'collate',
- 'column', 'constraint', 'create', 'cross', 'current_date',
- 'current_role', 'current_time', 'current_timestamp',
- 'current_user', 'default', 'deferrable', 'desc',
- 'distinct', 'do', 'else', 'end', 'except', 'false',
- 'for', 'foreign', 'freeze', 'from', 'full', 'grant',
- 'group', 'having', 'ilike', 'in', 'initially', 'inner',
- 'intersect', 'into', 'is', 'isnull', 'join', 'leading',
- 'left', 'like', 'limit', 'localtime', 'localtimestamp',
- 'natural', 'new', 'not', 'notnull', 'null', 'off', 'offset',
- 'old', 'on', 'only', 'or', 'order', 'outer', 'overlaps',
- 'placing', 'primary', 'references', 'right', 'select',
- 'session_user', 'set', 'similar', 'some', 'symmetric', 'table',
- 'then', 'to', 'trailing', 'true', 'union', 'unique', 'user',
- 'using', 'verbose', 'when', 'where'])
-
-LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I)
-ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in xrange(0, 10)]).union(['$'])
-
-BIND_PARAMS = re.compile(r'(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])', re.UNICODE)
-BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]+)(?![:\w\$])', re.UNICODE)
-
-BIND_TEMPLATES = {
- 'pyformat':"%%(%(name)s)s",
- 'qmark':"?",
- 'format':"%%s",
- 'numeric':":%(position)s",
- 'named':":%(name)s"
-}
-
-
-OPERATORS = {
- # binary
- operators.and_ : ' AND ',
- operators.or_ : ' OR ',
- operators.add : ' + ',
- operators.mul : ' * ',
- operators.sub : ' - ',
- # Py2K
- operators.div : ' / ',
- # end Py2K
- operators.mod : ' % ',
- operators.truediv : ' / ',
- operators.neg : '-',
- operators.lt : ' < ',
- operators.le : ' <= ',
- operators.ne : ' != ',
- operators.gt : ' > ',
- operators.ge : ' >= ',
- operators.eq : ' = ',
- operators.concat_op : ' || ',
- operators.between_op : ' BETWEEN ',
- operators.match_op : ' MATCH ',
- operators.in_op : ' IN ',
- operators.notin_op : ' NOT IN ',
- operators.comma_op : ', ',
- operators.from_ : ' FROM ',
- operators.as_ : ' AS ',
- operators.is_ : ' IS ',
- operators.isnot : ' IS NOT ',
- operators.collate : ' COLLATE ',
-
- # unary
- operators.exists : 'EXISTS ',
- operators.distinct_op : 'DISTINCT ',
- operators.inv : 'NOT ',
-
- # modifiers
- operators.desc_op : ' DESC',
- operators.asc_op : ' ASC',
- operators.nullsfirst_op : ' NULLS FIRST',
- operators.nullslast_op : ' NULLS LAST',
-}
-
-FUNCTIONS = {
- functions.coalesce : 'coalesce%(expr)s',
- functions.current_date: 'CURRENT_DATE',
- functions.current_time: 'CURRENT_TIME',
- functions.current_timestamp: 'CURRENT_TIMESTAMP',
- functions.current_user: 'CURRENT_USER',
- functions.localtime: 'LOCALTIME',
- functions.localtimestamp: 'LOCALTIMESTAMP',
- functions.random: 'random%(expr)s',
- functions.sysdate: 'sysdate',
- functions.session_user :'SESSION_USER',
- functions.user: 'USER'
-}
-
-EXTRACT_MAP = {
- 'month': 'month',
- 'day': 'day',
- 'year': 'year',
- 'second': 'second',
- 'hour': 'hour',
- 'doy': 'doy',
- 'minute': 'minute',
- 'quarter': 'quarter',
- 'dow': 'dow',
- 'week': 'week',
- 'epoch': 'epoch',
- 'milliseconds': 'milliseconds',
- 'microseconds': 'microseconds',
- 'timezone_hour': 'timezone_hour',
- 'timezone_minute': 'timezone_minute'
-}
-
-COMPOUND_KEYWORDS = {
- sql.CompoundSelect.UNION : 'UNION',
- sql.CompoundSelect.UNION_ALL : 'UNION ALL',
- sql.CompoundSelect.EXCEPT : 'EXCEPT',
- sql.CompoundSelect.EXCEPT_ALL : 'EXCEPT ALL',
- sql.CompoundSelect.INTERSECT : 'INTERSECT',
- sql.CompoundSelect.INTERSECT_ALL : 'INTERSECT ALL'
-}
-
-class _CompileLabel(visitors.Visitable):
- """lightweight label object which acts as an expression._Label."""
-
- __visit_name__ = 'label'
- __slots__ = 'element', 'name'
-
- def __init__(self, col, name):
- self.element = col
- self.name = name
-
- @property
- def type(self):
- return self.element.type
-
- @property
- def quote(self):
- return self.element.quote
-
-class SQLCompiler(engine.Compiled):
- """Default implementation of Compiled.
-
- Compiles ClauseElements into SQL strings. Uses a visit paradigm
- similar to visitors.ClauseVisitor's, but implements its own traversal.
-
- """
-
- extract_map = EXTRACT_MAP
-
- compound_keywords = COMPOUND_KEYWORDS
-
- # class-level defaults which can be set at the instance
- # level to define if this Compiled instance represents
- # INSERT/UPDATE/DELETE
- isdelete = isinsert = isupdate = False
-
- # holds the "returning" collection of columns if
- # the statement is CRUD and defines returning columns
- # either implicitly or explicitly
- returning = None
-
- # set to True classwide to generate RETURNING
- # clauses before the VALUES or WHERE clause (i.e. MSSQL)
- returning_precedes_values = False
-
- # SQL 92 doesn't allow bind parameters to be used
- # in the columns clause of a SELECT, nor does it allow
- # ambiguous expressions like "? = ?". A compiler
- # subclass can set this flag to False if the target
- # driver/DB enforces this
- ansi_bind_rules = False
-
- def __init__(self, dialect, statement, column_keys=None,
- inline=False, **kwargs):
- """Construct a new ``DefaultCompiler`` object.
-
- dialect
- Dialect to be used
-
- statement
- ClauseElement to be compiled
-
- column_keys
- a list of column names to be compiled into an INSERT or UPDATE
- statement.
-
- """
- self.column_keys = column_keys
-
- # compile INSERT/UPDATE defaults/sequences inlined (no pre-
- # execute)
- self.inline = inline or getattr(statement, 'inline', False)
-
- # a dictionary of bind parameter keys to _BindParamClause
- # instances.
- self.binds = {}
-
- # a dictionary of _BindParamClause instances to "compiled" names
- # that are actually present in the generated SQL
- self.bind_names = util.column_dict()
-
- # stack which keeps track of nested SELECT statements
- self.stack = []
-
- # relates label names in the final SQL to a tuple of local
- # column/label name, ColumnElement object (if any) and
- # TypeEngine. ResultProxy uses this for type processing and
- # column targeting
- self.result_map = {}
-
- # true if the paramstyle is positional
- self.positional = dialect.positional
- if self.positional:
- self.positiontup = []
- self.bindtemplate = BIND_TEMPLATES[dialect.paramstyle]
-
- # an IdentifierPreparer that formats the quoting of identifiers
- self.preparer = dialect.identifier_preparer
- self.label_length = dialect.label_length \
- or dialect.max_identifier_length
-
- # a map which tracks "anonymous" identifiers that are created on
- # the fly here
- self.anon_map = util.PopulateDict(self._process_anon)
-
- # a map which tracks "truncated" names based on
- # dialect.label_length or dialect.max_identifier_length
- self.truncated_names = {}
- engine.Compiled.__init__(self, dialect, statement, **kwargs)
-
-
-
- @util.memoized_property
- def _bind_processors(self):
- return dict(
- (key, value) for key, value in
- ( (self.bind_names[bindparam],
- bindparam.type._cached_bind_processor(self.dialect))
- for bindparam in self.bind_names )
- if value is not None
- )
-
- def is_subquery(self):
- return len(self.stack) > 1
-
- @property
- def sql_compiler(self):
- return self
-
- def construct_params(self, params=None, _group_number=None):
- """return a dictionary of bind parameter keys and values"""
-
- if params:
- pd = {}
- for bindparam, name in self.bind_names.iteritems():
- if bindparam.key in params:
- pd[name] = params[bindparam.key]
- elif name in params:
- pd[name] = params[name]
- elif bindparam.required:
- if _group_number:
- raise exc.InvalidRequestError(
- "A value is required for bind parameter %r, "
- "in parameter group %d" %
- (bindparam.key, _group_number))
- else:
- raise exc.InvalidRequestError(
- "A value is required for bind parameter %r"
- % bindparam.key)
- elif bindparam.callable:
- pd[name] = bindparam.callable()
- else:
- pd[name] = bindparam.value
- return pd
- else:
- pd = {}
- for bindparam in self.bind_names:
- if bindparam.callable:
- pd[self.bind_names[bindparam]] = bindparam.callable()
- else:
- pd[self.bind_names[bindparam]] = bindparam.value
- return pd
-
- params = property(construct_params, doc="""
- Return the bind params for this compiled object.
-
- """)
-
- def default_from(self):
- """Called when a SELECT statement has no froms, and no FROM clause is
- to be appended.
-
- Gives Oracle a chance to tack on a ``FROM DUAL`` to the string output.
-
- """
- return ""
-
- def visit_grouping(self, grouping, asfrom=False, **kwargs):
- return "(" + grouping.element._compiler_dispatch(self, **kwargs) + ")"
-
- def visit_label(self, label, result_map=None,
- within_label_clause=False,
- within_columns_clause=False, **kw):
- # only render labels within the columns clause
- # or ORDER BY clause of a select. dialect-specific compilers
- # can modify this behavior.
- if within_columns_clause and not within_label_clause:
- if isinstance(label.name, sql._generated_label):
- labelname = self._truncated_identifier("colident", label.name)
- else:
- labelname = label.name
-
- if result_map is not None:
- result_map[labelname.lower()] = \
- (label.name, (label, label.element, labelname),\
- label.type)
-
- return label.element._compiler_dispatch(self,
- within_columns_clause=True,
- within_label_clause=True,
- **kw) + \
- OPERATORS[operators.as_] + \
- self.preparer.format_label(label, labelname)
- else:
- return label.element._compiler_dispatch(self,
- within_columns_clause=False,
- **kw)
-
- def visit_column(self, column, result_map=None, **kwargs):
- name = column.name
- if name is None:
- raise exc.CompileError("Cannot compile Column object until "
- "it's 'name' is assigned.")
-
- is_literal = column.is_literal
- if not is_literal and isinstance(name, sql._generated_label):
- name = self._truncated_identifier("colident", name)
-
- if result_map is not None:
- result_map[name.lower()] = (name, (column, ), column.type)
-
- if is_literal:
- name = self.escape_literal_column(name)
- else:
- name = self.preparer.quote(name, column.quote)
-
- table = column.table
- if table is None or not table.named_with_column:
- return name
- else:
- if table.schema:
- schema_prefix = self.preparer.quote_schema(
- table.schema,
- table.quote_schema) + '.'
- else:
- schema_prefix = ''
- tablename = table.name
- if isinstance(tablename, sql._generated_label):
- tablename = self._truncated_identifier("alias", tablename)
-
- return schema_prefix + \
- self.preparer.quote(tablename, table.quote) + \
- "." + name
-
- def escape_literal_column(self, text):
- """provide escaping for the literal_column() construct."""
-
- # TODO: some dialects might need different behavior here
- return text.replace('%', '%%')
-
- def visit_fromclause(self, fromclause, **kwargs):
- return fromclause.name
-
- def visit_index(self, index, **kwargs):
- return index.name
-
- def visit_typeclause(self, typeclause, **kwargs):
- return self.dialect.type_compiler.process(typeclause.type)
-
- def post_process_text(self, text):
- return text
-
- def visit_textclause(self, textclause, **kwargs):
- if textclause.typemap is not None:
- for colname, type_ in textclause.typemap.iteritems():
- self.result_map[colname.lower()] = (colname, None, type_)
-
- def do_bindparam(m):
- name = m.group(1)
- if name in textclause.bindparams:
- return self.process(textclause.bindparams[name])
- else:
- return self.bindparam_string(name)
-
- # un-escape any \:params
- return BIND_PARAMS_ESC.sub(lambda m: m.group(1),
- BIND_PARAMS.sub(do_bindparam,
- self.post_process_text(textclause.text))
- )
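# Editorial sketch: how the BIND_PARAMS / BIND_PARAMS_ESC handling above
# behaves -- ":name"-style tokens become bind parameters, while a
# backslash-escaped "\:" renders as a literal colon (hypothetical query).
from sqlalchemy import text

t = text(r"SELECT * FROM users WHERE name = :name AND tag = 'a\:b'")
print t    # SELECT * FROM users WHERE name = :name AND tag = 'a:b'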
-
- def visit_null(self, expr, **kw):
- return 'NULL'
-
- def visit_true(self, expr, **kw):
- return 'true'
-
- def visit_false(self, expr, **kw):
- return 'false'
-
- def visit_clauselist(self, clauselist, **kwargs):
- sep = clauselist.operator
- if sep is None:
- sep = " "
- else:
- sep = OPERATORS[clauselist.operator]
- return sep.join(
- s for s in
- (c._compiler_dispatch(self, **kwargs)
- for c in clauselist.clauses)
- if s is not None)
-
- def visit_case(self, clause, **kwargs):
- x = "CASE "
- if clause.value is not None:
- x += clause.value._compiler_dispatch(self, **kwargs) + " "
- for cond, result in clause.whens:
- x += "WHEN " + cond._compiler_dispatch(
- self, **kwargs
- ) + " THEN " + result._compiler_dispatch(
- self, **kwargs) + " "
- if clause.else_ is not None:
- x += "ELSE " + clause.else_._compiler_dispatch(
- self, **kwargs
- ) + " "
- x += "END"
- return x
-
- def visit_cast(self, cast, **kwargs):
- return "CAST(%s AS %s)" % \
- (cast.clause._compiler_dispatch(self, **kwargs),
- cast.typeclause._compiler_dispatch(self, **kwargs))
-
- def visit_over(self, over, **kwargs):
- x ="%s OVER (" % over.func._compiler_dispatch(self, **kwargs)
- if over.partition_by is not None:
- x += "PARTITION BY %s" % \
- over.partition_by._compiler_dispatch(self, **kwargs)
- if over.order_by is not None:
- x += " "
- if over.order_by is not None:
- x += "ORDER BY %s" % \
- over.order_by._compiler_dispatch(self, **kwargs)
- x += ")"
- return x
-
- def visit_extract(self, extract, **kwargs):
- field = self.extract_map.get(extract.field, extract.field)
- return "EXTRACT(%s FROM %s)" % (field,
- extract.expr._compiler_dispatch(self, **kwargs))
-
- def visit_function(self, func, result_map=None, **kwargs):
- if result_map is not None:
- result_map[func.name.lower()] = (func.name, None, func.type)
-
- disp = getattr(self, "visit_%s_func" % func.name.lower(), None)
- if disp:
- return disp(func, **kwargs)
- else:
- name = FUNCTIONS.get(func.__class__, func.name + "%(expr)s")
- return ".".join(list(func.packagenames) + [name]) % \
- {'expr':self.function_argspec(func, **kwargs)}
-
- def visit_next_value_func(self, next_value, **kw):
- return self.visit_sequence(next_value.sequence)
-
- def visit_sequence(self, sequence):
- raise NotImplementedError(
- "Dialect '%s' does not support sequence increments." % self.dialect.name
- )
-
- def function_argspec(self, func, **kwargs):
- return func.clause_expr._compiler_dispatch(self, **kwargs)
-
- def visit_compound_select(self, cs, asfrom=False,
- parens=True, compound_index=1, **kwargs):
- entry = self.stack and self.stack[-1] or {}
- self.stack.append({'from':entry.get('from', None), 'iswrapper':True})
-
- keyword = self.compound_keywords.get(cs.keyword)
-
- text = (" " + keyword + " ").join(
- (c._compiler_dispatch(self,
- asfrom=asfrom, parens=False,
- compound_index=i, **kwargs)
- for i, c in enumerate(cs.selects))
- )
-
- group_by = cs._group_by_clause._compiler_dispatch(
- self, asfrom=asfrom, **kwargs)
- if group_by:
- text += " GROUP BY " + group_by
-
- text += self.order_by_clause(cs, **kwargs)
- text += (cs._limit is not None or cs._offset is not None) and \
- self.limit_clause(cs) or ""
-
- self.stack.pop(-1)
- if asfrom and parens:
- return "(" + text + ")"
- else:
- return text
-
- def visit_unary(self, unary, **kw):
- s = unary.element._compiler_dispatch(self, **kw)
- if unary.operator:
- s = OPERATORS[unary.operator] + s
- if unary.modifier:
- s = s + OPERATORS[unary.modifier]
- return s
-
- def visit_binary(self, binary, **kw):
- # don't allow "? = ?" to render
- if self.ansi_bind_rules and \
- isinstance(binary.left, sql._BindParamClause) and \
- isinstance(binary.right, sql._BindParamClause):
- kw['literal_binds'] = True
-
- return self._operator_dispatch(binary.operator,
- binary,
- lambda opstr: binary.left._compiler_dispatch(self, **kw) +
- opstr +
- binary.right._compiler_dispatch(
- self, **kw),
- **kw
- )
-
- def visit_like_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return '%s LIKE %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def visit_notlike_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return '%s NOT LIKE %s' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def visit_ilike_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return 'lower(%s) LIKE lower(%s)' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def visit_notilike_op(self, binary, **kw):
- escape = binary.modifiers.get("escape", None)
- return 'lower(%s) NOT LIKE lower(%s)' % (
- binary.left._compiler_dispatch(self, **kw),
- binary.right._compiler_dispatch(self, **kw)) \
- + (escape and
- (' ESCAPE ' + self.render_literal_value(escape, None))
- or '')
-
- def _operator_dispatch(self, operator, element, fn, **kw):
- if util.callable(operator):
- disp = getattr(self, "visit_%s" % operator.__name__, None)
- if disp:
- return disp(element, **kw)
- else:
- return fn(OPERATORS[operator])
- else:
- return fn(" " + operator + " ")
-
- def visit_bindparam(self, bindparam, within_columns_clause=False,
- literal_binds=False, **kwargs):
-
- if literal_binds or \
- (within_columns_clause and \
- self.ansi_bind_rules):
- if bindparam.value is None:
- raise exc.CompileError("Bind parameter without a "
- "renderable value not allowed here.")
- return self.render_literal_bindparam(bindparam,
- within_columns_clause=True, **kwargs)
-
- name = self._truncate_bindparam(bindparam)
-
- if name in self.binds:
- existing = self.binds[name]
- if existing is not bindparam:
- if existing.unique or bindparam.unique:
- raise exc.CompileError(
- "Bind parameter '%s' conflicts with "
- "unique bind parameter of the same name" %
- bindparam.key
- )
- elif getattr(existing, '_is_crud', False) or \
- getattr(bindparam, '_is_crud', False):
- raise exc.CompileError(
- "bindparam() name '%s' is reserved "
- "for automatic usage in the VALUES or SET "
- "clause of this "
- "insert/update statement. Please use a "
- "name other than column name when using bindparam() "
- "with insert() or update() (for example, 'b_%s')."
- % (bindparam.key, bindparam.key)
- )
-
- self.binds[bindparam.key] = self.binds[name] = bindparam
-
- return self.bindparam_string(name)
-
- def render_literal_bindparam(self, bindparam, **kw):
- value = bindparam.value
- processor = bindparam.type._cached_bind_processor(self.dialect)
- if processor:
- value = processor(value)
- return self.render_literal_value(value, bindparam.type)
-
- def render_literal_value(self, value, type_):
- """Render the value of a bind parameter as a quoted literal.
-
- This is used for statement sections that do not accept bind parameters
- on the target driver/database.
-
- This should be implemented by subclasses using the quoting services
- of the DBAPI.
-
- """
- if isinstance(value, basestring):
- value = value.replace("'", "''")
- return "'%s'" % value
- elif value is None:
- return "NULL"
- elif isinstance(value, (float, int, long)):
- return repr(value)
- elif isinstance(value, decimal.Decimal):
- return str(value)
- else:
- raise NotImplementedError(
- "Don't know how to literal-quote value %r" % value)
-
- def _truncate_bindparam(self, bindparam):
- if bindparam in self.bind_names:
- return self.bind_names[bindparam]
-
- bind_name = bindparam.key
- if isinstance(bind_name, sql._generated_label):
- bind_name = self._truncated_identifier("bindparam", bind_name)
-
- # add to bind_names for translation
- self.bind_names[bindparam] = bind_name
-
- return bind_name
-
- def _truncated_identifier(self, ident_class, name):
- if (ident_class, name) in self.truncated_names:
- return self.truncated_names[(ident_class, name)]
-
- anonname = name % self.anon_map
-
- if len(anonname) > self.label_length:
- counter = self.truncated_names.get(ident_class, 1)
- truncname = anonname[0:max(self.label_length - 6, 0)] + \
- "_" + hex(counter)[2:]
- self.truncated_names[ident_class] = counter + 1
- else:
- truncname = anonname
- self.truncated_names[(ident_class, name)] = truncname
- return truncname
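# Editorial sketch of the truncation rule above, as standalone Python:
# an over-length anonymous identifier is cut and suffixed with "_" plus
# a hex counter (values here are illustrative).
label_length = 10
counter = 1
anonname = 'some_very_long_generated_label'
if len(anonname) > label_length:
    truncname = anonname[0:max(label_length - 6, 0)] + "_" + hex(counter)[2:]
else:
    truncname = anonname
print truncname    # some_1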
-
- def _anonymize(self, name):
- return name % self.anon_map
-
- def _process_anon(self, key):
- (ident, derived) = key.split(' ', 1)
- anonymous_counter = self.anon_map.get(derived, 1)
- self.anon_map[derived] = anonymous_counter + 1
- return derived + "_" + str(anonymous_counter)
-
- def bindparam_string(self, name):
- if self.positional:
- self.positiontup.append(name)
- return self.bindtemplate % {
- 'name':name, 'position':len(self.positiontup)}
- else:
- return self.bindtemplate % {'name':name}
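# Editorial sketch of the paramstyle templates this method fills in (the
# real BIND_TEMPLATES mapping is defined elsewhere in this module; the
# values below are reconstructed and illustrative).
templates = {
    'qmark':    '?',
    'numeric':  ':%(position)s',
    'named':    ':%(name)s',
    'format':   '%%s',
    'pyformat': '%%(%(name)s)s',
}
print templates['named'] % {'name': 'user_id', 'position': 1}     # :user_id
print templates['pyformat'] % {'name': 'user_id', 'position': 1}  # %(user_id)s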
-
- def visit_alias(self, alias, asfrom=False, ashint=False,
- fromhints=None, **kwargs):
- if asfrom or ashint:
- if isinstance(alias.name, sql._generated_label):
- alias_name = self._truncated_identifier("alias", alias.name)
- else:
- alias_name = alias.name
-
- if ashint:
- return self.preparer.format_alias(alias, alias_name)
- elif asfrom:
- ret = alias.original._compiler_dispatch(self,
- asfrom=True, **kwargs) + \
- " AS " + \
- self.preparer.format_alias(alias, alias_name)
-
- if fromhints and alias in fromhints:
- hinttext = self.get_from_hint_text(alias, fromhints[alias])
- if hinttext:
- ret += " " + hinttext
-
- return ret
- else:
- return alias.original._compiler_dispatch(self, **kwargs)
-
- def label_select_column(self, select, column, asfrom):
- """label columns present in a select()."""
-
- if isinstance(column, sql._Label):
- return column
-
- elif select is not None and select.use_labels and column._label:
- return _CompileLabel(column, column._label)
-
- elif \
- asfrom and \
- isinstance(column, sql.ColumnClause) and \
- not column.is_literal and \
- column.table is not None and \
- not isinstance(column.table, sql.Select):
- return _CompileLabel(column, sql._generated_label(column.name))
- elif not isinstance(column,
- (sql._UnaryExpression, sql._TextClause)) \
- and (not hasattr(column, 'name') or \
- isinstance(column, sql.Function)):
- return _CompileLabel(column, column.anon_label)
- else:
- return column
-
- def get_select_hint_text(self, byfroms):
- return None
-
- def get_from_hint_text(self, table, text):
- return None
-
- def visit_select(self, select, asfrom=False, parens=True,
- iswrapper=False, fromhints=None,
- compound_index=1, **kwargs):
-
- entry = self.stack and self.stack[-1] or {}
-
- existingfroms = entry.get('from', None)
-
- froms = select._get_display_froms(existingfroms)
-
- correlate_froms = set(sql._from_objects(*froms))
-
- # TODO: might want to propagate existing froms for
- # select(select(select)) where innermost select should correlate
- # to outermost if existingfroms: correlate_froms =
- # correlate_froms.union(existingfroms)
-
- self.stack.append({'from': correlate_froms, 'iswrapper': iswrapper})
-
- if compound_index==1 and not entry or entry.get('iswrapper', False):
- column_clause_args = {'result_map':self.result_map}
- else:
- column_clause_args = {}
-
- # the actual list of columns to print in the SELECT column list.
- inner_columns = [
- c for c in [
- self.label_select_column(select, co, asfrom=asfrom).\
- _compiler_dispatch(self,
- within_columns_clause=True,
- **column_clause_args)
- for co in util.unique_list(select.inner_columns)
- ]
- if c is not None
- ]
-
- text = "SELECT " # we're off to a good start !
-
- if select._hints:
- byfrom = dict([
- (from_, hinttext % {
- 'name':from_._compiler_dispatch(
- self, ashint=True)
- })
- for (from_, dialect), hinttext in
- select._hints.iteritems()
- if dialect in ('*', self.dialect.name)
- ])
- hint_text = self.get_select_hint_text(byfrom)
- if hint_text:
- text += hint_text + " "
-
- if select._prefixes:
- text += " ".join(
- x._compiler_dispatch(self, **kwargs)
- for x in select._prefixes) + " "
- text += self.get_select_precolumns(select)
- text += ', '.join(inner_columns)
-
- if froms:
- text += " \nFROM "
-
- if select._hints:
- text += ', '.join([f._compiler_dispatch(self,
- asfrom=True, fromhints=byfrom,
- **kwargs)
- for f in froms])
- else:
- text += ', '.join([f._compiler_dispatch(self,
- asfrom=True, **kwargs)
- for f in froms])
- else:
- text += self.default_from()
-
- if select._whereclause is not None:
- t = select._whereclause._compiler_dispatch(self, **kwargs)
- if t:
- text += " \nWHERE " + t
-
- if select._group_by_clause.clauses:
- group_by = select._group_by_clause._compiler_dispatch(
- self, **kwargs)
- if group_by:
- text += " GROUP BY " + group_by
-
- if select._having is not None:
- t = select._having._compiler_dispatch(self, **kwargs)
- if t:
- text += " \nHAVING " + t
-
- if select._order_by_clause.clauses:
- text += self.order_by_clause(select, **kwargs)
- if select._limit is not None or select._offset is not None:
- text += self.limit_clause(select)
- if select.for_update:
- text += self.for_update_clause(select)
-
- self.stack.pop(-1)
-
- if asfrom and parens:
- return "(" + text + ")"
- else:
- return text
-
- def get_select_precolumns(self, select):
- """Called when building a ``SELECT`` statement, position is just
- before column list.
-
- """
- return select._distinct and "DISTINCT " or ""
-
- def order_by_clause(self, select, **kw):
- order_by = select._order_by_clause._compiler_dispatch(self, **kw)
- if order_by:
- return " ORDER BY " + order_by
- else:
- return ""
-
- def for_update_clause(self, select):
- if select.for_update:
- return " FOR UPDATE"
- else:
- return ""
-
- def limit_clause(self, select):
- text = ""
- if select._limit is not None:
- text += "\n LIMIT " + self.process(sql.literal(select._limit))
- if select._offset is not None:
- if select._limit is None:
- text += "\n LIMIT -1"
- text += " OFFSET " + self.process(sql.literal(select._offset))
- return text
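# Editorial example of the generic rendering above: an OFFSET without a
# LIMIT is padded with "LIMIT -1" (hypothetical table; whitespace flattened).
from sqlalchemy import table, column, select

t = table('t', column('x'))
print select([t.c.x]).offset(10)
# SELECT t.x FROM t LIMIT -1 OFFSET :param_1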
-
- def visit_table(self, table, asfrom=False, ashint=False,
- fromhints=None, **kwargs):
- if asfrom or ashint:
- if getattr(table, "schema", None):
- ret = self.preparer.quote_schema(table.schema,
- table.quote_schema) + \
- "." + self.preparer.quote(table.name,
- table.quote)
- else:
- ret = self.preparer.quote(table.name, table.quote)
- if fromhints and table in fromhints:
- hinttext = self.get_from_hint_text(table, fromhints[table])
- if hinttext:
- ret += " " + hinttext
- return ret
- else:
- return ""
-
- def visit_join(self, join, asfrom=False, **kwargs):
- return (
- join.left._compiler_dispatch(self, asfrom=True, **kwargs) +
- (join.isouter and " LEFT OUTER JOIN " or " JOIN ") +
- join.right._compiler_dispatch(self, asfrom=True, **kwargs) +
- " ON " +
- join.onclause._compiler_dispatch(self, **kwargs)
- )
-
- def visit_insert(self, insert_stmt):
- self.isinsert = True
- colparams = self._get_colparams(insert_stmt)
-
- if not colparams and \
- not self.dialect.supports_default_values and \
- not self.dialect.supports_empty_insert:
- raise exc.CompileError("The version of %s you are using does "
- "not support empty inserts." %
- self.dialect.name)
-
- preparer = self.preparer
- supports_default_values = self.dialect.supports_default_values
-
- text = "INSERT"
-
- prefixes = [self.process(x) for x in insert_stmt._prefixes]
- if prefixes:
- text += " " + " ".join(prefixes)
-
- text += " INTO " + preparer.format_table(insert_stmt.table)
-
- if colparams or not supports_default_values:
- text += " (%s)" % ', '.join([preparer.format_column(c[0])
- for c in colparams])
-
- if self.returning or insert_stmt._returning:
- self.returning = self.returning or insert_stmt._returning
- returning_clause = self.returning_clause(
- insert_stmt, self.returning)
-
- if self.returning_precedes_values:
- text += " " + returning_clause
-
- if not colparams and supports_default_values:
- text += " DEFAULT VALUES"
- else:
- text += " VALUES (%s)" % \
- ', '.join([c[1] for c in colparams])
-
- if self.returning and not self.returning_precedes_values:
- text += " " + returning_clause
-
- return text
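# Editorial example: what this visitor produces for a hypothetical table,
# with and without explicit values.
from sqlalchemy import table, column, insert

t = table('users', column('id'), column('name'))
print insert(t)
# INSERT INTO users (id, name) VALUES (:id, :name)
print insert(t).values(name='ed')
# INSERT INTO users (name) VALUES (:name)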
-
- def visit_update(self, update_stmt):
- self.stack.append({'from': set([update_stmt.table])})
-
- self.isupdate = True
- colparams = self._get_colparams(update_stmt)
-
- text = "UPDATE " + self.preparer.format_table(update_stmt.table)
-
- text += ' SET ' + \
- ', '.join(
- self.preparer.quote(c[0].name, c[0].quote) +
- '=' + c[1]
- for c in colparams
- )
-
- if update_stmt._returning:
- self.returning = update_stmt._returning
- if self.returning_precedes_values:
- text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
-
- if update_stmt._whereclause is not None:
- text += " WHERE " + self.process(update_stmt._whereclause)
-
- if self.returning and not self.returning_precedes_values:
- text += " " + self.returning_clause(
- update_stmt, update_stmt._returning)
-
- self.stack.pop(-1)
-
- return text
-
- def _create_crud_bind_param(self, col, value, required=False):
- bindparam = sql.bindparam(col.key, value,
- type_=col.type, required=required)
- bindparam._is_crud = True
- return bindparam._compiler_dispatch(self)
-
-
- def _get_colparams(self, stmt):
- """create a set of tuples representing column/string pairs for use
- in an INSERT or UPDATE statement.
-
- Also generates the Compiled object's postfetch, prefetch, and
- returning column collections, used for default handling and ultimately
- populating the ResultProxy's prefetch_cols() and postfetch_cols()
- collections.
-
- """
-
- self.postfetch = []
- self.prefetch = []
- self.returning = []
-
- # no parameters in the statement, no parameters in the
- # compiled params - return binds for all columns
- if self.column_keys is None and stmt.parameters is None:
- return [
- (c, self._create_crud_bind_param(c,
- None, required=True))
- for c in stmt.table.columns
- ]
-
- required = object()
-
- # if we have statement parameters - set defaults in the
- # compiled params
- if self.column_keys is None:
- parameters = {}
- else:
- parameters = dict((sql._column_as_key(key), required)
- for key in self.column_keys
- if not stmt.parameters or
- key not in stmt.parameters)
-
- if stmt.parameters is not None:
- for k, v in stmt.parameters.iteritems():
- parameters.setdefault(sql._column_as_key(k), v)
-
- # create a list of column assignment clauses as tuples
- values = []
-
- need_pks = self.isinsert and \
- not self.inline and \
- not stmt._returning
-
- implicit_returning = need_pks and \
- self.dialect.implicit_returning and \
- stmt.table.implicit_returning
-
- postfetch_lastrowid = need_pks and self.dialect.postfetch_lastrowid
-
- # iterating through columns at the top to maintain ordering.
- # otherwise we might iterate through individual sets of
- # "defaults", "primary key cols", etc.
- for c in stmt.table.columns:
- if c.key in parameters:
- value = parameters[c.key]
- if sql._is_literal(value):
- value = self._create_crud_bind_param(
- c, value, required=value is required)
- elif c.primary_key and implicit_returning:
- self.returning.append(c)
- value = self.process(value.self_group())
- else:
- self.postfetch.append(c)
- value = self.process(value.self_group())
- values.append((c, value))
-
- elif self.isinsert:
- if c.primary_key and \
- need_pks and \
- (
- implicit_returning or
- not postfetch_lastrowid or
- c is not stmt.table._autoincrement_column
- ):
-
- if implicit_returning:
- if c.default is not None:
- if c.default.is_sequence:
- if self.dialect.supports_sequences and \
- (not c.default.optional or \
- not self.dialect.sequences_optional):
- proc = self.process(c.default)
- values.append((c, proc))
- self.returning.append(c)
- elif c.default.is_clause_element:
- values.append(
- (c,
- self.process(c.default.arg.self_group()))
- )
- self.returning.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- else:
- self.returning.append(c)
- else:
- if c.default is not None or \
- c is stmt.table._autoincrement_column and (
- self.dialect.supports_sequences or
- self.dialect.preexecute_autoincrement_sequences
- ):
-
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
-
- self.prefetch.append(c)
-
- elif c.default is not None:
- if c.default.is_sequence:
- if self.dialect.supports_sequences and \
- (not c.default.optional or \
- not self.dialect.sequences_optional):
- proc = self.process(c.default)
- values.append((c, proc))
- if not c.primary_key:
- self.postfetch.append(c)
- elif c.default.is_clause_element:
- values.append(
- (c, self.process(c.default.arg.self_group()))
- )
-
- if not c.primary_key:
- # don't add the primary key column to postfetch
- self.postfetch.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- elif c.server_default is not None:
- if not c.primary_key:
- self.postfetch.append(c)
-
- elif self.isupdate:
- if c.onupdate is not None and not c.onupdate.is_sequence:
- if c.onupdate.is_clause_element:
- values.append(
- (c, self.process(c.onupdate.arg.self_group()))
- )
- self.postfetch.append(c)
- else:
- values.append(
- (c, self._create_crud_bind_param(c, None))
- )
- self.prefetch.append(c)
- elif c.server_onupdate is not None:
- self.postfetch.append(c)
- return values
-
- def visit_delete(self, delete_stmt):
- self.stack.append({'from': set([delete_stmt.table])})
- self.isdelete = True
-
- text = "DELETE FROM " + self.preparer.format_table(delete_stmt.table)
-
- if delete_stmt._returning:
- self.returning = delete_stmt._returning
- if self.returning_precedes_values:
- text += " " + self.returning_clause(
- delete_stmt, delete_stmt._returning)
-
- if delete_stmt._whereclause is not None:
- text += " WHERE " + self.process(delete_stmt._whereclause)
-
- if self.returning and not self.returning_precedes_values:
- text += " " + self.returning_clause(
- delete_stmt, delete_stmt._returning)
-
- self.stack.pop(-1)
-
- return text
-
- def visit_savepoint(self, savepoint_stmt):
- return "SAVEPOINT %s" % self.preparer.format_savepoint(savepoint_stmt)
-
- def visit_rollback_to_savepoint(self, savepoint_stmt):
- return "ROLLBACK TO SAVEPOINT %s" % \
- self.preparer.format_savepoint(savepoint_stmt)
-
- def visit_release_savepoint(self, savepoint_stmt):
- return "RELEASE SAVEPOINT %s" % \
- self.preparer.format_savepoint(savepoint_stmt)
-
-
-class DDLCompiler(engine.Compiled):
-
- @util.memoized_property
- def sql_compiler(self):
- return self.dialect.statement_compiler(self.dialect, None)
-
- @property
- def preparer(self):
- return self.dialect.identifier_preparer
-
- def construct_params(self, params=None):
- return None
-
- def visit_ddl(self, ddl, **kwargs):
- # table events can substitute table and schema name
- context = ddl.context
- if isinstance(ddl.target, schema.Table):
- context = context.copy()
-
- preparer = self.dialect.identifier_preparer
- path = preparer.format_table_seq(ddl.target)
- if len(path) == 1:
- table, sch = path[0], ''
- else:
- table, sch = path[-1], path[0]
-
- context.setdefault('table', table)
- context.setdefault('schema', sch)
- context.setdefault('fullname', preparer.format_table(ddl.target))
-
- return self.sql_compiler.post_process_text(ddl.statement % context)
-
- def visit_create_table(self, create):
- table = create.element
- preparer = self.dialect.identifier_preparer
-
- text = "\n" + " ".join(['CREATE'] + \
- table._prefixes + \
- ['TABLE',
- preparer.format_table(table),
- "("])
- separator = "\n"
-
- # if only one primary key, specify it along with the column
- first_pk = False
- for column in table.columns:
- text += separator
- separator = ", \n"
- text += "\t" + self.get_column_specification(
- column,
- first_pk=column.primary_key and \
- not first_pk
- )
- if column.primary_key:
- first_pk = True
- const = " ".join(self.process(constraint) \
- for constraint in column.constraints)
- if const:
- text += " " + const
-
- const = self.create_table_constraints(table)
- if const:
- text += ", \n\t" + const
-
- text += "\n)%s\n\n" % self.post_create_table(table)
- return text
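# Editorial example: DDL emitted by this method for a simple hypothetical
# table, compiled on the generic dialect.
from sqlalchemy import MetaData, Table, Column, Integer, String
from sqlalchemy.schema import CreateTable

m = MetaData()
t = Table('users', m,
          Column('id', Integer, primary_key=True),
          Column('name', String(50)))
print CreateTable(t)
# CREATE TABLE users (
#     id INTEGER NOT NULL,
#     name VARCHAR(50),
#     PRIMARY KEY (id)
# )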
-
- def create_table_constraints(self, table):
-
- # On some databases the order is significant: visit the PK first,
- # then the other constraints (engine.ReflectionTest.testbasic failed on FB2)
- constraints = []
- if table.primary_key:
- constraints.append(table.primary_key)
-
- constraints.extend([c for c in table._sorted_constraints
- if c is not table.primary_key])
-
- return ", \n\t".join(p for p in
- (self.process(constraint)
- for constraint in constraints
- if (
- constraint._create_rule is None or
- constraint._create_rule(self))
- and (
- not self.dialect.supports_alter or
- not getattr(constraint, 'use_alter', False)
- )) if p is not None
- )
-
- def visit_drop_table(self, drop):
- return "\nDROP TABLE " + self.preparer.format_table(drop.element)
-
- def _index_identifier(self, ident):
- if isinstance(ident, sql._generated_label):
- max = self.dialect.max_index_name_length or \
- self.dialect.max_identifier_length
- if len(ident) > max:
- return ident[0:max - 8] + \
- "_" + util.md5_hex(ident)[-4:]
- else:
- return ident
- else:
- self.dialect.validate_identifier(ident)
- return ident
-
- def visit_create_index(self, create):
- index = create.element
- preparer = self.preparer
- text = "CREATE "
- if index.unique:
- text += "UNIQUE "
- text += "INDEX %s ON %s (%s)" \
- % (preparer.quote(self._index_identifier(index.name),
- index.quote),
- preparer.format_table(index.table),
- ', '.join(preparer.quote(c.name, c.quote)
- for c in index.columns))
- return text
-
- def visit_drop_index(self, drop):
- index = drop.element
- return "\nDROP INDEX " + \
- self.preparer.quote(
- self._index_identifier(index.name), index.quote)
-
- def visit_add_constraint(self, create):
- preparer = self.preparer
- return "ALTER TABLE %s ADD %s" % (
- self.preparer.format_table(create.element.table),
- self.process(create.element)
- )
-
- def visit_create_sequence(self, create):
- text = "CREATE SEQUENCE %s" % \
- self.preparer.format_sequence(create.element)
- if create.element.increment is not None:
- text += " INCREMENT BY %d" % create.element.increment
- if create.element.start is not None:
- text += " START WITH %d" % create.element.start
- return text
-
- def visit_drop_sequence(self, drop):
- return "DROP SEQUENCE %s" % \
- self.preparer.format_sequence(drop.element)
-
- def visit_drop_constraint(self, drop):
- preparer = self.preparer
- return "ALTER TABLE %s DROP CONSTRAINT %s%s" % (
- self.preparer.format_table(drop.element.table),
- self.preparer.format_constraint(drop.element),
- drop.cascade and " CASCADE" or ""
- )
-
- def get_column_specification(self, column, **kwargs):
- colspec = self.preparer.format_column(column) + " " + \
- self.dialect.type_compiler.process(column.type)
- default = self.get_column_default_string(column)
- if default is not None:
- colspec += " DEFAULT " + default
-
- if not column.nullable:
- colspec += " NOT NULL"
- return colspec
-
- def post_create_table(self, table):
- return ''
-
- def get_column_default_string(self, column):
- if isinstance(column.server_default, schema.DefaultClause):
- if isinstance(column.server_default.arg, basestring):
- return "'%s'" % column.server_default.arg
- else:
- return self.sql_compiler.process(column.server_default.arg)
- else:
- return None
-
- def visit_check_constraint(self, constraint):
- text = ""
- if constraint.name is not None:
- text += "CONSTRAINT %s " % \
- self.preparer.format_constraint(constraint)
- sqltext = sql_util.expression_as_ddl(constraint.sqltext)
- text += "CHECK (%s)" % self.sql_compiler.process(sqltext)
- text += self.define_constraint_deferrability(constraint)
- return text
-
- def visit_column_check_constraint(self, constraint):
- text = "CHECK (%s)" % constraint.sqltext
- text += self.define_constraint_deferrability(constraint)
- return text
-
- def visit_primary_key_constraint(self, constraint):
- if len(constraint) == 0:
- return ''
- text = ""
- if constraint.name is not None:
- text += "CONSTRAINT %s " % \
- self.preparer.format_constraint(constraint)
- text += "PRIMARY KEY "
- text += "(%s)" % ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint)
- text += self.define_constraint_deferrability(constraint)
- return text
-
- def visit_foreign_key_constraint(self, constraint):
- preparer = self.dialect.identifier_preparer
- text = ""
- if constraint.name is not None:
- text += "CONSTRAINT %s " % \
- preparer.format_constraint(constraint)
- remote_table = list(constraint._elements.values())[0].column.table
- text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
- for f in constraint._elements.values()),
- self.define_constraint_remote_table(
- constraint, remote_table, preparer),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
- for f in constraint._elements.values())
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
- return text
-
- def define_constraint_remote_table(self, constraint, table, preparer):
- """Format the remote table clause of a CREATE CONSTRAINT clause."""
-
- return preparer.format_table(table)
-
- def visit_unique_constraint(self, constraint):
- text = ""
- if constraint.name is not None:
- text += "CONSTRAINT %s " % \
- self.preparer.format_constraint(constraint)
- text += "UNIQUE (%s)" % (
- ', '.join(self.preparer.quote(c.name, c.quote)
- for c in constraint))
- text += self.define_constraint_deferrability(constraint)
- return text
-
- def define_constraint_cascades(self, constraint):
- text = ""
- if constraint.ondelete is not None:
- text += " ON DELETE %s" % constraint.ondelete
- if constraint.onupdate is not None:
- text += " ON UPDATE %s" % constraint.onupdate
- return text
-
- def define_constraint_deferrability(self, constraint):
- text = ""
- if constraint.deferrable is not None:
- if constraint.deferrable:
- text += " DEFERRABLE"
- else:
- text += " NOT DEFERRABLE"
- if constraint.initially is not None:
- text += " INITIALLY %s" % constraint.initially
- return text
-
-
-class GenericTypeCompiler(engine.TypeCompiler):
- def visit_CHAR(self, type_):
- return "CHAR" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_NCHAR(self, type_):
- return "NCHAR" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_FLOAT(self, type_):
- return "FLOAT"
-
- def visit_REAL(self, type_):
- return "REAL"
-
- def visit_NUMERIC(self, type_):
- if type_.precision is None:
- return "NUMERIC"
- elif type_.scale is None:
- return "NUMERIC(%(precision)s)" % \
- {'precision': type_.precision}
- else:
- return "NUMERIC(%(precision)s, %(scale)s)" % \
- {'precision': type_.precision,
- 'scale' : type_.scale}
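# Editorial example of the three NUMERIC renderings above, using the
# default dialect's type compiler:
from sqlalchemy import Numeric
from sqlalchemy.engine import default

tc = default.DefaultDialect().type_compiler
print tc.process(Numeric())        # NUMERIC
print tc.process(Numeric(10))      # NUMERIC(10)
print tc.process(Numeric(10, 2))   # NUMERIC(10, 2)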
-
- def visit_DECIMAL(self, type_):
- return "DECIMAL"
-
- def visit_INTEGER(self, type_):
- return "INTEGER"
-
- def visit_SMALLINT(self, type_):
- return "SMALLINT"
-
- def visit_BIGINT(self, type_):
- return "BIGINT"
-
- def visit_TIMESTAMP(self, type_):
- return 'TIMESTAMP'
-
- def visit_DATETIME(self, type_):
- return "DATETIME"
-
- def visit_DATE(self, type_):
- return "DATE"
-
- def visit_TIME(self, type_):
- return "TIME"
-
- def visit_CLOB(self, type_):
- return "CLOB"
-
- def visit_NCLOB(self, type_):
- return "NCLOB"
-
- def visit_VARCHAR(self, type_):
- return "VARCHAR" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_NVARCHAR(self, type_):
- return "NVARCHAR" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_BLOB(self, type_):
- return "BLOB"
-
- def visit_BINARY(self, type_):
- return "BINARY" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_VARBINARY(self, type_):
- return "VARBINARY" + (type_.length and "(%d)" % type_.length or "")
-
- def visit_BOOLEAN(self, type_):
- return "BOOLEAN"
-
- def visit_TEXT(self, type_):
- return "TEXT"
-
- def visit_large_binary(self, type_):
- return self.visit_BLOB(type_)
-
- def visit_boolean(self, type_):
- return self.visit_BOOLEAN(type_)
-
- def visit_time(self, type_):
- return self.visit_TIME(type_)
-
- def visit_datetime(self, type_):
- return self.visit_DATETIME(type_)
-
- def visit_date(self, type_):
- return self.visit_DATE(type_)
-
- def visit_big_integer(self, type_):
- return self.visit_BIGINT(type_)
-
- def visit_small_integer(self, type_):
- return self.visit_SMALLINT(type_)
-
- def visit_integer(self, type_):
- return self.visit_INTEGER(type_)
-
- def visit_real(self, type_):
- return self.visit_REAL(type_)
-
- def visit_float(self, type_):
- return self.visit_FLOAT(type_)
-
- def visit_numeric(self, type_):
- return self.visit_NUMERIC(type_)
-
- def visit_string(self, type_):
- return self.visit_VARCHAR(type_)
-
- def visit_unicode(self, type_):
- return self.visit_VARCHAR(type_)
-
- def visit_text(self, type_):
- return self.visit_TEXT(type_)
-
- def visit_unicode_text(self, type_):
- return self.visit_TEXT(type_)
-
- def visit_enum(self, type_):
- return self.visit_VARCHAR(type_)
-
- def visit_null(self, type_):
- raise NotImplementedError("Can't generate DDL for the null type")
-
- def visit_type_decorator(self, type_):
- return self.process(type_.type_engine(self.dialect))
-
- def visit_user_defined(self, type_):
- return type_.get_col_spec()
-
-class IdentifierPreparer(object):
- """Handle quoting and case-folding of identifiers based on options."""
-
- reserved_words = RESERVED_WORDS
-
- legal_characters = LEGAL_CHARACTERS
-
- illegal_initial_characters = ILLEGAL_INITIAL_CHARACTERS
-
- def __init__(self, dialect, initial_quote='"',
- final_quote=None, escape_quote='"', omit_schema=False):
- """Construct a new ``IdentifierPreparer`` object.
-
- initial_quote
- Character that begins a delimited identifier.
-
- final_quote
- Character that ends a delimited identifier. Defaults to
- `initial_quote`.
-
- omit_schema
- Prevent prepending schema name. Useful for databases that do
- not support schemas.
- """
-
- self.dialect = dialect
- self.initial_quote = initial_quote
- self.final_quote = final_quote or self.initial_quote
- self.escape_quote = escape_quote
- self.escape_to_quote = self.escape_quote * 2
- self.omit_schema = omit_schema
- self._strings = {}
-
- def _escape_identifier(self, value):
- """Escape an identifier.
-
- Subclasses should override this to provide database-dependent
- escaping behavior.
- """
-
- return value.replace(self.escape_quote, self.escape_to_quote)
-
- def _unescape_identifier(self, value):
- """Canonicalize an escaped identifier.
-
- Subclasses should override this to provide database-dependent
- unescaping behavior that reverses _escape_identifier.
- """
-
- return value.replace(self.escape_to_quote, self.escape_quote)
-
- def quote_identifier(self, value):
- """Quote an identifier.
-
- Subclasses should override this to provide database-dependent
- quoting behavior.
- """
-
- return self.initial_quote + \
- self._escape_identifier(value) + \
- self.final_quote
-
- def _requires_quotes(self, value):
- """Return True if the given identifier requires quoting."""
- lc_value = value.lower()
- return (lc_value in self.reserved_words
- or value[0] in self.illegal_initial_characters
- or not self.legal_characters.match(unicode(value))
- or (lc_value != value))
-
- def quote_schema(self, schema, force):
- """Quote a schema.
-
- Subclasses should override this to provide database-dependent
- quoting behavior.
- """
- return self.quote(schema, force)
-
- def quote(self, ident, force):
- if force is None:
- if ident in self._strings:
- return self._strings[ident]
- else:
- if self._requires_quotes(ident):
- self._strings[ident] = self.quote_identifier(ident)
- else:
- self._strings[ident] = ident
- return self._strings[ident]
- elif force:
- return self.quote_identifier(ident)
- else:
- return ident
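# Editorial sketch of quote() with force=None on a default-configured
# preparer: reserved words and case-sensitive names get delimited, plain
# lowercase identifiers pass through.
from sqlalchemy.engine import default

prep = default.DefaultDialect().identifier_preparer
print prep.quote('user_id', None)     # user_id
print prep.quote('select', None)      # "select"
print prep.quote('MixedCase', None)   # "MixedCase"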
-
- def format_sequence(self, sequence, use_schema=True):
- name = self.quote(sequence.name, sequence.quote)
- if not self.omit_schema and use_schema and \
- sequence.schema is not None:
- name = self.quote_schema(sequence.schema, sequence.quote) + \
- "." + name
- return name
-
- def format_label(self, label, name=None):
- return self.quote(name or label.name, label.quote)
-
- def format_alias(self, alias, name=None):
- return self.quote(name or alias.name, alias.quote)
-
- def format_savepoint(self, savepoint, name=None):
- return self.quote(name or savepoint.ident, savepoint.quote)
-
- def format_constraint(self, constraint):
- return self.quote(constraint.name, constraint.quote)
-
- def format_table(self, table, use_schema=True, name=None):
- """Prepare a quoted table and schema name."""
-
- if name is None:
- name = table.name
- result = self.quote(name, table.quote)
- if not self.omit_schema and use_schema \
- and getattr(table, "schema", None):
- result = self.quote_schema(table.schema, table.quote_schema) + \
- "." + result
- return result
-
- def format_column(self, column, use_table=False,
- name=None, table_name=None):
- """Prepare a quoted column name."""
-
- if name is None:
- name = column.name
- if not getattr(column, 'is_literal', False):
- if use_table:
- return self.format_table(
- column.table, use_schema=False,
- name=table_name) + "." + \
- self.quote(name, column.quote)
- else:
- return self.quote(name, column.quote)
- else:
- # literal textual elements get stuck into ColumnClause a lot,
- # which shouldn't get quoted
-
- if use_table:
- return self.format_table(column.table,
- use_schema=False, name=table_name) + '.' + name
- else:
- return name
-
- def format_table_seq(self, table, use_schema=True):
- """Format table name and schema as a tuple."""
-
- # Dialects with more levels in their fully qualified references
- # ('database', 'owner', etc.) could override this and return
- # a longer sequence.
-
- if not self.omit_schema and use_schema and \
- getattr(table, 'schema', None):
- return (self.quote_schema(table.schema, table.quote_schema),
- self.format_table(table, use_schema=False))
- else:
- return (self.format_table(table, use_schema=False), )
-
- @util.memoized_property
- def _r_identifiers(self):
- initial, final, escaped_final = \
- [re.escape(s) for s in
- (self.initial_quote, self.final_quote,
- self._escape_identifier(self.final_quote))]
- r = re.compile(
- r'(?:'
- r'(?:%(initial)s((?:%(escaped)s|[^%(final)s])+)%(final)s'
- r'|([^\.]+))(?=\.|$))+' %
- { 'initial': initial,
- 'final': final,
- 'escaped': escaped_final })
- return r
-
- def unformat_identifiers(self, identifiers):
- """Unpack 'schema.table.column'-like strings into components."""
-
- r = self._r_identifiers
- return [self._unescape_identifier(i)
- for i in [a or b for a, b in r.findall(identifiers)]]
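# Editorial example: splitting a dotted, partially-quoted reference back
# into its components with a default preparer.
from sqlalchemy.engine import default

prep = default.DefaultDialect().identifier_preparer
print prep.unformat_identifiers('myschema."Mixed.Name".col')
# ['myschema', 'Mixed.Name', 'col']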
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/expression.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/expression.py
deleted file mode 100755
index e06eb61b..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/expression.py
+++ /dev/null
@@ -1,5127 +0,0 @@
-# sql/expression.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Defines the base components of SQL expression trees.
-
-All components are derived from a common base class
-:class:`.ClauseElement`. Common behaviors are organized
-based on class hierarchies, in some cases via mixins.
-
-All object construction from this package occurs via functions which
-in some cases will construct composite :class:`.ClauseElement` structures
-together, and in other cases simply return a single :class:`.ClauseElement`
-constructed directly. The function interface affords a more "DSL-ish"
-feel to constructing SQL expressions and also allows future class
-reorganizations.
-
-Even though classes are not constructed directly from the outside,
-most classes which have additional public methods are considered to be
-public (i.e. have no leading underscore). Other classes which are
-"semi-public" are marked with a single leading underscore; these
-classes usually have few or no public methods and are less guaranteed
-to stay the same in future releases.
-
-"""
-
-import itertools, re
-from operator import attrgetter
-
-from sqlalchemy import util, exc
-from sqlalchemy.sql import operators
-from sqlalchemy.sql.visitors import Visitable, cloned_traverse
-import operator
-
-functions = util.importlater("sqlalchemy.sql", "functions")
-sqlutil = util.importlater("sqlalchemy.sql", "util")
-sqltypes = util.importlater("sqlalchemy", "types")
-default = util.importlater("sqlalchemy.engine", "default")
-
-__all__ = [
- 'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement',
- 'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select',
- 'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between',
- 'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct',
- 'except_', 'except_all', 'exists', 'extract', 'func', 'modifier',
- 'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label',
- 'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast',
- 'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery', 'table', 'text',
- 'tuple_', 'type_coerce', 'union', 'union_all', 'update', ]
-
-PARSE_AUTOCOMMIT = util.symbol('PARSE_AUTOCOMMIT')
-
-def nullsfirst(column):
- """Return a NULLS FIRST ``ORDER BY`` clause element.
-
- e.g.::
-
- order_by = [desc(table1.mycol).nullsfirst()]
-
- """
- return _UnaryExpression(column, modifier=operators.nullsfirst_op)
-
-def nullslast(column):
- """Return a NULLS LAST ``ORDER BY`` clause element.
-
- e.g.::
-
- order_by = [desc(table1.mycol).nullslast()]
-
- """
- return _UnaryExpression(column, modifier=operators.nullslast_op)
-
-def desc(column):
- """Return a descending ``ORDER BY`` clause element.
-
- e.g.::
-
- order_by = [desc(table1.mycol)]
-
- """
- return _UnaryExpression(column, modifier=operators.desc_op)
-
-def asc(column):
- """Return an ascending ``ORDER BY`` clause element.
-
- e.g.::
-
- order_by = [asc(table1.mycol)]
-
- """
- return _UnaryExpression(column, modifier=operators.asc_op)
-
-def outerjoin(left, right, onclause=None):
- """Return an ``OUTER JOIN`` clause element.
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the :func:`outerjoin()`
- method on any :class:`.FromClause`.
-
- left
- The left side of the join.
-
- right
- The right side of the join.
-
- onclause
- Optional criterion for the ``ON`` clause; if omitted, it is
- derived from the foreign key relationships established between
- left and right.
-
- To chain joins together, use the :func:`join()` or :func:`outerjoin()`
- methods on the resulting :class:`.Join` object.
-
- """
- return Join(left, right, onclause, isouter=True)
-
-def join(left, right, onclause=None, isouter=False):
- """Return a ``JOIN`` clause element (regular inner join).
-
- The returned object is an instance of :class:`.Join`.
-
- Similar functionality is also available via the :func:`join()` method
- on any :class:`.FromClause`.
-
- left
- The left side of the join.
-
- right
- The right side of the join.
-
- onclause
- Optional criterion for the ``ON`` clause; if omitted, it is
- derived from the foreign key relationships established between
- left and right.
-
- To chain joins together, use the :func:`join()` or :func:`outerjoin()`
- methods on the resulting :class:`.Join` object.
-
- """
- return Join(left, right, onclause, isouter)
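# Editorial example: inner vs. outer join between two hypothetical tables.
# The ON clause is given explicitly here, since plain table() objects
# carry no foreign key metadata from which to derive one.
from sqlalchemy import table, column, join, outerjoin

u = table('users', column('id'))
a = table('addresses', column('id'), column('user_id'))
print join(u, a, u.c.id == a.c.user_id)
# users JOIN addresses ON users.id = addresses.user_id
print outerjoin(u, a, u.c.id == a.c.user_id)
# users LEFT OUTER JOIN addresses ON users.id = addresses.user_id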
-
-def select(columns=None, whereclause=None, from_obj=[], **kwargs):
- """Returns a ``SELECT`` clause element.
-
- Similar functionality is also available via the :func:`select()`
- method on any :class:`.FromClause`.
-
- The returned object is an instance of :class:`.Select`.
-
- All arguments which accept :class:`.ClauseElement` arguments also accept
- string arguments, which will be converted as appropriate into
- either :func:`text()` or :func:`literal_column()` constructs.
-
- :param columns:
- A list of :class:`.ClauseElement` objects, typically
- :class:`.ColumnElement` objects or subclasses, which will form the
- columns clause of the resulting statement. For all members which are
- instances of :class:`.Selectable`, the individual :class:`.ColumnElement`
- members of the :class:`.Selectable` will be added individually to the
- columns clause. For example, specifying a
- :class:`~sqlalchemy.schema.Table` instance will result in all the
- contained :class:`~sqlalchemy.schema.Column` objects within to be added
- to the columns clause.
-
- This argument is not present on the form of :func:`select()`
- available on :class:`~sqlalchemy.schema.Table`.
-
- :param whereclause:
- A :class:`.ClauseElement` expression which will be used to form the
- ``WHERE`` clause.
-
- :param from_obj:
- A list of :class:`.ClauseElement` objects which will be added to the
- ``FROM`` clause of the resulting statement. Note that "from" objects are
- automatically located within the columns and whereclause ClauseElements.
- Use this parameter to explicitly specify "from" objects which are not
- automatically locatable. This could include
- :class:`~sqlalchemy.schema.Table` objects that aren't otherwise present,
- or :class:`.Join` objects whose presence will supersede that of the
- :class:`~sqlalchemy.schema.Table` objects already located in the other
- clauses.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind=None:
- an :class:`~.base.Engine` or :class:`~.base.Connection` instance
- to which the
- resulting :class:`.Select` object will be bound. The :class:`.Select`
- object will otherwise automatically bind to whatever
- :class:`~.base.Connectable` instances can be located within its contained
- :class:`.ClauseElement` members.
-
- :param correlate=True:
- indicates that this :class:`.Select` object should have its
- contained :class:`.FromClause` elements "correlated" to an enclosing
- :class:`.Select` object. This means that any :class:`.ClauseElement`
- instance within the "froms" collection of this :class:`.Select`
- which is also present in the "froms" collection of an
- enclosing select will not be rendered in the ``FROM`` clause
- of this select statement.
-
- :param distinct=False:
- when ``True``, applies a ``DISTINCT`` qualifier to the columns
- clause of the resulting statement.
-
- The boolean argument may also be a column expression or list
- of column expressions - this is a special calling form which
- is understood by the Postgresql dialect to render the
- ``DISTINCT ON (<columns>)`` syntax.
-
- ``distinct`` is also available via the :meth:`~.Select.distinct`
- generative method.
-
- .. note:: The ``distinct`` keyword's acceptance of a string
- argument for usage with MySQL is deprecated. Use
- the ``prefixes`` argument or :meth:`~.Select.prefix_with`.
-
- :param for_update=False:
- when ``True``, applies ``FOR UPDATE`` to the end of the
- resulting statement. Certain database dialects also support
- alternate values for this parameter, for example mysql
- supports "read" which translates to ``LOCK IN SHARE MODE``,
- and oracle supports "nowait" which translates to ``FOR UPDATE
- NOWAIT``.
-
- :param group_by:
- a list of :class:`.ClauseElement` objects which will comprise the
- ``GROUP BY`` clause of the resulting select.
-
- :param having:
- a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
- of the resulting select when ``GROUP BY`` is used.
-
- :param limit=None:
- a numerical value which usually compiles to a ``LIMIT``
- expression in the resulting select. Databases that don't
- support ``LIMIT`` will attempt to provide similar
- functionality.
-
- :param offset=None:
- a numeric value which usually compiles to an ``OFFSET``
- expression in the resulting select. Databases that don't
- support ``OFFSET`` will attempt to provide similar
- functionality.
-
- :param order_by:
- a scalar or list of :class:`.ClauseElement` objects which will
- comprise the ``ORDER BY`` clause of the resulting select.
-
- :param prefixes:
- a list of strings or :class:`.ClauseElement` objects to include
- directly after the SELECT keyword in the generated statement,
- for dialect-specific query features. ``prefixes`` is
- also available via the :meth:`~.Select.prefix_with`
- generative method.
-
- :param use_labels=False:
- when ``True``, the statement will be generated using labels
- for each column in the columns clause, which qualify each
- column with its parent table's (or aliases) name so that name
- conflicts between columns in different tables don't occur.
- The format of the label is <tablename>_<column>. The "c"
- collection of the resulting :class:`.Select` object will use these
- names as well for targeting column members.
-
- use_labels is also available via the :meth:`~._SelectBase.apply_labels`
- generative method.
-
- """
- return Select(columns, whereclause=whereclause, from_obj=from_obj,
- **kwargs)
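# Editorial usage sketch for several of the parameters documented above
# (hypothetical tables; output whitespace flattened):
from sqlalchemy import table, column, select

users = table('users', column('id'), column('name'))
addresses = table('addresses', column('id'), column('user_id'))
s = select([users.c.name, addresses.c.id],
           whereclause=users.c.id == addresses.c.user_id,
           limit=10, use_labels=True)
print s
# SELECT users.name AS users_name, addresses.id AS addresses_id
# FROM users, addresses WHERE users.id = addresses.user_id
# LIMIT :param_1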
-
-def subquery(alias, *args, **kwargs):
- """Return an :class:`.Alias` object derived
- from a :class:`.Select`.
-
- name
- alias name
-
- \*args, \**kwargs
-
- all other arguments are delivered to the
- :func:`select` function.
-
- """
- return Select(*args, **kwargs).alias(alias)
-
-def insert(table, values=None, inline=False, **kwargs):
- """Return an :class:`.Insert` clause element.
-
- Similar functionality is available via the :func:`insert()` method on
- :class:`~sqlalchemy.schema.Table`.
-
- :param table: The table to be inserted into.
-
- :param values: A dictionary which specifies the column specifications of
- the ``INSERT``, and is optional. If left as None, the column
- specifications are determined from the bind parameters used during the
- compile phase of the ``INSERT`` statement. If the bind parameters also
- are None during the compile phase, then the column specifications will be
- generated from the full list of table columns. Note that the
- :meth:`~Insert.values()` generative method may also be used for this.
-
- :param prefixes: A list of modifier keywords to be inserted between INSERT
- and INTO. Alternatively, the :meth:`~Insert.prefix_with` generative
- method may be used.
-
- :param inline: if True, SQL defaults will be compiled 'inline' into the
- statement and not pre-executed.
-
- If both `values` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within `values` on a per-key basis.
-
- The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
- objects or their string identifiers. Each key may reference one of:
-
- * a literal data value (i.e. string, number, etc.);
- * a Column object;
- * a SELECT statement.
-
- If a ``SELECT`` statement is specified which references this
- ``INSERT`` statement's table, the statement will be correlated
- against the ``INSERT`` statement.
-
- """
- return Insert(table, values, inline=inline, **kwargs)
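# Editorial sketch of the per-key value kinds described above -- a literal
# and an embedded SELECT (hypothetical table):
from sqlalchemy import table, column, insert, select, literal

t = table('t', column('a'), column('b'))
print insert(t, values={'a': 5, 'b': select([literal(1)])})
# INSERT INTO t (a, b) VALUES (:a, (SELECT :param_1))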
-
-def update(table, whereclause=None, values=None, inline=False, **kwargs):
- """Return an :class:`.Update` clause element.
-
- Similar functionality is available via the :func:`update()` method on
- :class:`~sqlalchemy.schema.Table`.
-
- :param table: The table to be updated.
-
- :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
- condition of the ``UPDATE`` statement. Note that the
- :meth:`~Update.where()` generative method may also be used for this.
-
- :param values:
- A dictionary which specifies the ``SET`` conditions of the
- ``UPDATE``, and is optional. If left as None, the ``SET``
- conditions are determined from the bind parameters used during
- the compile phase of the ``UPDATE`` statement. If the bind
- parameters also are None during the compile phase, then the
- ``SET`` conditions will be generated from the full list of table
- columns. Note that the :meth:`~Update.values()` generative method may
- also be used for this.
-
- :param inline:
- if True, SQL defaults will be compiled 'inline' into the statement
- and not pre-executed.
-
- If both `values` and compile-time bind parameters are present, the
- compile-time bind parameters override the information specified
- within `values` on a per-key basis.
-
- The keys within `values` can be either :class:`~sqlalchemy.schema.Column`
- objects or their
- string identifiers. Each key may reference one of:
-
- * a literal data value (e.g. a string, number, etc.);
- * a Column object;
- * a SELECT statement.
-
- If a ``SELECT`` statement is specified which references this
- ``UPDATE`` statement's table, the statement will be correlated
- against the ``UPDATE`` statement.
-
- """
- return Update(
- table,
- whereclause=whereclause,
- values=values,
- inline=inline,
- **kwargs)
-
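-# Illustrative sketch, not part of the original module; ``users`` is assumed:
-#
-#     stmt = update(users,
-#                   whereclause=users.c.id == 5,
-#                   values={'name': 'ed'})
-#     # renders roughly: UPDATE users SET name=:name WHERE users.id = :id_1
-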
-def delete(table, whereclause = None, **kwargs):
- """Return a :class:`.Delete` clause element.
-
- Similar functionality is available via the :func:`delete()` method on
- :class:`~sqlalchemy.schema.Table`.
-
- :param table: The table to be deleted from.
-
- :param whereclause: A :class:`.ClauseElement` describing the ``WHERE``
- condition of the ``DELETE`` statement. Note that the
- :meth:`~Delete.where()` generative method may be used instead.
-
- """
- return Delete(table, whereclause, **kwargs)
-
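-# Illustrative sketch, not part of the original module; ``users`` is assumed:
-#
-#     stmt = delete(users, users.c.id == 5)
-#     # renders roughly: DELETE FROM users WHERE users.id = :id_1
-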
-def and_(*clauses):
- """Join a list of clauses together using the ``AND`` operator.
-
- The ``&`` operator is also overloaded on all
- :class:`_CompareMixin` subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.and_, *clauses)
-
-def or_(*clauses):
- """Join a list of clauses together using the ``OR`` operator.
-
- The ``|`` operator is also overloaded on all
- :class:`_CompareMixin` subclasses to produce the
- same result.
-
- """
- if len(clauses) == 1:
- return clauses[0]
- return BooleanClauseList(operator=operators.or_, *clauses)
-
-def not_(clause):
- """Return a negation of the given clause, i.e. ``NOT(clause)``.
-
- The ``~`` operator is also overloaded on all
- :class:`_CompareMixin` subclasses to produce the
- same result.
-
- """
- return operators.inv(_literal_as_binds(clause))
-
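-# Illustrative sketch, not part of the original module: the conjunctions nest
-# freely. Assumes ``users`` also has a Boolean ``active`` column:
-#
-#     crit = and_(users.c.name == 'ed',
-#                 or_(users.c.id > 5, not_(users.c.active)))
-#     # renders roughly:
-#     #   users.name = :name_1 AND (users.id > :id_1 OR NOT users.active)
-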
-def distinct(expr):
- """Return a ``DISTINCT`` clause."""
- expr = _literal_as_binds(expr)
- return _UnaryExpression(expr, operator=operators.distinct_op, type_=expr.type)
-
-def between(ctest, cleft, cright):
- """Return a ``BETWEEN`` predicate clause.
-
- Equivalent of SQL ``clausetest BETWEEN clauseleft AND clauseright``.
-
- The :func:`between()` method on all
- :class:`_CompareMixin` subclasses provides
- similar functionality.
-
- """
- ctest = _literal_as_binds(ctest)
- return ctest.between(cleft, cright)
-
-
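-# Illustrative sketch, not part of the original module; ``users`` is assumed:
-#
-#     crit = between(users.c.id, 5, 10)
-#     # same as users.c.id.between(5, 10); renders roughly:
-#     #   users.id BETWEEN :id_1 AND :id_2
-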
-def case(whens, value=None, else_=None):
- """Produce a ``CASE`` statement.
-
- whens
- A sequence of pairs, or alternatively a dict,
- to be translated into "WHEN / THEN" clauses.
-
- value
- Optional for simple case statements, produces
- a column expression as in "CASE <expr> WHEN ..."
-
- else\_
- Optional as well, for case defaults produces
- the "ELSE" portion of the "CASE" statement.
-
- The expressions used for THEN and ELSE,
- when specified as strings, will be interpreted
- as bound values. To specify textual SQL expressions
- for these, use the :func:`literal_column`
- construct.
-
- The expressions used for the WHEN criterion
- may only be literal strings when "value" is
- present, i.e. CASE table.somecol WHEN "x" THEN "y".
- Otherwise, literal strings are not accepted
- in this position, and either the text(<string>)
- or literal(<string>) constructs must be used to
- interpret raw string values.
-
- Usage examples::
-
- case([(orderline.c.qty > 100, item.c.specialprice),
- (orderline.c.qty > 10, item.c.bulkprice)
- ], else_=item.c.regularprice)
- case(value=emp.c.type, whens={
- 'engineer': emp.c.salary * 1.1,
- 'manager': emp.c.salary * 3,
- })
-
- Using :func:`literal_column()`, to allow for databases that
- do not support bind parameters in the ``then`` clause. The type
- can be specified which determines the type of the :func:`case()` construct
- overall::
-
- case([(orderline.c.qty > 100,
- literal_column("'greaterthan100'", String)),
- (orderline.c.qty > 10, literal_column("'greaterthan10'",
- String))
- ], else_=literal_column("'lessthan10'", String))
-
- """
-
- return _Case(whens, value=value, else_=else_)
-
-def cast(clause, totype, **kwargs):
- """Return a ``CAST`` function.
-
- Equivalent of SQL ``CAST(clause AS totype)``.
-
- Use with a :class:`~sqlalchemy.types.TypeEngine` subclass, i.e::
-
- cast(table.c.unit_price * table.c.qty, Numeric(10,4))
-
- or::
-
- cast(table.c.timestamp, DATE)
-
- """
- return _Cast(clause, totype, **kwargs)
-
-def extract(field, expr):
- """Return the clause ``extract(field FROM expr)``."""
-
- return _Extract(field, expr)
-
-def collate(expression, collation):
- """Return the clause ``expression COLLATE collation``."""
-
- expr = _literal_as_binds(expression)
- return _BinaryExpression(
- expr,
- _literal_as_text(collation),
- operators.collate, type_=expr.type)
-
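-# Illustrative sketch, not part of the original module; ``users`` is assumed:
-#
-#     expr = collate(users.c.name, 'utf8_bin')
-#     # renders roughly: users.name COLLATE utf8_bin
-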
-def exists(*args, **kwargs):
- """Return an ``EXISTS`` clause as applied to a :class:`.Select` object.
-
- Calling styles are of the following forms::
-
- # use on an existing select()
- s = select([table.c.col1]).where(table.c.col2==5)
- s = exists(s)
-
- # construct a select() at once
- exists(['*'], **select_arguments).where(criterion)
-
- # columns argument is optional, generates "EXISTS (SELECT *)"
- # by default.
- exists().where(table.c.col2==5)
-
- """
- return _Exists(*args, **kwargs)
-
-def union(*selects, **kwargs):
- """Return a ``UNION`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
-
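-# Illustrative sketch, not part of the original module: the selects joined by
-# UNION must have compatible column lists. ``users`` and ``addresses`` are
-# assumed tables:
-#
-#     u = union(select([users.c.name]),
-#               select([addresses.c.email_address]))
-#     # renders roughly: SELECT users.name FROM users
-#     #                  UNION SELECT addresses.email_address FROM addresses
-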
-def union_all(*selects, **kwargs):
- """Return a ``UNION ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- A similar :func:`union_all()` method is available on all
- :class:`.FromClause` subclasses.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
-
-def except_(*selects, **kwargs):
- """Return an ``EXCEPT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
-
-def except_all(*selects, **kwargs):
- """Return an ``EXCEPT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
-
-def intersect(*selects, **kwargs):
- """Return an ``INTERSECT`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
-
-def intersect_all(*selects, **kwargs):
- """Return an ``INTERSECT ALL`` of multiple selectables.
-
- The returned object is an instance of
- :class:`.CompoundSelect`.
-
- \*selects
- a list of :class:`.Select` instances.
-
- \**kwargs
- available keyword arguments are the same as those of
- :func:`select`.
-
- """
- return CompoundSelect(CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
-
-def alias(selectable, name=None):
- """Return an :class:`.Alias` object.
-
- An :class:`.Alias` represents any :class:`.FromClause`
- with an alternate name assigned within SQL, typically using the ``AS``
- clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
-
- Similar functionality is available via the
- :meth:`~.FromClause.alias` method
- available on all :class:`.FromClause` subclasses.
-
- When an :class:`.Alias` is created from a :class:`.Table` object,
- this has the effect of the table being rendered
- as ``tablename AS aliasname`` in a SELECT statement.
-
- For :func:`.select` objects, the effect is that of creating a named
- subquery, i.e. ``(select ...) AS aliasname``.
-
- The ``name`` parameter is optional, and provides the name
- to use in the rendered SQL. If blank, an "anonymous" name
- will be deterministically generated at compile time.
- Deterministic means the name is guaranteed to be unique against
- other constructs used in the same statement, and will also be the
- same name for each successive compilation of the same statement
- object.
-
- :param selectable: any :class:`.FromClause` subclass,
- such as a table, select statement, etc.
-
- :param name: string name to be assigned as the alias.
- If ``None``, a name will be deterministically generated
- at compile time.
-
- """
- return Alias(selectable, name=name)
-
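-# Illustrative sketch, not part of the original module: a self-join against
-# an assumed ``users`` table via an explicitly named alias:
-#
-#     u2 = alias(users, name='u2')
-#     stmt = select([users.c.name, u2.c.name],
-#                   whereclause=users.c.id == u2.c.id)
-#     # the FROM clause renders roughly as: users, users AS u2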
-
-def literal(value, type_=None):
- """Return a literal clause, bound to a bind parameter.
-
- Literal clauses are created automatically when non- :class:`.ClauseElement`
- objects (such as strings, ints, dates, etc.) are used in a comparison
- operation with a :class:`_CompareMixin`
- subclass, such as a :class:`~sqlalchemy.schema.Column` object. Use this function to force the
- generation of a literal clause, which will be created as a
- :class:`_BindParamClause` with a bound value.
-
- :param value: the value to be bound. Can be any Python object supported by
- the underlying DB-API, or is translatable via the given type argument.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
- will provide bind-parameter translation for this literal.
-
- """
- return _BindParamClause(None, value, type_=type_, unique=True)
-
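-# Illustrative sketch, not part of the original module; ``users`` is assumed:
-#
-#     stmt = select([users.c.name, literal(42).label('answer')])
-#     # renders roughly: SELECT users.name, :param_1 AS answer FROM users,
-#     # with 42 delivered separately as a bound parameter
-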
-def tuple_(*expr):
- """Return a SQL tuple.
-
- Main usage is to produce a composite IN construct::
-
- tuple_(table.c.col1, table.c.col2).in_(
- [(1, 2), (5, 12), (10, 19)]
- )
-
- """
- return _Tuple(*expr)
-
-def type_coerce(expr, type_):
- """Coerce the given expression into the given type, on the Python side only.
-
- :func:`.type_coerce` is roughly similar to :func:`.cast`, except no
- "CAST" expression is rendered - the given type is only applied towards
- expression typing and against received result values.
-
- e.g.::
-
- from sqlalchemy.types import TypeDecorator
- import uuid
-
- class AsGuid(TypeDecorator):
- impl = String
-
- def process_bind_param(self, value, dialect):
- if value is not None:
- return str(value)
- else:
- return None
-
- def process_result_value(self, value, dialect):
- if value is not None:
- return uuid.UUID(value)
- else:
- return None
-
- conn.execute(
- select([type_coerce(mytable.c.ident, AsGuid)]).\\
- where(
- type_coerce(mytable.c.ident, AsGuid) ==
- uuid.uuid3(uuid.NAMESPACE_URL, 'bar')
- )
- )
-
- """
- if hasattr(expr, '__clause_element__'):
- return type_coerce(expr.__clause_element__(), type_)
-
- elif not isinstance(expr, Visitable):
- if expr is None:
- return null()
- else:
- return literal(expr, type_=type_)
- else:
- return _Label(None, expr, type_=type_)
-
-
-def label(name, obj):
- """Return a :class:`_Label` object for the
- given :class:`.ColumnElement`.
-
- A label changes the name of an element in the columns clause of a
- ``SELECT`` statement, typically via the ``AS`` SQL keyword.
-
- This functionality is more conveniently available via the
- :func:`label()` method on :class:`.ColumnElement`.
-
- name
- label name
-
- obj
- a :class:`.ColumnElement`.
-
- """
- return _Label(name, obj)
-
-def column(text, type_=None):
- """Return a textual column clause, as would be in the columns clause of a
- ``SELECT`` statement.
-
- The object returned is an instance of :class:`.ColumnClause`, which
- represents the "syntactical" portion of the schema-level
- :class:`~sqlalchemy.schema.Column` object. It is often used directly
- within :func:`~.expression.select` constructs or with lightweight :func:`~.expression.table`
- constructs.
-
- Note that the :func:`~.expression.column` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param text: the name of the column. Quoting rules will be applied
- to the clause like any other column name. For textual column constructs
- that are not to be quoted, use the :func:`literal_column` function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object
- which will provide result-set translation for this column.
-
- See :class:`.ColumnClause` for further examples.
-
- """
- return ColumnClause(text, type_=type_)
-
-def literal_column(text, type_=None):
- """Return a textual column expression, as would be in the columns
- clause of a ``SELECT`` statement.
-
- The object returned supports further expressions in the same way as any
- other column object, including comparison, math and string operations.
- The type\_ parameter is important to determine proper expression behavior
- (such as, '+' means string concatenation or numerical addition based on
- the type).
-
- :param text: the text of the expression; can be any SQL expression.
- Quoting rules will not be applied. To specify a column-name expression
- which should be subject to quoting rules, use the :func:`column`
- function.
-
- :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` object which will
- provide result-set translation and additional expression semantics for
- this column. If left as None the type will be NullType.
-
- """
- return ColumnClause(text, type_=type_, is_literal=True)
-
-def table(name, *columns):
- """Represent a textual table clause.
-
- The object returned is an instance of :class:`.TableClause`, which represents the
- "syntactical" portion of the schema-level :class:`~.schema.Table` object.
- It may be used to construct lightweight table constructs.
-
- Note that the :func:`~.expression.table` function is not part of
- the ``sqlalchemy`` namespace. It must be imported from the ``sql`` package::
-
- from sqlalchemy.sql import table, column
-
- :param name: Name of the table.
-
- :param columns: A collection of :func:`~.expression.column` constructs.
-
- See :class:`.TableClause` for further examples.
-
- """
- return TableClause(name, *columns)
-
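-# Illustrative sketch, not part of the original module: a lightweight table
-# construct built without any metadata, using the import noted above:
-#
-#     from sqlalchemy.sql import table, column
-#     account = table('account', column('id'), column('descr'))
-#     stmt = select([account.c.id], account.c.descr == 'x')
-#     # renders roughly:
-#     #   SELECT account.id FROM account WHERE account.descr = :descr_1
-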
-def bindparam(key, value=None, type_=None, unique=False, required=False, callable_=None):
- """Create a bind parameter clause with the given key.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`_BindParamClause` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`_BindParamClause` at
- execution time.
-
- :param unique:
- if True, the key name of this BindParamClause will be
- modified if another :class:`_BindParamClause` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param required:
- a value is required at execution time.
-
- """
- if isinstance(key, ColumnClause):
- return _BindParamClause(key.name, value, type_=key.type,
- callable_=callable_,
- unique=unique, required=required)
- else:
- return _BindParamClause(key, value, type_=type_,
- callable_=callable_,
- unique=unique, required=required)
-
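-# Illustrative sketch, not part of the original module: a typed, named
-# parameter supplied at execution time. ``users`` and ``conn`` (a Connection)
-# are assumed:
-#
-#     from sqlalchemy import String
-#     stmt = select([users]).where(
-#                 users.c.name == bindparam('uname', type_=String))
-#     result = conn.execute(stmt, uname='ed')
-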
-def outparam(key, type_=None):
- """Create an 'OUT' parameter for usage in functions (stored procedures),
- for databases which support them.
-
- The ``outparam`` can be used like a regular function parameter.
- The "output" value will be available from the
- :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
- attribute, which returns a dictionary containing the values.
-
- """
- return _BindParamClause(
- key, None, type_=type_, unique=False, isoutparam=True)
-
-def text(text, bind=None, *args, **kwargs):
- """Create a SQL construct that is represented by a literal string.
-
- E.g.::
-
- t = text("SELECT * FROM users")
- result = connection.execute(t)
-
- The advantages :func:`text` provides over a plain string are
- backend-neutral support for bind parameters, per-statement
- execution options, as well as
- bind parameter and result-column typing behavior, allowing
- SQLAlchemy type constructs to play a role when executing
- a statement that is specified literally.
-
- Bind parameters are specified by name, using the format ``:name``.
- E.g.::
-
- t = text("SELECT * FROM users WHERE id=:user_id")
- result = connection.execute(t, user_id=12)
-
- To invoke SQLAlchemy typing logic for bind parameters, the
- ``bindparams`` list allows specification of :func:`bindparam`
- constructs which specify the type for a given name::
-
- t = text("SELECT id FROM users WHERE updated_at>:updated",
- bindparams=[bindparam('updated', DateTime())]
- )
-
- Typing during result row processing is also an important concern.
- Result column types
- are specified using the ``typemap`` dictionary, where the keys
- match the names of columns. These names are taken from what
- the DBAPI returns as ``cursor.description``::
-
- t = text("SELECT id, name FROM users",
- typemap={
- 'id':Integer,
- 'name':Unicode
- }
- )
-
- The :func:`text` construct is used internally for most cases when
- a literal string is specified for part of a larger query, such as
- within :func:`select()`, :func:`update()`,
- :func:`insert()` or :func:`delete()`. In those cases, the same
- bind parameter syntax is applied::
-
- s = select([users.c.id, users.c.name]).where("id=:user_id")
- result = connection.execute(s, user_id=12)
-
- Using :func:`text` explicitly usually implies the construction
- of a full, standalone statement. As such, SQLAlchemy refers
- to it as an :class:`.Executable` object, and it supports
- the :meth:`Executable.execution_options` method. For example,
- a :func:`text` construct that should be subject to "autocommit"
- can be set explicitly so using the ``autocommit`` option::
-
- t = text("EXEC my_procedural_thing()").\\
- execution_options(autocommit=True)
-
- Note that SQLAlchemy's usual "autocommit" behavior applies to
- :func:`text` constructs - that is, statements which begin
- with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
- or a variety of other phrases specific to certain backends, will
- be eligible for autocommit if no transaction is in progress.
-
- :param text:
- the text of the SQL statement to be created. Use ``:<param>``
- to specify bind parameters; they will be compiled to their
- engine-specific format.
-
- :param autocommit:
- Deprecated. Use .execution_options(autocommit=<True|False>)
- to set the autocommit option.
-
- :param bind:
- an optional connection or engine to be used for this text query.
-
- :param bindparams:
- a list of :func:`bindparam()` instances which can be used to define
- the types and/or initial values for the bind parameters within
- the textual statement; the keynames of the bindparams must match
- those within the text of the statement. The types will be used
- for pre-processing on bind values.
-
- :param typemap:
- a dictionary mapping the names of columns represented in the
- columns clause of a ``SELECT`` statement to type objects,
- which will be used to perform post-processing on columns within
- the result set. This argument applies to any expression
- that returns result sets.
-
- """
- return _TextClause(text, bind=bind, *args, **kwargs)
-
-def over(func, partition_by=None, order_by=None):
- """Produce an OVER clause against a function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- E.g.::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- Would produce "ROW_NUMBER() OVER(ORDER BY x)".
-
- :param func: a :class:`.FunctionElement` construct, typically
- generated by :attr:`~.expression.func`.
- :param partition_by: a column element or string, or a list
- of such, that will be used as the PARTITION BY clause
- of the OVER construct.
- :param order_by: a column element or string, or a list
- of such, that will be used as the ORDER BY clause
- of the OVER construct.
-
- This function is also available from the :attr:`~.expression.func`
- construct itself via the :meth:`.FunctionElement.over` method.
-
- New in 0.7.
-
- """
- return _Over(func, partition_by=partition_by, order_by=order_by)
-
-def null():
- """Return a :class:`_Null` object, which compiles to ``NULL``.
-
- """
- return _Null()
-
-def true():
- """Return a :class:`_True` object, which compiles to ``true``, or the
- boolean equivalent for the target dialect.
-
- """
- return _True()
-
-def false():
- """Return a :class:`_False` object, which compiles to ``false``, or the
- boolean equivalent for the target dialect.
-
- """
- return _False()
-
-class _FunctionGenerator(object):
- """Generate :class:`.Function` objects based on getattr calls."""
-
- def __init__(self, **opts):
- self.__names = []
- self.opts = opts
-
- def __getattr__(self, name):
- # passthru __ attributes; fixes pydoc
- if name.startswith('__'):
- try:
- return self.__dict__[name]
- except KeyError:
- raise AttributeError(name)
-
- elif name.endswith('_'):
- name = name[0:-1]
- f = _FunctionGenerator(**self.opts)
- f.__names = list(self.__names) + [name]
- return f
-
- def __call__(self, *c, **kwargs):
- o = self.opts.copy()
- o.update(kwargs)
- if len(self.__names) == 1:
- func = getattr(functions, self.__names[-1].lower(), None)
- if func is not None and \
- isinstance(func, type) and \
- issubclass(func, Function):
- return func(*c, **o)
-
- return Function(self.__names[-1],
- packagenames=self.__names[0:-1], *c, **o)
-
-# "func" global - i.e. func.count()
-func = _FunctionGenerator()
-"""Generate SQL function expressions.
-
- ``func`` is a special object instance which generates SQL functions based on name-based attributes, e.g.::
-
- >>> print func.count(1)
- count(:param_1)
-
- Any name can be given to ``func``. If the function name is unknown to
- SQLAlchemy, it will be rendered exactly as is. For common SQL functions
- which SQLAlchemy is aware of, the name may be interpreted as a *generic
- function* which will be compiled appropriately to the target database::
-
- >>> print func.current_timestamp()
- CURRENT_TIMESTAMP
-
- To call functions which are present in dot-separated packages, specify them in the same manner::
-
- >>> print func.stats.yield_curve(5, 10)
- stats.yield_curve(:yield_curve_1, :yield_curve_2)
-
- SQLAlchemy can be made aware of the return type of functions to enable
- type-specific lexical and result-based behavior. For example, to ensure
- that a string-based function returns a Unicode value and is similarly
- treated as a string in expressions, specify
- :class:`~sqlalchemy.types.Unicode` as the type:
-
- >>> print func.my_string(u'hi', type_=Unicode) + ' ' + \
- ... func.my_string(u'there', type_=Unicode)
- my_string(:my_string_1) || :my_string_2 || my_string(:my_string_3)
-
- The object returned by a ``func`` call is an instance of :class:`.Function`.
- This object meets the "column" interface, including comparison and labeling
- functions. The object can also be passed to the :meth:`~.Connectable.execute`
- method of a :class:`.Connection` or :class:`.Engine`, where it will be
- wrapped inside of a SELECT statement first.
-
- Functions which are interpreted as "generic" functions know how to
- calculate their return type automatically. For a listing of known generic
- functions, see :ref:`generic_functions`.
-
-"""
-
-# "modifier" global - i.e. modifier.distinct
-# TODO: use UnaryExpression for this instead ?
-modifier = _FunctionGenerator(group=False)
-
-class _generated_label(unicode):
- """A unicode subclass used to identify dynamically generated names."""
-
-def _escape_for_generated(x):
- if isinstance(x, _generated_label):
- return x
- else:
- return x.replace('%', '%%')
-
-def _clone(element):
- return element._clone()
-
-def _expand_cloned(elements):
- """expand the given set of ClauseElements to be the set of all 'cloned'
- predecessors.
-
- """
- return itertools.chain(*[x._cloned_set for x in elements])
-
-def _select_iterables(elements):
- """expand tables into individual columns in the
- given list of column expressions.
-
- """
- return itertools.chain(*[c._select_iterable for c in elements])
-
-def _cloned_intersection(a, b):
- """return the intersection of sets a and b, counting
- any overlap between 'cloned' predecessors.
-
- The returned set is in terms of the entities present within 'a'.
-
- """
- all_overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
- return set(elem for elem in a
- if all_overlap.intersection(elem._cloned_set))
-
-
-def _is_literal(element):
- return not isinstance(element, Visitable) and \
- not hasattr(element, '__clause_element__')
-
-def _from_objects(*elements):
- return itertools.chain(*[element._from_objects for element in elements])
-
-def _labeled(element):
- if not hasattr(element, 'name'):
- return element.label(None)
- else:
- return element
-
-def _column_as_key(element):
- if isinstance(element, basestring):
- return element
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- return element.key
-
-def _literal_as_text(element):
- if isinstance(element, Visitable):
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif isinstance(element, basestring):
- return _TextClause(unicode(element))
- elif isinstance(element, (util.NoneType, bool)):
- return _const_expr(element)
- else:
- raise exc.ArgumentError(
- "SQL expression object or string expected."
- )
-
-def _const_expr(element):
- if element is None:
- return null()
- elif element is False:
- return false()
- elif element is True:
- return true()
- else:
- raise exc.ArgumentError(
- "Expected None, False, or True"
- )
-
-def _clause_element_as_expr(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- else:
- return element
-
-def _literal_as_column(element):
- if isinstance(element, Visitable):
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- else:
- return literal_column(str(element))
-
-def _literal_as_binds(element, name=None, type_=None):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif not isinstance(element, Visitable):
- if element is None:
- return null()
- else:
- return _BindParamClause(name, element, type_=type_, unique=True)
- else:
- return element
-
-def _type_from_args(args):
- for a in args:
- if not isinstance(a.type, sqltypes.NullType):
- return a.type
- else:
- return sqltypes.NullType
-
-def _no_literals(element):
- if hasattr(element, '__clause_element__'):
- return element.__clause_element__()
- elif not isinstance(element, Visitable):
- raise exc.ArgumentError("Ambiguous literal: %r. Use the 'text()' "
- "function to indicate a SQL expression "
- "literal, or 'literal()' to indicate a "
- "bound value." % element)
- else:
- return element
-
-def _only_column_elements(element, name):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
- if not isinstance(element, ColumnElement):
- raise exc.ArgumentError("Column-based expression object expected for argument '%s'; "
- "got: '%s', type %s" % (name, element, type(element)))
- return element
-
-def _corresponding_column_or_error(fromclause, column,
- require_embedded=False):
- c = fromclause.corresponding_column(column,
- require_embedded=require_embedded)
- if c is None:
- raise exc.InvalidRequestError(
- "Given column '%s', attached to table '%s', "
- "failed to locate a corresponding column from table '%s'"
- %
- (column,
- getattr(column, 'table', None),fromclause.description)
- )
- return c
-
-@util.decorator
-def _generative(fn, *args, **kw):
- """Mark a method as generative."""
-
- self = args[0]._generate()
- fn(self, *args[1:], **kw)
- return self
-
-
-def is_column(col):
- """True if ``col`` is an instance of :class:`.ColumnElement`."""
-
- return isinstance(col, ColumnElement)
-
-
-class ClauseElement(Visitable):
- """Base class for elements of a programmatically constructed SQL
- expression.
-
- """
- __visit_name__ = 'clause'
-
- _annotations = {}
- supports_execution = False
- _from_objects = []
- bind = None
-
- def _clone(self):
- """Create a shallow copy of this ClauseElement.
-
- This method may be used by a generative API. It's also used as
- part of the "deep" copy afforded by a traversal that combines
- the _copy_internals() method.
-
- """
- c = self.__class__.__new__(self.__class__)
- c.__dict__ = self.__dict__.copy()
- c.__dict__.pop('_cloned_set', None)
-
- # this is a marker that helps to "equate" clauses to each other
- # when a Select returns its list of FROM clauses. the cloning
- # process leaves around a lot of remnants of the previous clause
- # typically in the form of column expressions still attached to the
- # old table.
- c._is_clone_of = self
-
- return c
-
- @property
- def _constructor(self):
- """return the 'constructor' for this ClauseElement.
-
- This is for the purpose of creating a new object of
- this type. Usually, it's just the element's __class__.
- However, the "Annotated" version of the object overrides
- to return the class of its proxied element.
-
- """
- return self.__class__
-
- @util.memoized_property
- def _cloned_set(self):
- """Return the set consisting all cloned anscestors of this
- ClauseElement.
-
- Includes this ClauseElement. This accessor tends to be used for
- FromClause objects to identify 'equivalent' FROM clauses, regardless
- of transformative operations.
-
- """
- s = util.column_set()
- f = self
- while f is not None:
- s.add(f)
- f = getattr(f, '_is_clone_of', None)
- return s
-
- def __getstate__(self):
- d = self.__dict__.copy()
- d.pop('_is_clone_of', None)
- return d
-
- if util.jython:
- def __hash__(self):
- """Return a distinct hash code.
-
- ClauseElements may have special equality comparisons which
- makes us rely on them having unique hash codes for use in
- hash-based collections. Stock __hash__ doesn't guarantee
- unique values on platforms with moving GCs.
- """
- return id(self)
-
- def _annotate(self, values):
- """return a copy of this ClauseElement with the given annotations
- dictionary.
-
- """
- return sqlutil.Annotated(self, values)
-
- def _deannotate(self):
- """return a copy of this ClauseElement with an empty annotations
- dictionary.
-
- """
- return self._clone()
-
- def unique_params(self, *optionaldict, **kwargs):
- """Return a copy with :func:`bindparam()` elments replaced.
-
- Same functionality as ``params()``, except adds `unique=True`
- to affected bind parameters so that multiple statements can be
- used.
-
- """
- return self._params(True, optionaldict, kwargs)
-
- def params(self, *optionaldict, **kwargs):
- """Return a copy with :func:`bindparam()` elments replaced.
-
- Returns a copy of this ClauseElement with :func:`bindparam()`
- elements replaced with values taken from the given dictionary::
-
- >>> clause = column('x') + bindparam('foo')
- >>> print clause.compile().params
- {'foo':None}
- >>> print clause.params({'foo':7}).compile().params
- {'foo':7}
-
- """
- return self._params(False, optionaldict, kwargs)
-
- def _params(self, unique, optionaldict, kwargs):
- if len(optionaldict) == 1:
- kwargs.update(optionaldict[0])
- elif len(optionaldict) > 1:
- raise exc.ArgumentError(
- "params() takes zero or one positional dictionary argument")
-
- def visit_bindparam(bind):
- if bind.key in kwargs:
- bind.value = kwargs[bind.key]
- if unique:
- bind._convert_to_unique()
- return cloned_traverse(self, {}, {'bindparam':visit_bindparam})
-
- def compare(self, other, **kw):
- """Compare this ClauseElement to the given ClauseElement.
-
- Subclasses should override the default behavior, which is a
- straight identity comparison.
-
- \**kw are arguments consumed by subclass compare() methods and
- may be used to modify the criteria for comparison.
- (see :class:`.ColumnElement`)
-
- """
- return self is other
-
- def _copy_internals(self, clone=_clone):
- """Reassign internal elements to be clones of themselves.
-
- Called during a copy-and-traverse operation on newly
- shallow-copied elements to create a deep copy.
-
- """
- pass
-
- def get_children(self, **kwargs):
- """Return immediate child elements of this :class:`.ClauseElement`.
-
- This is used for visit traversal.
-
- \**kwargs may contain flags that change the collection that is
- returned, for example to return a subset of items in order to
- cut down on larger traversals, or to return child items from a
- different context (such as schema-level collections instead of
- clause-level).
-
- """
- return []
-
- def self_group(self, against=None):
- """Apply a 'grouping' to this :class:`.ClauseElement`.
-
- This method is overridden by subclasses to return a
- "grouping" construct, i.e. parenthesis. In particular
- it's used by "binary" expressions to provide a grouping
- around themselves when placed into a larger expression,
- as well as by :func:`.select` constructs when placed into
- the FROM clause of another :func:`.select`. (Note that
- subqueries should be normally created using the
- :func:`.Select.alias` method, as many platforms require
- nested SELECT statements to be named).
-
- As expressions are composed together, the application of
- :meth:`self_group` is automatic - end-user code should never
- need to use this method directly. Note that SQLAlchemy's
- clause constructs take operator precedence into account -
- so parenthesis might not be needed, for example, in
- an expression like ``x OR (y AND z)`` - AND takes precedence
- over OR.
-
- The base :meth:`self_group` method of :class:`.ClauseElement`
- just returns self.
- """
- return self
-
-
- @util.deprecated('0.7',
- 'Only SQL expressions which subclass '
- ':class:`.Executable` may provide the '
- ':func:`.execute` method.')
- def execute(self, *multiparams, **params):
- """Compile and execute this :class:`.ClauseElement`.
-
- """
- e = self.bind
- if e is None:
- label = getattr(self, 'description', self.__class__.__name__)
- msg = ('This %s does not support direct execution.' % label)
- raise exc.UnboundExecutionError(msg)
- return e._execute_clauseelement(self, multiparams, params)
-
- @util.deprecated('0.7',
- 'Only SQL expressions which subclass '
- ':class:`.Executable` may provide the '
- ':func:`.scalar` method.')
- def scalar(self, *multiparams, **params):
- """Compile and execute this :class:`.ClauseElement`, returning
- the result's scalar representation.
-
- """
- return self.execute(*multiparams, **params).scalar()
-
- def compile(self, bind=None, dialect=None, **kw):
- """Compile this SQL expression.
-
- The return value is a :class:`~sqlalchemy.engine.Compiled` object.
- Calling ``str()`` or ``unicode()`` on the returned value will yield a
- string representation of the result. The
- :class:`~sqlalchemy.engine.Compiled` object also can return a
- dictionary of bind parameter names and values
- using the ``params`` accessor.
-
- :param bind: An ``Engine`` or ``Connection`` from which a
- ``Compiled`` will be acquired. This argument takes precedence over
- this :class:`.ClauseElement`'s bound engine, if any.
-
- :param column_keys: Used for INSERT and UPDATE statements, a list of
- column names which should be present in the VALUES clause of the
- compiled statement. If ``None``, all columns from the target table
- object are rendered.
-
- :param dialect: A ``Dialect`` instance from which a ``Compiled``
- will be acquired. This argument takes precedence over the `bind`
- argument as well as this :class:`.ClauseElement`'s bound engine, if
- any.
-
- :param inline: Used for INSERT statements, for a dialect which does
- not support inline retrieval of newly generated primary key
- columns, will force the expression used to create the new primary
- key value to be rendered inline within the INSERT statement's
- VALUES clause. This typically refers to Sequence execution but may
- also refer to any server-side default generation function
- associated with a primary key `Column`.
-
- """
-
- if not dialect:
- if bind:
- dialect = bind.dialect
- elif self.bind:
- dialect = self.bind.dialect
- bind = self.bind
- else:
- dialect = default.DefaultDialect()
- return self._compiler(dialect, bind=bind, **kw)
-
- def _compiler(self, dialect, **kw):
- """Return a compiler appropriate for this ClauseElement, given a
- Dialect."""
-
- return dialect.statement_compiler(dialect, self, **kw)
-
- def __str__(self):
- # Py3K
- #return unicode(self.compile())
- # Py2K
- return unicode(self.compile()).encode('ascii', 'backslashreplace')
- # end Py2K
-
- def __and__(self, other):
- return and_(self, other)
-
- def __or__(self, other):
- return or_(self, other)
-
- def __invert__(self):
- return self._negate()
-
- def __nonzero__(self):
- raise TypeError("Boolean value of this clause is not defined")
-
- def _negate(self):
- if hasattr(self, 'negation_clause'):
- return self.negation_clause
- else:
- return _UnaryExpression(
- self.self_group(against=operators.inv),
- operator=operators.inv,
- negate=None)
-
- def __repr__(self):
- friendly = getattr(self, 'description', None)
- if friendly is None:
- return object.__repr__(self)
- else:
- return '<%s.%s at 0x%x; %s>' % (
- self.__module__, self.__class__.__name__, id(self), friendly)
-
-
-class _Immutable(object):
- """mark a ClauseElement as 'immutable' when expressions are cloned."""
-
- def unique_params(self, *optionaldict, **kwargs):
- raise NotImplementedError("Immutable objects do not support copying")
-
- def params(self, *optionaldict, **kwargs):
- raise NotImplementedError("Immutable objects do not support copying")
-
- def _clone(self):
- return self
-
-class Operators(object):
- def __and__(self, other):
- return self.operate(operators.and_, other)
-
- def __or__(self, other):
- return self.operate(operators.or_, other)
-
- def __invert__(self):
- return self.operate(operators.inv)
-
- def op(self, opstring):
- def op(b):
- return self.operate(operators.op, opstring, b)
- return op
-
- def operate(self, op, *other, **kwargs):
- raise NotImplementedError(str(op))
-
- def reverse_operate(self, op, other, **kwargs):
- raise NotImplementedError(str(op))
-
-class ColumnOperators(Operators):
- """Defines comparison and math operations."""
-
- timetuple = None
- """Hack, allows datetime objects to be compared on the LHS."""
-
- def __lt__(self, other):
- return self.operate(operators.lt, other)
-
- def __le__(self, other):
- return self.operate(operators.le, other)
-
- __hash__ = Operators.__hash__
-
- def __eq__(self, other):
- return self.operate(operators.eq, other)
-
- def __ne__(self, other):
- return self.operate(operators.ne, other)
-
- def __gt__(self, other):
- return self.operate(operators.gt, other)
-
- def __ge__(self, other):
- return self.operate(operators.ge, other)
-
- def __neg__(self):
- return self.operate(operators.neg)
-
- def concat(self, other):
- return self.operate(operators.concat_op, other)
-
- def like(self, other, escape=None):
- return self.operate(operators.like_op, other, escape=escape)
-
- def ilike(self, other, escape=None):
- return self.operate(operators.ilike_op, other, escape=escape)
-
- def in_(self, other):
- return self.operate(operators.in_op, other)
-
- def startswith(self, other, **kwargs):
- return self.operate(operators.startswith_op, other, **kwargs)
-
- def endswith(self, other, **kwargs):
- return self.operate(operators.endswith_op, other, **kwargs)
-
- def contains(self, other, **kwargs):
- return self.operate(operators.contains_op, other, **kwargs)
-
- def match(self, other, **kwargs):
- return self.operate(operators.match_op, other, **kwargs)
-
- def desc(self):
- return self.operate(operators.desc_op)
-
- def asc(self):
- return self.operate(operators.asc_op)
-
- def nullsfirst(self):
- return self.operate(operators.nullsfirst_op)
-
- def nullslast(self):
- return self.operate(operators.nullslast_op)
-
- def collate(self, collation):
- return self.operate(operators.collate, collation)
-
- def __radd__(self, other):
- return self.reverse_operate(operators.add, other)
-
- def __rsub__(self, other):
- return self.reverse_operate(operators.sub, other)
-
- def __rmul__(self, other):
- return self.reverse_operate(operators.mul, other)
-
- def __rdiv__(self, other):
- return self.reverse_operate(operators.div, other)
-
- def between(self, cleft, cright):
- return self.operate(operators.between_op, cleft, cright)
-
- def distinct(self):
- return self.operate(operators.distinct_op)
-
- def __add__(self, other):
- return self.operate(operators.add, other)
-
- def __sub__(self, other):
- return self.operate(operators.sub, other)
-
- def __mul__(self, other):
- return self.operate(operators.mul, other)
-
- def __div__(self, other):
- return self.operate(operators.div, other)
-
- def __mod__(self, other):
- return self.operate(operators.mod, other)
-
- def __truediv__(self, other):
- return self.operate(operators.truediv, other)
-
- def __rtruediv__(self, other):
- return self.reverse_operate(operators.truediv, other)
-
-class _CompareMixin(ColumnOperators):
- """Defines comparison and math operations for :class:`.ClauseElement`
- instances."""
-
- def __compare(self, op, obj, negate=None, reverse=False,
- **kwargs
- ):
- if obj is None or isinstance(obj, _Null):
- if op == operators.eq:
- return _BinaryExpression(self, null(), operators.is_,
- negate=operators.isnot)
- elif op == operators.ne:
- return _BinaryExpression(self, null(), operators.isnot,
- negate=operators.is_)
- else:
- raise exc.ArgumentError("Only '='/'!=' operators can "
- "be used with NULL")
- else:
- obj = self._check_literal(op, obj)
-
- if reverse:
- return _BinaryExpression(obj,
- self,
- op,
- type_=sqltypes.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
- else:
- return _BinaryExpression(self,
- obj,
- op,
- type_=sqltypes.BOOLEANTYPE,
- negate=negate, modifiers=kwargs)
-
- def __operate(self, op, obj, reverse=False):
- obj = self._check_literal(op, obj)
-
- if reverse:
- left, right = obj, self
- else:
- left, right = self, obj
-
- if left.type is None:
- op, result_type = sqltypes.NULLTYPE._adapt_expression(op,
- right.type)
- elif right.type is None:
- op, result_type = left.type._adapt_expression(op,
- sqltypes.NULLTYPE)
- else:
- op, result_type = left.type._adapt_expression(op,
- right.type)
- return _BinaryExpression(left, right, op, type_=result_type)
-
-
- # a mapping of operators with the method they use, along with their negated
- # operator for comparison operators
- operators = {
- operators.add : (__operate,),
- operators.mul : (__operate,),
- operators.sub : (__operate,),
- # Py2K
- operators.div : (__operate,),
- # end Py2K
- operators.mod : (__operate,),
- operators.truediv : (__operate,),
- operators.lt : (__compare, operators.ge),
- operators.le : (__compare, operators.gt),
- operators.ne : (__compare, operators.eq),
- operators.gt : (__compare, operators.le),
- operators.ge : (__compare, operators.lt),
- operators.eq : (__compare, operators.ne),
- operators.like_op : (__compare, operators.notlike_op),
- operators.ilike_op : (__compare, operators.notilike_op),
- }
-
- def operate(self, op, *other, **kwargs):
- o = _CompareMixin.operators[op]
- return o[0](self, op, other[0], *o[1:], **kwargs)
-
- def reverse_operate(self, op, other, **kwargs):
- o = _CompareMixin.operators[op]
- return o[0](self, op, other, reverse=True, *o[1:], **kwargs)
-
- def in_(self, other):
- """Compare this element to the given element or collection using IN."""
-
- return self._in_impl(operators.in_op, operators.notin_op, other)
-
- def _in_impl(self, op, negate_op, seq_or_selectable):
- seq_or_selectable = _clause_element_as_expr(seq_or_selectable)
-
- if isinstance(seq_or_selectable, _ScalarSelect):
- return self.__compare(op, seq_or_selectable,
- negate=negate_op)
- elif isinstance(seq_or_selectable, _SelectBase):
-
- # TODO: if we ever want to support (x, y, z) IN (select x,
- # y, z from table), we would need a multi-column version of
- # as_scalar() to produce a multi- column selectable that
- # does not export itself as a FROM clause
-
- return self.__compare(op, seq_or_selectable.as_scalar(),
- negate=negate_op)
- elif isinstance(seq_or_selectable, (Selectable, _TextClause)):
- return self.__compare(op, seq_or_selectable,
- negate=negate_op)
-
-
- # Handle non-selectable arguments as sequences
-
- args = []
- for o in seq_or_selectable:
- if not _is_literal(o):
- if not isinstance(o, _CompareMixin):
- raise exc.InvalidRequestError('in() function accept'
- 's either a list of non-selectable values, '
- 'or a selectable: %r' % o)
- else:
- o = self._bind_param(op, o)
- args.append(o)
- if len(args) == 0:
-
- # Special case handling for empty IN's, behave like
- # comparison against zero row selectable. We use != to
- # build the contradiction as it handles NULL values
- # appropriately, i.e. "not (x IN ())" should not return NULL
- # values for x.
-
- util.warn('The IN-predicate on "%s" was invoked with an '
- 'empty sequence. This results in a '
- 'contradiction, which nonetheless can be '
- 'expensive to evaluate. Consider alternative '
- 'strategies for improved performance.' % self)
- return self != self
-
- return self.__compare(op,
- ClauseList(*args).self_group(against=op),
- negate=negate_op)
-
- def __neg__(self):
- return _UnaryExpression(self, operator=operators.neg)
-
- def startswith(self, other, escape=None):
- """Produce the clause ``LIKE '<other>%'``"""
-
- # use __radd__ to force string concat behavior
- return self.__compare(
- operators.like_op,
- literal_column("'%'", type_=sqltypes.String).__radd__(
- self._check_literal(operators.like_op, other)
- ),
- escape=escape)
-
- def endswith(self, other, escape=None):
- """Produce the clause ``LIKE '%<other>'``"""
-
- return self.__compare(
- operators.like_op,
- literal_column("'%'", type_=sqltypes.String) +
- self._check_literal(operators.like_op, other),
- escape=escape)
-
- def contains(self, other, escape=None):
- """Produce the clause ``LIKE '%<other>%'``"""
-
- return self.__compare(
- operators.like_op,
- literal_column("'%'", type_=sqltypes.String) +
- self._check_literal(operators.like_op, other) +
- literal_column("'%'", type_=sqltypes.String),
- escape=escape)
-
- def match(self, other):
- """Produce a MATCH clause, i.e. ``MATCH '<other>'``
-
- The allowed contents of ``other`` are database backend specific.
-
- """
- return self.__compare(operators.match_op,
- self._check_literal(operators.match_op,
- other))
-
- def label(self, name):
- """Produce a column label, i.e. ``<columnname> AS <name>``.
-
- This is a shortcut to the :func:`~.expression.label` function.
-
- If 'name' is None, an anonymous label name will be generated.
-
- """
- return _Label(name, self, self.type)
-
- def desc(self):
- """Produce a DESC clause, i.e. ``<columnname> DESC``"""
-
- return desc(self)
-
- def asc(self):
- """Produce a ASC clause, i.e. ``<columnname> ASC``"""
-
- return asc(self)
-
- def nullsfirst(self):
- """Produce a NULLS FIRST clause, i.e. ``NULLS FIRST``"""
-
- return nullsfirst(self)
-
- def nullslast(self):
- """Produce a NULLS LAST clause, i.e. ``NULLS LAST``"""
-
- return nullslast(self)
-
- def distinct(self):
- """Produce a DISTINCT clause, i.e. ``DISTINCT <columnname>``"""
-
- return _UnaryExpression(self, operator=operators.distinct_op,
- type_=self.type)
-
- def between(self, cleft, cright):
- """Produce a BETWEEN clause, i.e. ``<column> BETWEEN <cleft> AND
- <cright>``"""
-
- return _BinaryExpression(
- self,
- ClauseList(
- self._check_literal(operators.and_, cleft),
- self._check_literal(operators.and_, cright),
- operator=operators.and_,
- group=False),
- operators.between_op)
-
- def collate(self, collation):
- """Produce a COLLATE clause, i.e. ``<column> COLLATE utf8_bin``"""
-
- return collate(self, collation)
-
- def op(self, operator):
- """produce a generic operator function.
-
- e.g.::
-
- somecolumn.op("*")(5)
-
- produces::
-
- somecolumn * 5
-
- :param operator: a string which will be output as the infix operator
- between this :class:`.ClauseElement` and the expression passed to the
- generated function.
-
- This function can also be used to make bitwise operators explicit. For
- example::
-
- somecolumn.op('&')(0xff)
-
- is a bitwise AND of the value in somecolumn.
-
- """
- return lambda other: self.__operate(operator, other)
-
- def _bind_param(self, operator, obj):
- return _BindParamClause(None, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
-
- def _check_literal(self, operator, other):
- if isinstance(other, _BindParamClause) and \
- isinstance(other.type, sqltypes.NullType):
- # TODO: perhaps we should not mutate the incoming bindparam()
- # here and instead make a copy of it. this might
- # be the only place that we're mutating an incoming construct.
- other.type = self.type
- return other
- elif hasattr(other, '__clause_element__'):
- return other.__clause_element__()
- elif not isinstance(other, ClauseElement):
- return self._bind_param(operator, other)
- elif isinstance(other, (_SelectBase, Alias)):
- return other.as_scalar()
- else:
- return other
-
-
-class ColumnElement(ClauseElement, _CompareMixin):
- """Represent an element that is usable within the "column clause" portion
- of a ``SELECT`` statement.
-
- This includes columns associated with tables, aliases, and
- subqueries, expressions, function calls, SQL keywords such as
- ``NULL``, literals, etc. :class:`.ColumnElement` is the ultimate base
- class for all such elements.
-
- :class:`.ColumnElement` supports the ability to be a *proxy* element,
- which indicates that the :class:`.ColumnElement` may be associated with
- a :class:`.Selectable` which was derived from another :class:`.Selectable`.
- An example of a "derived" :class:`.Selectable` is an :class:`.Alias` of a
- :class:`~sqlalchemy.schema.Table`.
-
- A :class:`.ColumnElement`, by subclassing the :class:`_CompareMixin` mixin
- class, provides the ability to generate new :class:`.ClauseElement`
- objects using Python expressions. See the :class:`_CompareMixin`
- docstring for more details.
-
- """
-
- __visit_name__ = 'column'
- primary_key = False
- foreign_keys = []
- quote = None
- _label = None
-
- @property
- def _select_iterable(self):
- return (self, )
-
- @util.memoized_property
- def base_columns(self):
- return util.column_set(c for c in self.proxy_set
- if not hasattr(c, 'proxies'))
-
- @util.memoized_property
- def proxy_set(self):
- s = util.column_set([self])
- if hasattr(self, 'proxies'):
- for c in self.proxies:
- s.update(c.proxy_set)
- return s
-
- def shares_lineage(self, othercolumn):
- """Return True if the given :class:`.ColumnElement`
- has a common ancestor to this :class:`.ColumnElement`."""
-
- return bool(self.proxy_set.intersection(othercolumn.proxy_set))
-
- def _make_proxy(self, selectable, name=None):
- """Create a new :class:`.ColumnElement` representing this
- :class:`.ColumnElement` as it appears in the select list of a
- descending selectable.
-
- """
- if name is None:
- name = self.anon_label
- # TODO: may want to change this to anon_label,
- # or some value that is more useful than the
- # compiled form of the expression
- key = str(self)
- else:
- key = name
-
- co = ColumnClause(name, selectable, type_=getattr(self,
- 'type', None))
- co.proxies = [self]
- selectable._columns[key] = co
- return co
-
- def compare(self, other, use_proxies=False, equivalents=None, **kw):
- """Compare this ColumnElement to another.
-
- Special arguments understood:
-
- :param use_proxies: when True, consider two columns that
- share a common base column as equivalent (i.e. shares_lineage())
-
- :param equivalents: a dictionary of columns as keys mapped to sets
- of columns. If the given "other" column is present in this
- dictionary, if any of the columns in the corresponding set() pass the
- comparison test, the result is True. This is used to expand the
- comparison to other columns that may be known to be equivalent to
- this one via foreign key or other criterion.
-
- """
- to_compare = (other, )
- if equivalents and other in equivalents:
- to_compare = equivalents[other].union(to_compare)
-
- for oth in to_compare:
- if use_proxies and self.shares_lineage(oth):
- return True
- elif oth is self:
- return True
- else:
- return False
-
- @util.memoized_property
- def anon_label(self):
- """provides a constant 'anonymous label' for this ColumnElement.
-
- This is a label() expression which will be named at compile time.
- The same label() is returned each time anon_label is called so
- that expressions can reference anon_label multiple times, producing
- the same label name at compile time.
-
- the compiler uses this function automatically at compile time
- for expressions that are known to be 'unnamed' like binary
- expressions and function calls.
-
- """
- return _generated_label('%%(%d %s)s' % (id(self), getattr(self,
- 'name', 'anon')))
-
-class ColumnCollection(util.OrderedProperties):
- """An ordered dictionary that stores a list of ColumnElement
- instances.
-
- Overrides the ``__eq__()`` method to produce SQL clauses between
- sets of correlated columns.
-
- """
-
- def __init__(self, *cols):
- super(ColumnCollection, self).__init__()
- self._data.update((c.key, c) for c in cols)
- self.__dict__['_all_cols'] = util.column_set(self)
-
- def __str__(self):
- return repr([str(c) for c in self])
-
- def replace(self, column):
- """add the given column to this collection, removing unaliased
- versions of this column as well as existing columns with the
- same key.
-
- e.g.::
-
- t = Table('sometable', metadata, Column('col1', Integer))
- t.columns.replace(Column('col1', Integer, key='columnone'))
-
- will remove the original 'col1' from the collection, and add
- the new column under the key 'columnone'.
-
- Used by schema.Column to override columns during table reflection.
-
- """
- if column.name in self and column.key != column.name:
- other = self[column.name]
- if other.name == other.key:
- del self._data[other.name]
- self._all_cols.remove(other)
- if column.key in self._data:
- self._all_cols.remove(self._data[column.key])
- self._all_cols.add(column)
- self._data[column.key] = column
-
- def add(self, column):
- """Add a column to this collection.
-
- The key attribute of the column will be used as the hash key
- for this dictionary.
-
- """
- self[column.key] = column
-
- def __delitem__(self, key):
- raise NotImplementedError()
-
- def __setattr__(self, key, object):
- raise NotImplementedError()
-
- def __setitem__(self, key, value):
- if key in self:
-
- # this warning is primarily to catch select() statements
- # which have conflicting column names in their exported
- # columns collection
-
- existing = self[key]
- if not existing.shares_lineage(value):
- util.warn('Column %r on table %r being replaced by '
- 'another column with the same key. Consider '
- 'use_labels for select() statements.' % (key,
- getattr(existing, 'table', None)))
- self._all_cols.remove(existing)
- self._all_cols.add(value)
- self._data[key] = value
-
- def clear(self):
- self._data.clear()
- self._all_cols.clear()
-
- def remove(self, column):
- del self._data[column.key]
- self._all_cols.remove(column)
-
- def update(self, value):
- self._data.update(value)
- self._all_cols.clear()
- self._all_cols.update(self._data.values())
-
- def extend(self, iter):
- self.update((c.key, c) for c in iter)
-
- __hash__ = None
-
- def __eq__(self, other):
- l = []
- for c in other:
- for local in self:
- if c.shares_lineage(local):
- l.append(c==local)
- return and_(*l)
-
- def __contains__(self, other):
- if not isinstance(other, basestring):
- raise exc.ArgumentError("__contains__ requires a string argument")
- return util.OrderedProperties.__contains__(self, other)
-
- def __setstate__(self, state):
- self.__dict__['_data'] = state['_data']
- self.__dict__['_all_cols'] = util.column_set(self._data.values())
-
- def contains_column(self, col):
- # this has to be done via set() membership
- return col in self._all_cols
-
- def as_immutable(self):
- return ImmutableColumnCollection(self._data, self._all_cols)
-
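# [editorial sketch -- not part of the original module]
# ColumnCollection backs the ``.c`` attribute of tables and selectables;
# ``in`` tests string keys only, while contains_column() tests identity.
from sqlalchemy.sql import column, table

t = table('t', column('a'), column('b'))
assert 'a' in t.c
assert t.c.contains_column(t.c.a)
assert [col.name for col in t.c] == ['a', 'b']   # insertion-ordered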
-class ImmutableColumnCollection(util.ImmutableProperties, ColumnCollection):
- def __init__(self, data, colset):
- util.ImmutableProperties.__init__(self, data)
- self.__dict__['_all_cols'] = colset
-
- extend = remove = util.ImmutableProperties._immutable
-
-
-class ColumnSet(util.ordered_column_set):
- def contains_column(self, col):
- return col in self
-
- def extend(self, cols):
- for col in cols:
- self.add(col)
-
- def __add__(self, other):
- return list(self) + list(other)
-
- def __eq__(self, other):
- l = []
- for c in other:
- for local in self:
- if c.shares_lineage(local):
- l.append(c==local)
- return and_(*l)
-
- def __hash__(self):
- return hash(tuple(x for x in self))
-
-class Selectable(ClauseElement):
- """mark a class as being selectable"""
- __visit_name__ = 'selectable'
-
-class FromClause(Selectable):
- """Represent an element that can be used within the ``FROM``
- clause of a ``SELECT`` statement.
-
- """
- __visit_name__ = 'fromclause'
- named_with_column = False
- _hide_froms = []
- quote = None
- schema = None
-
- def count(self, whereclause=None, **params):
- """return a SELECT COUNT generated against this
- :class:`.FromClause`."""
-
- if self.primary_key:
- col = list(self.primary_key)[0]
- else:
- col = list(self.columns)[0]
- return select(
- [func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
-
- def select(self, whereclause=None, **params):
- """return a SELECT of this :class:`.FromClause`."""
-
- return select([self], whereclause, **params)
-
- def join(self, right, onclause=None, isouter=False):
- """return a join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, isouter)
-
- def outerjoin(self, right, onclause=None):
- """return an outer join of this :class:`.FromClause` against another
- :class:`.FromClause`."""
-
- return Join(self, right, onclause, True)
-
- def alias(self, name=None):
- """return an alias of this :class:`.FromClause`.
-
- This is shorthand for calling::
-
- from sqlalchemy import alias
- a = alias(self, name=name)
-
- See :func:`~.expression.alias` for details.
-
- """
-
- return Alias(self, name)
-
- def is_derived_from(self, fromclause):
- """Return True if this FromClause is 'derived' from the given
- FromClause.
-
- An example would be an Alias of a Table is derived from that Table.
-
- """
- return fromclause in self._cloned_set
-
- def replace_selectable(self, old, alias):
- """replace all occurrences of FromClause 'old' with the given Alias
- object, returning a copy of this :class:`.FromClause`.
-
- """
-
- return sqlutil.ClauseAdapter(alias).traverse(self)
-
- def correspond_on_equivalents(self, column, equivalents):
- """Return corresponding_column for the given column, or if None
- search for a match in the given dictionary.
-
- """
- col = self.corresponding_column(column, require_embedded=True)
- if col is None and column in equivalents:
-     for equiv in equivalents[column]:
- nc = self.corresponding_column(equiv, require_embedded=True)
- if nc:
- return nc
- return col
-
- def corresponding_column(self, column, require_embedded=False):
- """Given a :class:`.ColumnElement`, return the exported
- :class:`.ColumnElement` object from this :class:`.Selectable`
- which corresponds to that original
- :class:`~sqlalchemy.schema.Column` via a common ancestor
- column.
-
- :param column: the target :class:`.ColumnElement` to be matched
-
- :param require_embedded: only return corresponding columns for
- the given :class:`.ColumnElement`, if the given
- :class:`.ColumnElement` is actually present within a sub-element
- of this :class:`.FromClause`. Normally the column will match if
- it merely shares a common ancestor with one of the exported
- columns of this :class:`.FromClause`.
-
- """
-
- # don't dig around if the column is locally present
-
- if self.c.contains_column(column):
- return column
- col, intersect = None, None
- target_set = column.proxy_set
- cols = self.c
- for c in cols:
- i = target_set.intersection(itertools.chain(*[p._cloned_set
- for p in c.proxy_set]))
- if i and (not require_embedded
- or c.proxy_set.issuperset(target_set)):
- if col is None:
-
- # no corresponding column yet, pick this one.
-
- col, intersect = c, i
- elif len(i) > len(intersect):
-
- # 'c' has a larger field of correspondence than
- # 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
- # matches a1.c.x->table.c.x better than
- # selectable.c.x->table.c.x does.
-
- col, intersect = c, i
- elif i == intersect:
-
- # they have the same field of correspondence. see
- # which proxy_set has fewer columns in it, which
- # indicates a closer relationship with the root
- # column. Also take into account the "weight"
- # attribute which CompoundSelect() uses to give
- # higher precedence to columns based on vertical
- # position in the compound statement, and discard
- # columns that have no reference to the target
- # column (also occurs with CompoundSelect)
-
- col_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- col.proxy_set if sc.shares_lineage(column)])
- c_distance = util.reduce(operator.add,
- [sc._annotations.get('weight', 1) for sc in
- c.proxy_set if sc.shares_lineage(column)])
- if c_distance < col_distance:
- col, intersect = c, i
- return col
-
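# [editorial sketch -- not part of the original module]
# corresponding_column() walks proxy sets to find the column on a derived
# selectable that descends from a given "root" column.
from sqlalchemy.sql import column, select, table

t = table('t', column('a'), column('b'))
s = select([t]).alias('s')
assert s.corresponding_column(t.c.a) is s.c.a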
- @property
- def description(self):
- """a brief description of this FromClause.
-
- Used primarily for error message formatting.
-
- """
- return getattr(self, 'name', self.__class__.__name__ + " object")
-
- def _reset_exported(self):
- """delete memoized collections when a FromClause is cloned."""
-
- for name in 'primary_key', '_columns', 'columns', \
- 'foreign_keys', 'locate_all_froms':
- self.__dict__.pop(name, None)
-
- @util.memoized_property
- def columns(self):
- """Return the collection of Column objects contained by this
- FromClause."""
-
- if '_columns' not in self.__dict__:
- self._init_collections()
- self._populate_column_collection()
- return self._columns.as_immutable()
-
- @util.memoized_property
- def primary_key(self):
- """Return the collection of Column objects which comprise the
- primary key of this FromClause."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.primary_key
-
- @util.memoized_property
- def foreign_keys(self):
- """Return the collection of ForeignKey objects which this
- FromClause references."""
-
- self._init_collections()
- self._populate_column_collection()
- return self.foreign_keys
-
- c = property(attrgetter('columns'))
- _select_iterable = property(attrgetter('columns'))
-
- def _init_collections(self):
- assert '_columns' not in self.__dict__
- assert 'primary_key' not in self.__dict__
- assert 'foreign_keys' not in self.__dict__
-
- self._columns = ColumnCollection()
- self.primary_key = ColumnSet()
- self.foreign_keys = set()
-
- def _populate_column_collection(self):
- pass
-
-class _BindParamClause(ColumnElement):
- """Represent a bind parameter.
-
- Public constructor is the :func:`bindparam()` function.
-
- """
-
- __visit_name__ = 'bindparam'
- quote = None
-
- def __init__(self, key, value, type_=None, unique=False,
- callable_=None,
- isoutparam=False, required=False,
- _compared_to_operator=None,
- _compared_to_type=None):
- """Construct a _BindParamClause.
-
- :param key:
- the key for this bind param. Will be used in the generated
- SQL statement for dialects that use named parameters. This
- value may be modified when part of a compilation operation,
- if other :class:`_BindParamClause` objects exist with the same
- key, or if its length is too long and truncation is
- required.
-
- :param value:
- Initial value for this bind param. This value may be
- overridden by the dictionary of parameters sent to statement
- compilation/execution.
-
- :param callable\_:
- A callable function that takes the place of "value". The function
- will be called at statement execution time to determine the
- ultimate value. Used for scenarios where the actual bind
- value cannot be determined at the point at which the clause
- construct is created, but embedded bind values are still desirable.
-
- :param type\_:
- A ``TypeEngine`` object that will be used to pre-process the
- value corresponding to this :class:`_BindParamClause` at
- execution time.
-
- :param unique:
- if True, the key name of this BindParamClause will be
- modified if another :class:`_BindParamClause` of the same name
- already has been located within the containing
- :class:`.ClauseElement`.
-
- :param required:
- a value is required at execution time.
-
- :param isoutparam:
- if True, the parameter should be treated like a stored procedure
- "OUT" parameter.
-
- """
- if unique:
- self.key = _generated_label('%%(%d %s)s' % (id(self), key
- or 'param'))
- else:
- self.key = key or _generated_label('%%(%d param)s'
- % id(self))
- self._orig_key = key or 'param'
- self.unique = unique
- self.value = value
- self.callable = callable_
- self.isoutparam = isoutparam
- self.required = required
- if type_ is None:
- if _compared_to_type is not None:
- self.type = \
- _compared_to_type._coerce_compared_value(
- _compared_to_operator, value)
- else:
- self.type = sqltypes._type_map.get(type(value),
- sqltypes.NULLTYPE)
- elif isinstance(type_, type):
- self.type = type_()
- else:
- self.type = type_
-
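# [editorial sketch -- not part of the original module]
# bindparam() is the public constructor for _BindParamClause; the value
# is supplied when the statement is executed.
from sqlalchemy.sql import bindparam, column, select

x = column('x')
stmt = select([x]).where(x == bindparam('xval'))
# str(stmt) renders roughly as:  SELECT x WHERE x = :xval
# executed as, e.g.:  conn.execute(stmt, xval=10)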
- def _clone(self):
- c = ClauseElement._clone(self)
- if self.unique:
- c.key = _generated_label('%%(%d %s)s' % (id(c), c._orig_key
- or 'param'))
- return c
-
- def _convert_to_unique(self):
- if not self.unique:
- self.unique = True
- self.key = _generated_label('%%(%d %s)s' % (id(self),
- self._orig_key or 'param'))
-
- def compare(self, other, **kw):
- """Compare this :class:`_BindParamClause` to the given
- clause."""
-
- return isinstance(other, _BindParamClause) \
- and self.type._compare_type_affinity(other.type) \
- and self.value == other.value
-
- def __getstate__(self):
- """execute a deferred value for serialization purposes."""
-
- d = self.__dict__.copy()
- v = self.value
- if self.callable:
- v = self.callable()
- d['callable'] = None
- d['value'] = v
- return d
-
- def __repr__(self):
- return '_BindParamClause(%r, %r, type_=%r)' % (self.key,
- self.value, self.type)
-
-class _TypeClause(ClauseElement):
- """Handle a type keyword in a SQL statement.
-
- Used by the ``Case`` statement.
-
- """
-
- __visit_name__ = 'typeclause'
-
- def __init__(self, type):
- self.type = type
-
-
-class _Generative(object):
- """Allow a ClauseElement to generate itself via the
- @_generative decorator.
-
- """
-
- def _generate(self):
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- return s
-
-
-class Executable(_Generative):
- """Mark a ClauseElement as supporting execution.
-
- :class:`.Executable` is a superclass for all "statement" types
- of objects, including :func:`select`, :func:`delete`, :func:`update`,
- :func:`insert`, :func:`text`.
-
- """
-
- supports_execution = True
- _execution_options = util.immutabledict()
- _bind = None
-
- @_generative
- def execution_options(self, **kw):
- """ Set non-SQL options for the statement which take effect during
- execution.
-
- Execution options can be set on a per-statement or
- per :class:`.Connection` basis. Additionally, the
- :class:`.Engine` and ORM :class:`~.orm.query.Query` objects provide access
- to execution options which they in turn configure upon connections.
-
- The :meth:`execution_options` method is generative. A new
- instance of this statement is returned that contains the options::
-
- statement = select([table.c.x, table.c.y])
- statement = statement.execution_options(autocommit=True)
-
- Note that only a subset of possible execution options can be applied
- to a statement - these include "autocommit" and "stream_results",
- but not "isolation_level" or "compiled_cache".
- See :meth:`.Connection.execution_options` for a full list of
- possible options.
-
- See also:
-
- :meth:`.Connection.execution_options()`
-
- :meth:`.Query.execution_options()`
-
- """
- if 'isolation_level' in kw:
- raise exc.ArgumentError(
- "'isolation_level' execution option may only be specified "
- "on Connection.execution_options(), or "
- "per-engine using the isolation_level "
- "argument to create_engine()."
- )
- if 'compiled_cache' in kw:
- raise exc.ArgumentError(
- "'compiled_cache' execution option may only be specified "
- "on Connection.execution_options(), not per statement."
- )
- self._execution_options = self._execution_options.union(kw)
-
- def execute(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`."""
-
- e = self.bind
- if e is None:
- label = getattr(self, 'description', self.__class__.__name__)
- msg = ('This %s is not directly bound to a Connection or Engine. '
-        'Use the .execute() method of a Connection or Engine '
-        'to execute this construct.' % label)
- raise exc.UnboundExecutionError(msg)
- return e._execute_clauseelement(self, multiparams, params)
-
- def scalar(self, *multiparams, **params):
- """Compile and execute this :class:`.Executable`, returning the
- result's scalar representation.
-
- """
- return self.execute(*multiparams, **params).scalar()
-
- @property
- def bind(self):
- """Returns the :class:`.Engine` or :class:`.Connection` to
- which this :class:`.Executable` is bound, or None if none found.
-
- This is a traversal which checks locally, then
- checks among the "from" clauses of associated objects
- until a bound engine or connection is found.
-
- """
- if self._bind is not None:
- return self._bind
-
- for f in _from_objects(self):
-     if f is self:
-         continue
-     engine = f.bind
-     if engine is not None:
-         return engine
- return None
-
-
-# legacy, some outside users may be calling this
-_Executable = Executable
-
-class _TextClause(Executable, ClauseElement):
- """Represent a literal SQL text fragment.
-
- Public constructor is the :func:`text()` function.
-
- """
-
- __visit_name__ = 'textclause'
-
- _bind_params_regex = re.compile(r'(?<![:\w\x5c]):(\w+)(?!:)', re.UNICODE)
- _execution_options = \
- Executable._execution_options.union({'autocommit'
- : PARSE_AUTOCOMMIT})
-
- @property
- def _select_iterable(self):
- return (self,)
-
- _hide_froms = []
-
- def __init__(
- self,
- text='',
- bind=None,
- bindparams=None,
- typemap=None,
- autocommit=None,
- ):
- self._bind = bind
- self.bindparams = {}
- self.typemap = typemap
- if autocommit is not None:
- util.warn_deprecated('autocommit on text() is deprecated. '
- 'Use .execution_options(autocommit=Tru'
- 'e)')
- self._execution_options = \
- self._execution_options.union({'autocommit'
- : autocommit})
- if typemap is not None:
- for key in typemap.keys():
- typemap[key] = sqltypes.to_instance(typemap[key])
-
- def repl(m):
- self.bindparams[m.group(1)] = bindparam(m.group(1))
- return ':%s' % m.group(1)
-
- # scan the string and search for bind parameter names, add them
- # to the list of bindparams
-
- self.text = self._bind_params_regex.sub(repl, text)
- if bindparams is not None:
- for b in bindparams:
- self.bindparams[b.key] = b
-
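# [editorial sketch -- not part of the original module]
# text() scans the string with the regex above and generates a
# bindparam() for each :name-style parameter it finds.
from sqlalchemy.sql import text

t = text("select id from users where id = :uid")
assert 'uid' in t.bindparams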
- @property
- def type(self):
- if self.typemap is not None and len(self.typemap) == 1:
-     # return the single configured type instance (the dict's
-     # value, not its key)
-     return list(self.typemap.values())[0]
- else:
-     return sqltypes.NULLTYPE
-
- def self_group(self, against=None):
- if against is operators.in_op:
- return _Grouping(self)
- else:
- return self
-
- def _copy_internals(self, clone=_clone):
- self.bindparams = dict((b.key, clone(b))
- for b in self.bindparams.values())
-
- def get_children(self, **kwargs):
- return self.bindparams.values()
-
-
-class _Null(ColumnElement):
- """Represent the NULL keyword in a SQL statement.
-
- Public constructor is the :func:`null()` function.
-
- """
-
- __visit_name__ = 'null'
- def __init__(self):
- self.type = sqltypes.NULLTYPE
-
-class _False(ColumnElement):
- """Represent the ``false`` keyword in a SQL statement.
-
- Public constructor is the :func:`false()` function.
-
- """
-
- __visit_name__ = 'false'
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
-class _True(ColumnElement):
- """Represent the ``true`` keyword in a SQL statement.
-
- Public constructor is the :func:`true()` function.
-
- """
-
- __visit_name__ = 'true'
- def __init__(self):
- self.type = sqltypes.BOOLEANTYPE
-
-
-class ClauseList(ClauseElement):
- """Describe a list of clauses, separated by an operator.
-
- By default, the list is comma-separated, as in a column listing.
-
- """
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- self.operator = kwargs.pop('operator', operators.comma_op)
- self.group = kwargs.pop('group', True)
- self.group_contents = kwargs.pop('group_contents', True)
- if self.group_contents:
- self.clauses = [
- _literal_as_text(clause).self_group(against=self.operator)
- for clause in clauses if clause is not None]
- else:
- self.clauses = [
- _literal_as_text(clause)
- for clause in clauses if clause is not None]
-
- @util.memoized_property
- def type(self):
- if self.clauses:
- return self.clauses[0].type
- else:
- return sqltypes.NULLTYPE
-
- def __iter__(self):
- return iter(self.clauses)
-
- def __len__(self):
- return len(self.clauses)
-
- @property
- def _select_iterable(self):
- return iter(self)
-
- def append(self, clause):
- # TODO: not sure if i like the 'group_contents' flag. need to
- # define the difference between a ClauseList of ClauseLists,
- # and a "flattened" ClauseList of ClauseLists. flatten()
- # method ?
- if self.group_contents:
- self.clauses.append(_literal_as_text(clause).\
- self_group(against=self.operator))
- else:
- self.clauses.append(_literal_as_text(clause))
-
- def _copy_internals(self, clone=_clone):
- self.clauses = [clone(clause) for clause in self.clauses]
-
- def get_children(self, **kwargs):
- return self.clauses
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[c._from_objects for c in self.clauses]))
-
- def self_group(self, against=None):
- if self.group and operators.is_precedent(self.operator, against):
- return _Grouping(self)
- else:
- return self
-
- def compare(self, other, **kw):
- """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
- including a comparison of all the clause items.
-
- """
- if not isinstance(other, ClauseList) and len(self.clauses) == 1:
- return self.clauses[0].compare(other, **kw)
- elif isinstance(other, ClauseList) and \
- len(self.clauses) == len(other.clauses):
- for i in range(0, len(self.clauses)):
-     if not self.clauses[i].compare(other.clauses[i], **kw):
-         return False
- return self.operator == other.operator
- else:
- return False
-
-class BooleanClauseList(ClauseList, ColumnElement):
- __visit_name__ = 'clauselist'
-
- def __init__(self, *clauses, **kwargs):
- super(BooleanClauseList, self).__init__(*clauses, **kwargs)
- self.type = sqltypes.to_instance(kwargs.get('type_',
- sqltypes.Boolean))
-
- @property
- def _select_iterable(self):
- return (self, )
-
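# [editorial sketch -- not part of the original module]
# and_() and or_() produce BooleanClauseList objects, i.e. ClauseLists
# joined by a boolean operator.
from sqlalchemy.sql import and_, column

crit = and_(column('a') == 1, column('b') > 2)
# str(crit) renders roughly as:  a = :a_1 AND b > :b_1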
-class _Tuple(ClauseList, ColumnElement):
-
- def __init__(self, *clauses, **kw):
- clauses = [_literal_as_binds(c) for c in clauses]
- super(_Tuple, self).__init__(*clauses, **kw)
- self.type = _type_from_args(clauses)
-
- @property
- def _select_iterable(self):
- return (self, )
-
- def _bind_param(self, operator, obj):
- return _Tuple(*[
- _BindParamClause(None, o, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
- for o in obj
- ]).self_group()
-
-
-class _Case(ColumnElement):
- __visit_name__ = 'case'
-
- def __init__(self, whens, value=None, else_=None):
- try:
- whens = util.dictlike_iteritems(whens)
- except TypeError:
- pass
-
- if value is not None:
- whenlist = [
- (_literal_as_binds(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
- else:
- whenlist = [
- (_no_literals(c).self_group(),
- _literal_as_binds(r)) for (c, r) in whens
- ]
-
- if whenlist:
- type_ = list(whenlist[-1])[-1].type
- else:
- type_ = None
-
- if value is None:
- self.value = None
- else:
- self.value = _literal_as_binds(value)
-
- self.type = type_
- self.whens = whenlist
- if else_ is not None:
- self.else_ = _literal_as_binds(else_)
- else:
- self.else_ = None
-
- def _copy_internals(self, clone=_clone):
- if self.value is not None:
- self.value = clone(self.value)
- self.whens = [(clone(x), clone(y)) for x, y in self.whens]
- if self.else_ is not None:
- self.else_ = clone(self.else_)
-
- def get_children(self, **kwargs):
- if self.value is not None:
- yield self.value
- for x, y in self.whens:
- yield x
- yield y
- if self.else_ is not None:
- yield self.else_
-
- @property
- def _from_objects(self):
- return list(itertools.chain(*[x._from_objects for x in
- self.get_children()]))
-
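# [editorial sketch -- not part of the original module]
# case() is the public constructor for _Case.
from sqlalchemy.sql import case, column

expr = case([(column('n') < 10, 'small')], else_='big')
# str(expr) renders roughly as:
#   CASE WHEN (n < :n_1) THEN :param_1 ELSE :param_2 END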
-class FunctionElement(Executable, ColumnElement, FromClause):
- """Base for SQL function-oriented constructs."""
-
- packagenames = ()
-
- def __init__(self, *clauses, **kwargs):
- """Construct a :class:`.FunctionElement`.
- """
- args = [_literal_as_binds(c, self.name) for c in clauses]
- self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *args).\
- self_group()
-
- @property
- def columns(self):
- """Fulfill the 'columns' contrct of :class:`.ColumnElement`.
-
- Returns a single-element list consisting of this object.
-
- """
- return [self]
-
- @util.memoized_property
- def clauses(self):
- """Return the underlying :class:`.ClauseList` which contains
- the arguments for this :class:`.FunctionElement`.
-
- """
- return self.clause_expr.element
-
- def over(self, partition_by=None, order_by=None):
- """Produce an OVER clause against this function.
-
- Used against aggregate or so-called "window" functions,
- for database backends that support window functions.
-
- The expression::
-
- func.row_number().over(order_by='x')
-
- is shorthand for::
-
- from sqlalchemy import over
- over(func.row_number(), order_by='x')
-
- See :func:`~.expression.over` for a full description.
-
- New in 0.7.
-
- """
- return over(self, partition_by=partition_by, order_by=order_by)
-
- @property
- def _from_objects(self):
- return self.clauses._from_objects
-
- def get_children(self, **kwargs):
- return self.clause_expr,
-
- def _copy_internals(self, clone=_clone):
- self.clause_expr = clone(self.clause_expr)
- self._reset_exported()
- util.reset_memoized(self, 'clauses')
-
- def select(self):
- """Produce a :func:`~.expression.select` construct
- against this :class:`.FunctionElement`.
-
- This is shorthand for::
-
- s = select([function_element])
-
- """
- s = select([self])
- if self._execution_options:
- s = s.execution_options(**self._execution_options)
- return s
-
- def scalar(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind' and return a scalar value.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.scalar` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute().scalar()
-
- def execute(self):
- """Execute this :class:`.FunctionElement` against an embedded
- 'bind'.
-
- This first calls :meth:`~.FunctionElement.select` to
- produce a SELECT construct.
-
- Note that :class:`.FunctionElement` can be passed to
- the :meth:`.Connectable.execute` method of :class:`.Connection`
- or :class:`.Engine`.
-
- """
- return self.select().execute()
-
- def _bind_param(self, operator, obj):
- return _BindParamClause(None, obj, _compared_to_operator=operator,
- _compared_to_type=self.type, unique=True)
-
-
-class Function(FunctionElement):
- """Describe a named SQL function.
-
- See the superclass :class:`.FunctionElement` for a description
- of public methods.
-
- """
-
- __visit_name__ = 'function'
-
- def __init__(self, name, *clauses, **kw):
- """Construct a :class:`.Function`.
-
- The :attr:`.func` construct is normally used to construct
- new :class:`.Function` instances.
-
- """
- self.packagenames = kw.pop('packagenames', None) or []
- self.name = name
- self._bind = kw.get('bind', None)
- self.type = sqltypes.to_instance(kw.get('type_', None))
-
- FunctionElement.__init__(self, *clauses, **kw)
-
- def _bind_param(self, operator, obj):
- return _BindParamClause(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
-
-
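# [editorial sketch -- not part of the original module]
# The ``func`` generator constructs Function objects by attribute access;
# the attribute name becomes the rendered SQL function name.
from sqlalchemy.sql import func

f = func.lower('ABC')
# str(f) renders roughly as:  lower(:lower_1)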
-class _Cast(ColumnElement):
-
- __visit_name__ = 'cast'
-
- def __init__(self, clause, totype, **kwargs):
- self.type = sqltypes.to_instance(totype)
- self.clause = _literal_as_binds(clause, None)
- self.typeclause = _TypeClause(self.type)
-
- def _copy_internals(self, clone=_clone):
- self.clause = clone(self.clause)
- self.typeclause = clone(self.typeclause)
-
- def get_children(self, **kwargs):
- return self.clause, self.typeclause
-
- @property
- def _from_objects(self):
- return self.clause._from_objects
-
-
-class _Extract(ColumnElement):
-
- __visit_name__ = 'extract'
-
- def __init__(self, field, expr, **kwargs):
- self.type = sqltypes.Integer()
- self.field = field
- self.expr = _literal_as_binds(expr, None)
-
- def _copy_internals(self, clone=_clone):
- self.expr = clone(self.expr)
-
- def get_children(self, **kwargs):
- return self.expr,
-
- @property
- def _from_objects(self):
- return self.expr._from_objects
-
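# [editorial sketch -- not part of the original module]
# cast() and extract() are the public constructors for _Cast and _Extract.
from sqlalchemy import Numeric
from sqlalchemy.sql import cast, column, extract

c = cast(column('amount'), Numeric(10, 2))
# renders roughly as:  CAST(amount AS NUMERIC(10, 2))
e = extract('year', column('created_at'))
# renders roughly as:  EXTRACT(year FROM created_at)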
-
-class _UnaryExpression(ColumnElement):
-
- __visit_name__ = 'unary'
-
- def __init__(self, element, operator=None, modifier=None,
- type_=None, negate=None):
- self.operator = operator
- self.modifier = modifier
-
- self.element = _literal_as_text(element).\
- self_group(against=self.operator or self.modifier)
- self.type = sqltypes.to_instance(type_)
- self.negate = negate
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def _copy_internals(self, clone=_clone):
- self.element = clone(self.element)
-
- def get_children(self, **kwargs):
- return self.element,
-
- def compare(self, other, **kw):
- """Compare this :class:`_UnaryExpression` against the given
- :class:`.ClauseElement`."""
-
- return (
- isinstance(other, _UnaryExpression) and
- self.operator == other.operator and
- self.modifier == other.modifier and
- self.element.compare(other.element, **kw)
- )
-
- def _negate(self):
- if self.negate is not None:
- return _UnaryExpression(
- self.element,
- operator=self.negate,
- negate=self.operator,
- modifier=self.modifier,
- type_=self.type)
- else:
- return super(_UnaryExpression, self)._negate()
-
- def self_group(self, against=None):
- if self.operator and operators.is_precedent(self.operator,
- against):
- return _Grouping(self)
- else:
- return self
-
-
-class _BinaryExpression(ColumnElement):
- """Represent an expression that is ``LEFT <operator> RIGHT``."""
-
- __visit_name__ = 'binary'
-
- def __init__(self, left, right, operator, type_=None,
- negate=None, modifiers=None):
- self.left = _literal_as_text(left).self_group(against=operator)
- self.right = _literal_as_text(right).self_group(against=operator)
- self.operator = operator
- self.type = sqltypes.to_instance(type_)
- self.negate = negate
- if modifiers is None:
- self.modifiers = {}
- else:
- self.modifiers = modifiers
-
- def __nonzero__(self):
-     try:
-         return self.operator(hash(self.left), hash(self.right))
-     except Exception:
-         raise TypeError("Boolean value of this clause is not defined")
-
- @property
- def _from_objects(self):
- return self.left._from_objects + self.right._from_objects
-
- def _copy_internals(self, clone=_clone):
- self.left = clone(self.left)
- self.right = clone(self.right)
-
- def get_children(self, **kwargs):
- return self.left, self.right
-
- def compare(self, other, **kw):
- """Compare this :class:`_BinaryExpression` against the
- given :class:`_BinaryExpression`."""
-
- return (
- isinstance(other, _BinaryExpression) and
- self.operator == other.operator and
- (
- self.left.compare(other.left, **kw) and
- self.right.compare(other.right, **kw) or
- (
- operators.is_commutative(self.operator) and
- self.left.compare(other.right, **kw) and
- self.right.compare(other.left, **kw)
- )
- )
- )
-
- def self_group(self, against=None):
- if operators.is_precedent(self.operator, against):
- return _Grouping(self)
- else:
- return self
-
- def _negate(self):
- if self.negate is not None:
- return _BinaryExpression(
- self.left,
- self.right,
- self.negate,
- negate=self.operator,
- type_=sqltypes.BOOLEANTYPE,
- modifiers=self.modifiers)
- else:
- return super(_BinaryExpression, self)._negate()
-
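# [editorial sketch -- not part of the original module]
# _negate() swaps in the stored 'negate' operator when one exists, so
# NOT (a = b) can render as an inequality instead.
from sqlalchemy.sql import column, not_

expr = column('a') == column('b')
# str(not_(expr)) renders roughly as:  a != b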
-class _Exists(_UnaryExpression):
- __visit_name__ = _UnaryExpression.__visit_name__
- _from_objects = []
-
- def __init__(self, *args, **kwargs):
- if args and isinstance(args[0], (_SelectBase, _ScalarSelect)):
- s = args[0]
- else:
- if not args:
- args = ([literal_column('*')],)
- s = select(*args, **kwargs).as_scalar().self_group()
-
- _UnaryExpression.__init__(self, s, operator=operators.exists,
- type_=sqltypes.Boolean)
-
- def select(self, whereclause=None, **params):
- return select([self], whereclause, **params)
-
- def correlate(self, fromclause):
- e = self._clone()
- e.element = self.element.correlate(fromclause).self_group()
- return e
-
- def select_from(self, clause):
- """return a new exists() construct with the given expression set as
- its FROM clause.
-
- """
- e = self._clone()
- e.element = self.element.select_from(clause).self_group()
- return e
-
- def where(self, clause):
- """return a new exists() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
- """
- e = self._clone()
- e.element = self.element.where(clause).self_group()
- return e
-
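# [editorial sketch -- not part of the original module]
# exists() is the public constructor for _Exists; with no arguments it
# wraps SELECT * and gains FROM objects from where() criteria.
from sqlalchemy.sql import column, exists, select, table

t = table('t', column('x'))
stmt = select([exists().where(t.c.x == 5)])
# str(stmt) renders roughly as:
#   SELECT EXISTS (SELECT * FROM t WHERE t.x = :x_1) AS anon_1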
-class Join(FromClause):
- """represent a ``JOIN`` construct between two :class:`.FromClause`
- elements.
-
- The public constructor function for :class:`.Join` is the module-level
- :func:`join()` function, as well as the :func:`join()` method available
- off all :class:`.FromClause` subclasses.
-
- """
- __visit_name__ = 'join'
-
- def __init__(self, left, right, onclause=None, isouter=False):
- """Construct a new :class:`.Join`.
-
- The usual entrypoint here is the :func:`~.expression.join`
- function or the :meth:`.FromClause.join` method of any
- :class:`.FromClause` object.
-
- """
- self.left = _literal_as_text(left)
- self.right = _literal_as_text(right).self_group()
-
- if onclause is None:
- self.onclause = self._match_primaries(self.left, self.right)
- else:
- self.onclause = onclause
-
- self.isouter = isouter
- self.__folded_equivalents = None
-
- @property
- def description(self):
- return "Join object on %s(%d) and %s(%d)" % (
- self.left.description,
- id(self.left),
- self.right.description,
- id(self.right))
-
- def is_derived_from(self, fromclause):
- return fromclause is self or \
- self.left.is_derived_from(fromclause) or\
- self.right.is_derived_from(fromclause)
-
- def self_group(self, against=None):
- return _FromGrouping(self)
-
- def _populate_column_collection(self):
- columns = [c for c in self.left.columns] + \
- [c for c in self.right.columns]
-
- self.primary_key.extend(sqlutil.reduce_columns(
- (c for c in columns if c.primary_key), self.onclause))
- self._columns.update((col._label, col) for col in columns)
- self.foreign_keys.update(itertools.chain(
- *[col.foreign_keys for col in columns]))
-
- def _copy_internals(self, clone=_clone):
- self._reset_exported()
- self.left = clone(self.left)
- self.right = clone(self.right)
- self.onclause = clone(self.onclause)
- self.__folded_equivalents = None
-
- def get_children(self, **kwargs):
- return self.left, self.right, self.onclause
-
- def _match_primaries(self, left, right):
- if isinstance(left, Join):
- left_right = left.right
- else:
- left_right = None
- return sqlutil.join_condition(left, right, a_subset=left_right)
-
- def select(self, whereclause=None, fold_equivalents=False, **kwargs):
- """Create a :class:`.Select` from this :class:`.Join`.
-
- The equivalent long-hand form, given a :class:`.Join` object
- ``j``, is::
-
- from sqlalchemy import select
- j = select([j.left, j.right], **kw).\\
- where(whereclause).\\
- select_from(j)
-
- :param whereclause: the WHERE criterion that will be sent to
- the :func:`select()` function
-
- :param fold_equivalents: based on the join criterion of this
- :class:`.Join`, do not include
- repeat column names in the column list of the resulting
- select, for columns that are calculated to be "equivalent"
- based on the join criterion of this :class:`.Join`. This will
- recursively apply to any joins directly nested by this one
- as well.
-
- :param \**kwargs: all other kwargs are sent to the
- underlying :func:`select()` function.
-
- """
- if fold_equivalents:
- collist = sqlutil.folded_equivalents(self)
- else:
- collist = [self.left, self.right]
-
- return select(collist, whereclause, from_obj=[self], **kwargs)
-
- @property
- def bind(self):
- return self.left.bind or self.right.bind
-
- def alias(self, name=None):
- """return an alias of this :class:`.Join`.
-
- Used against a :class:`.Join` object,
- :meth:`~.Join.alias` calls the :meth:`~.Join.select`
- method first so that a subquery against a
- :func:`.select` construct is generated.
- The :func:`~expression.select` construct also has the
- ``correlate`` flag set to ``False`` and will not
- auto-correlate inside an enclosing :func:`~expression.select`
- construct.
-
- The equivalent long-hand form, given a :class:`.Join` object
- ``j``, is::
-
- from sqlalchemy import select, alias
- j = alias(
- select([j.left, j.right]).\\
- select_from(j).\\
- with_labels(True).\\
- correlate(False),
- name=name
- )
-
- See :func:`~.expression.alias` for further details on
- aliases.
-
- """
- return self.select(use_labels=True, correlate=False).alias(name)
-
- @property
- def _hide_froms(self):
- return itertools.chain(*[_from_objects(x.left, x.right)
- for x in self._cloned_set])
-
- @property
- def _from_objects(self):
- return [self] + \
- self.onclause._from_objects + \
- self.left._from_objects + \
- self.right._from_objects
-
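# [editorial sketch -- not part of the original module]
# join() derives the ON clause from foreign keys when none is given;
# lightweight table()/column() objects have no foreign keys, so the
# onclause is passed explicitly here.
from sqlalchemy.sql import column, table

a = table('a', column('id'))
b = table('b', column('a_id'))
j = a.join(b, a.c.id == b.c.a_id)
# str(j.select()) renders roughly as:
#   SELECT a.id, b.a_id FROM a JOIN b ON a.id = b.a_id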
-class Alias(FromClause):
- """Represents an table or selectable alias (AS).
-
- Represents an alias, as typically applied to any table or
- sub-select within a SQL statement using the ``AS`` keyword (or
- without the keyword on certain databases such as Oracle).
-
- This object is constructed from the :func:`~.expression.alias` module level
- function as well as the :meth:`.FromClause.alias` method available on all
- :class:`.FromClause` subclasses.
-
- """
-
- __visit_name__ = 'alias'
- named_with_column = True
-
- def __init__(self, selectable, name=None):
- baseselectable = selectable
- while isinstance(baseselectable, Alias):
- baseselectable = baseselectable.element
- self.original = baseselectable
- self.supports_execution = baseselectable.supports_execution
- if self.supports_execution:
- self._execution_options = baseselectable._execution_options
- self.element = selectable
- if name is None:
- if self.original.named_with_column:
- name = getattr(self.original, 'name', None)
- name = _generated_label('%%(%d %s)s' % (id(self), name
- or 'anon'))
- self.name = name
-
- @property
- def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
-
- def as_scalar(self):
- try:
- return self.element.as_scalar()
- except AttributeError:
- raise AttributeError("Element %s does not support "
- "'as_scalar()'" % self.element)
-
- def is_derived_from(self, fromclause):
- if fromclause in self._cloned_set:
- return True
- return self.element.is_derived_from(fromclause)
-
- def _populate_column_collection(self):
- for col in self.element.columns:
- col._make_proxy(self)
-
- def _copy_internals(self, clone=_clone):
- self._reset_exported()
- self.element = _clone(self.element)
- baseselectable = self.element
- while isinstance(baseselectable, Alias):
- baseselectable = baseselectable.element
- self.original = baseselectable
-
- def get_children(self, column_collections=True,
- aliased_selectables=True, **kwargs):
- if column_collections:
- for c in self.c:
- yield c
- if aliased_selectables:
- yield self.element
-
- @property
- def _from_objects(self):
- return [self]
-
- @property
- def bind(self):
- return self.element.bind
-
-
-class _Grouping(ColumnElement):
- """Represent a grouping within a column expression"""
-
- __visit_name__ = 'grouping'
-
- def __init__(self, element):
- self.element = element
- self.type = getattr(element, 'type', None)
-
- @property
- def _label(self):
- return getattr(self.element, '_label', None) or self.anon_label
-
- def _copy_internals(self, clone=_clone):
- self.element = clone(self.element)
-
- def get_children(self, **kwargs):
- return self.element,
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def __getattr__(self, attr):
- return getattr(self.element, attr)
-
- def __getstate__(self):
- return {'element':self.element, 'type':self.type}
-
- def __setstate__(self, state):
- self.element = state['element']
- self.type = state['type']
-
-class _FromGrouping(FromClause):
- """Represent a grouping of a FROM clause"""
- __visit_name__ = 'grouping'
-
- def __init__(self, element):
- self.element = element
-
- def _init_collections(self):
- pass
-
- @property
- def columns(self):
- return self.element.columns
-
- @property
- def primary_key(self):
- return self.element.primary_key
-
- @property
- def foreign_keys(self):
- # this could be
- # self.element.foreign_keys
- # see SelectableTest.test_join_condition
- return set()
-
- @property
- def _hide_froms(self):
- return self.element._hide_froms
-
- def get_children(self, **kwargs):
- return self.element,
-
- def _copy_internals(self, clone=_clone):
- self.element = clone(self.element)
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def __getattr__(self, attr):
- return getattr(self.element, attr)
-
- def __getstate__(self):
- return {'element':self.element}
-
- def __setstate__(self, state):
- self.element = state['element']
-
-class _Over(ColumnElement):
- """Represent an OVER clause.
-
- This is a special operator against a so-called
- "window" function, as well as any aggregate function,
- which produces results relative to the result set
- itself. It's supported only by certain database
- backends.
-
- """
- __visit_name__ = 'over'
-
- order_by = None
- partition_by = None
-
- def __init__(self, func, partition_by=None, order_by=None):
- self.func = func
- if order_by is not None:
- self.order_by = ClauseList(*util.to_list(order_by))
- if partition_by is not None:
- self.partition_by = ClauseList(*util.to_list(partition_by))
-
- @util.memoized_property
- def type(self):
- return self.func.type
-
- def get_children(self, **kwargs):
- return [c for c in
- (self.func, self.partition_by, self.order_by)
- if c is not None]
-
- def _copy_internals(self, clone=_clone):
- self.func = clone(self.func)
- if self.partition_by is not None:
- self.partition_by = clone(self.partition_by)
- if self.order_by is not None:
- self.order_by = clone(self.order_by)
-
- @property
- def _from_objects(self):
- return list(itertools.chain(
- *[c._from_objects for c in
- (self.func, self.partition_by, self.order_by)
- if c is not None]
- ))
-
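# [editorial sketch -- not part of the original module]
# _Over is produced by the over() function or FunctionElement.over(),
# both new in 0.7.
from sqlalchemy.sql import column, func

w = func.row_number().over(order_by=column('x'))
# str(w) renders roughly as:  row_number() OVER (ORDER BY x)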
-class _Label(ColumnElement):
- """Represents a column label (AS).
-
- Represent a label, as typically applied to any column-level
- element using the ``AS`` sql keyword.
-
- This object is constructed from the :func:`label()` module level
- function as well as the :func:`label()` method available on all
- :class:`.ColumnElement` subclasses.
-
- """
-
- __visit_name__ = 'label'
-
- def __init__(self, name, element, type_=None):
- while isinstance(element, _Label):
- element = element.element
- self.name = self.key = self._label = name \
- or _generated_label('%%(%d %s)s' % (id(self),
- getattr(element, 'name', 'anon')))
- self._element = element
- self._type = type_
- self.quote = element.quote
- self.proxies = [element]
-
- @util.memoized_property
- def type(self):
- return sqltypes.to_instance(
- self._type or getattr(self._element, 'type', None)
- )
-
- @util.memoized_property
- def element(self):
- return self._element.self_group(against=operators.as_)
-
- def self_group(self, against=None):
- sub_element = self._element.self_group(against=against)
- if sub_element is not self._element:
- return _Label(self.name,
- sub_element,
- type_=self._type)
- else:
- return self._element
-
- @property
- def primary_key(self):
- return self.element.primary_key
-
- @property
- def foreign_keys(self):
- return self.element.foreign_keys
-
- def get_children(self, **kwargs):
- return self.element,
-
- def _copy_internals(self, clone=_clone):
- self.element = clone(self.element)
-
- @property
- def _from_objects(self):
- return self.element._from_objects
-
- def _make_proxy(self, selectable, name=None):
- e = self.element._make_proxy(selectable, name=name or self.name)
- e.proxies.append(self)
- return e
-
-class ColumnClause(_Immutable, ColumnElement):
- """Represents a generic column expression from any textual string.
-
- This includes columns associated with tables, aliases and select
- statements, but also any arbitrary text. May or may not be bound
- to an underlying :class:`.Selectable`.
-
- :class:`.ColumnClause` is typically constructed via
- the :func:`~.expression.column` function. It may be placed directly
- into constructs such as :func:`.select` constructs::
-
- from sqlalchemy.sql import column, select
-
- c1, c2 = column("c1"), column("c2")
- s = select([c1, c2]).where(c1==5)
-
- There is also a variant on :func:`~.expression.column` known
- as :func:`~.expression.literal_column` - the difference is that
- in the latter case, the string value is assumed to be an exact
- expression, rather than a column name, so that no quoting rules
- or similar are applied::
-
- from sqlalchemy.sql import literal_column, select
-
- s = select([literal_column("5 + 7")])
-
- :class:`.ColumnClause` can also be used in a table-like
- fashion by combining the :func:`~.expression.column` function
- with the :func:`~.expression.table` function, to produce
- a "lightweight" form of table metadata::
-
- from sqlalchemy.sql import table, column
-
- user = table("user",
- column("id"),
- column("name"),
- column("description"),
- )
-
- The above construct can be created in an ad-hoc fashion and is
- not associated with any :class:`.schema.MetaData`, unlike its
- more fully fledged :class:`.schema.Table` counterpart.
-
- :param text: the text of the element.
-
- :param selectable: parent selectable.
-
- :param type: :class:`.types.TypeEngine` object which can associate
- this :class:`.ColumnClause` with a type.
-
- :param is_literal: if True, the :class:`.ColumnClause` is assumed to
- be an exact expression that will be delivered to the output with no
- quoting rules applied regardless of case-sensitivity settings. The
- :func:`literal_column()` function is usually used to create such a
- :class:`.ColumnClause`.
-
- """
- __visit_name__ = 'column'
-
- onupdate = default = server_default = server_onupdate = None
-
- def __init__(self, text, selectable=None, type_=None, is_literal=False):
- self.key = self.name = text
- self.table = selectable
- self.type = sqltypes.to_instance(type_)
- self.is_literal = is_literal
-
- @util.memoized_property
- def _from_objects(self):
- if self.table is not None:
- return [self.table]
- else:
- return []
-
- @util.memoized_property
- def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
-
- @util.memoized_property
- def _label(self):
- if self.is_literal:
- return None
-
- elif self.table is not None and self.table.named_with_column:
- if getattr(self.table, 'schema', None):
- label = self.table.schema.replace('.', '_') + "_" + \
- _escape_for_generated(self.table.name) + "_" + \
- _escape_for_generated(self.name)
- else:
- label = _escape_for_generated(self.table.name) + "_" + \
- _escape_for_generated(self.name)
-
- # ensure the label name doesn't conflict with that
- # of an existing column
- if label in self.table.c:
- _label = label
- counter = 1
- while _label in self.table.c:
- _label = label + "_" + str(counter)
- counter += 1
- label = _label
-
- return _generated_label(label)
-
- else:
- return self.name
-
- def label(self, name):
- # currently, anonymous labels don't occur for
- # ColumnClause. The use at the moment
- # is that they do not generate nicely for
- # is_literal clauses. We would like to change
- # this so that label(None) acts as would be expected.
- # See [ticket:2168].
- if name is None:
- return self
- else:
- return super(ColumnClause, self).label(name)
-
-
- def _bind_param(self, operator, obj):
- return _BindParamClause(self.name, obj,
- _compared_to_operator=operator,
- _compared_to_type=self.type,
- unique=True)
-
- def _make_proxy(self, selectable, name=None, attach=True):
- # propagate the "is_literal" flag only if we are keeping our name,
- # otherwise it's considered to be a label
- is_literal = self.is_literal and (name is None or name == self.name)
- c = self._constructor(
- name or self.name,
- selectable=selectable,
- type_=self.type,
- is_literal=is_literal
- )
- c.proxies = [self]
- if attach:
- selectable._columns[c.name] = c
- return c
-
-class TableClause(_Immutable, FromClause):
- """Represents a minimal "table" construct.
-
- The constructor for :class:`.TableClause` is the
- :func:`~.expression.table` function. This produces
- a lightweight table object that has only a name and a
- collection of columns, which are typically produced
- by the :func:`~.expression.column` function::
-
- from sqlalchemy.sql import table, column
-
- user = table("user",
- column("id"),
- column("name"),
- column("description"),
- )
-
- The :class:`.TableClause` construct serves as the base for
- the more commonly used :class:`~.schema.Table` object, providing
- the usual set of :class:`~.expression.FromClause` services including
- the ``.c.`` collection and statement generation methods.
-
- It does **not** provide all the additional schema-level services
- of :class:`~.schema.Table`, including constraints, references to other
- tables, or support for :class:`.MetaData`-level services. It's useful
- on its own as an ad-hoc construct used to generate quick SQL
- statements when a more fully fledged :class:`~.schema.Table` is not on hand.
-
- """
-
- __visit_name__ = 'table'
-
- named_with_column = True
-
- def __init__(self, name, *columns):
- super(TableClause, self).__init__()
- self.name = self.fullname = name
- self._columns = ColumnCollection()
- self.primary_key = ColumnSet()
- self.foreign_keys = set()
- for c in columns:
- self.append_column(c)
-
- def _init_collections(self):
- pass
-
- @util.memoized_property
- def description(self):
- # Py3K
- #return self.name
- # Py2K
- return self.name.encode('ascii', 'backslashreplace')
- # end Py2K
-
- def append_column(self, c):
- self._columns[c.name] = c
- c.table = self
-
- def get_children(self, column_collections=True, **kwargs):
- if column_collections:
- return [c for c in self.c]
- else:
- return []
-
- def count(self, whereclause=None, **params):
- """return a SELECT COUNT generated against this
- :class:`.TableClause`."""
-
- if self.primary_key:
- col = list(self.primary_key)[0]
- else:
- col = list(self.columns)[0]
- return select(
- [func.count(col).label('tbl_row_count')],
- whereclause,
- from_obj=[self],
- **params)
-
- def insert(self, values=None, inline=False, **kwargs):
- """Generate an :func:`insert()` construct."""
-
- return insert(self, values=values, inline=inline, **kwargs)
-
- def update(self, whereclause=None, values=None, inline=False, **kwargs):
- """Generate an :func:`update()` construct."""
-
- return update(self, whereclause=whereclause,
- values=values, inline=inline, **kwargs)
-
- def delete(self, whereclause=None, **kwargs):
- """Generate a :func:`delete()` construct."""
-
- return delete(self, whereclause, **kwargs)
-
- @property
- def _from_objects(self):
- return [self]
-
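# [editorial sketch -- not part of the original module]
# DML constructs generated from a lightweight TableClause.
from sqlalchemy.sql import column, table

user = table('user', column('id'), column('name'))
ins = user.insert().values(name='jack')
# str(ins) renders roughly as:  INSERT INTO "user" (name) VALUES (:name)
upd = user.update().where(user.c.id == 5).values(name='ed')
dele = user.delete().where(user.c.id == 5)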
-class _SelectBase(Executable, FromClause):
- """Base class for :class:`.Select` and ``CompoundSelects``."""
-
- _order_by_clause = ClauseList()
- _group_by_clause = ClauseList()
- _limit = None
- _offset = None
-
- def __init__(self,
- use_labels=False,
- for_update=False,
- limit=None,
- offset=None,
- order_by=None,
- group_by=None,
- bind=None,
- autocommit=None):
- self.use_labels = use_labels
- self.for_update = for_update
- if autocommit is not None:
- util.warn_deprecated('autocommit on select() is '
- 'deprecated. Use .execution_options(a'
- 'utocommit=True)')
- self._execution_options = \
- self._execution_options.union({'autocommit'
- : autocommit})
- if limit is not None:
- self._limit = util.asint(limit)
- if offset is not None:
- self._offset = util.asint(offset)
- self._bind = bind
-
- if order_by is not None:
- self._order_by_clause = ClauseList(*util.to_list(order_by))
- if group_by is not None:
- self._group_by_clause = ClauseList(*util.to_list(group_by))
-
- def as_scalar(self):
- """return a 'scalar' representation of this selectable, which can be
- used as a column expression.
-
- Typically, a select statement which has only one column in its columns
- clause is eligible to be used as a scalar expression.
-
- The returned object is an instance of
- :class:`_ScalarSelect`.
-
- """
- return _ScalarSelect(self)
-
- @_generative
- def apply_labels(self):
- """return a new selectable with the 'use_labels' flag set to True.
-
- This will result in column expressions being generated using labels
- against their table name, such as "SELECT somecolumn AS
- tablename_somecolumn". This allows selectables which contain multiple
- FROM clauses to produce a unique set of column names regardless of
- name conflicts among the individual FROM clauses.
-
- """
- self.use_labels = True
-
- def label(self, name):
- """return a 'scalar' representation of this selectable, embedded as a
- subquery with a label.
-
- See also ``as_scalar()``.
-
- """
- return self.as_scalar().label(name)
-
- @_generative
- @util.deprecated('0.6',
- message=":func:`.autocommit` is deprecated. Use "
- ":func:`.Executable.execution_options` with the "
- "'autocommit' flag.")
- def autocommit(self):
- """return a new selectable with the 'autocommit' flag set to
- True."""
-
- self._execution_options = \
- self._execution_options.union({'autocommit': True})
-
- def _generate(self):
- """Override the default _generate() method to also clear out
- exported collections."""
-
- s = self.__class__.__new__(self.__class__)
- s.__dict__ = self.__dict__.copy()
- s._reset_exported()
- return s
-
- @_generative
- def limit(self, limit):
- """return a new selectable with the given LIMIT criterion
- applied."""
-
- self._limit = util.asint(limit)
-
- @_generative
- def offset(self, offset):
- """return a new selectable with the given OFFSET criterion
- applied."""
-
- self._offset = util.asint(offset)
-
- @_generative
- def order_by(self, *clauses):
- """return a new selectable with the given list of ORDER BY
- criterion applied.
-
- The criterion will be appended to any pre-existing ORDER BY
- criterion.
-
- """
-
- self.append_order_by(*clauses)
-
- @_generative
- def group_by(self, *clauses):
- """return a new selectable with the given list of GROUP BY
- criterion applied.
-
- The criterion will be appended to any pre-existing GROUP BY
- criterion.
-
- """
-
- self.append_group_by(*clauses)
-
- def append_order_by(self, *clauses):
- """Append the given ORDER BY criterion applied to this selectable.
-
- The criterion will be appended to any pre-existing ORDER BY criterion.
-
- """
- if len(clauses) == 1 and clauses[0] is None:
- self._order_by_clause = ClauseList()
- else:
- if getattr(self, '_order_by_clause', None) is not None:
- clauses = list(self._order_by_clause) + list(clauses)
- self._order_by_clause = ClauseList(*clauses)
-
- def append_group_by(self, *clauses):
- """Append the given GROUP BY criterion applied to this selectable.
-
- The criterion will be appended to any pre-existing GROUP BY criterion.
-
- """
- if len(clauses) == 1 and clauses[0] is None:
- self._group_by_clause = ClauseList()
- else:
- if getattr(self, '_group_by_clause', None) is not None:
- clauses = list(self._group_by_clause) + list(clauses)
- self._group_by_clause = ClauseList(*clauses)
-
- @property
- def _from_objects(self):
- return [self]
-
-
-class _ScalarSelect(_Grouping):
- _from_objects = []
-
- def __init__(self, element):
- self.element = element
- self.type = element._scalar_type()
-
- @property
- def columns(self):
- raise exc.InvalidRequestError('Scalar Select expression has no '
- 'columns; use this object directly within a '
- 'column-level expression.')
- c = columns
-
- def self_group(self, **kwargs):
- return self
-
- def _make_proxy(self, selectable, name):
- return list(self.inner_columns)[0]._make_proxy(selectable, name)
-
-class CompoundSelect(_SelectBase):
- """Forms the basis of ``UNION``, ``UNION ALL``, and other
- SELECT-based set operations."""
-
- __visit_name__ = 'compound_select'
-
- UNION = util.symbol('UNION')
- UNION_ALL = util.symbol('UNION ALL')
- EXCEPT = util.symbol('EXCEPT')
- EXCEPT_ALL = util.symbol('EXCEPT ALL')
- INTERSECT = util.symbol('INTERSECT')
- INTERSECT_ALL = util.symbol('INTERSECT ALL')
-
- def __init__(self, keyword, *selects, **kwargs):
- self._should_correlate = kwargs.pop('correlate', False)
- self.keyword = keyword
- self.selects = []
-
- numcols = None
-
- # some DBs do not like ORDER BY in the inner queries of a UNION, etc.
- for n, s in enumerate(selects):
- s = _clause_element_as_expr(s)
-
- if not numcols:
- numcols = len(s.c)
- elif len(s.c) != numcols:
- raise exc.ArgumentError('All selectables passed to '
- 'CompoundSelect must have identical numbers of '
- 'columns; select #%d has %d columns, select '
- '#%d has %d' % (1, len(self.selects[0].c), n
- + 1, len(s.c)))
-
- self.selects.append(s.self_group(self))
-
- _SelectBase.__init__(self, **kwargs)
-
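# [editorial sketch -- not part of the original module]
# union() and friends construct CompoundSelect; all member selects must
# export the same number of columns.
from sqlalchemy.sql import column, select, table, union

t1 = table('t1', column('x'))
t2 = table('t2', column('x'))
u = union(select([t1.c.x]), select([t2.c.x]))
# str(u) renders roughly as:
#   SELECT t1.x FROM t1 UNION SELECT t2.x FROM t2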
- def _scalar_type(self):
- return self.selects[0]._scalar_type()
-
- def self_group(self, against=None):
- return _FromGrouping(self)
-
- def is_derived_from(self, fromclause):
- for s in self.selects:
- if s.is_derived_from(fromclause):
- return True
- return False
-
- def _populate_column_collection(self):
- for cols in zip(*[s.c for s in self.selects]):
-
- # this is a slightly hacky thing - the union exports a
- # column that resembles just that of the *first* selectable.
- # to get at a "composite" column, particularly foreign keys,
- # you have to dig through the proxies collection which we
- # generate below. We may want to improve upon this, such as
- # perhaps _make_proxy can accept a list of other columns
- # that are "shared" - schema.column can then copy all the
- # ForeignKeys in. this would allow the union() to have all
- # those fks too.
-
- proxy = cols[0]._make_proxy(self, name=self.use_labels
- and cols[0]._label or None)
-
- # hand-construct the "proxies" collection to include all
- # derived columns; place a 'weight' annotation corresponding
- # to how low in the list of select()s the column occurs, so
- # that the corresponding_column() operation can resolve
- # conflicts
-
- proxy.proxies = [c._annotate({'weight': i + 1}) for (i,
- c) in enumerate(cols)]
-
- def _copy_internals(self, clone=_clone):
- self._reset_exported()
- self.selects = [clone(s) for s in self.selects]
- if hasattr(self, '_col_map'):
- del self._col_map
- for attr in ('_order_by_clause', '_group_by_clause'):
- if getattr(self, attr) is not None:
- setattr(self, attr, clone(getattr(self, attr)))
-
- def get_children(self, column_collections=True, **kwargs):
- return (column_collections and list(self.c) or []) \
- + [self._order_by_clause, self._group_by_clause] \
- + list(self.selects)
-
- def bind(self):
-     if self._bind:
-         return self._bind
-     for s in self.selects:
-         e = s.bind
-         if e:
-             return e
-     return None
-
- def _set_bind(self, bind):
-     self._bind = bind
-
- bind = property(bind, _set_bind)
-
-class Select(_SelectBase):
- """Represents a ``SELECT`` statement.
-
- Select statements support appendable clauses, as well as the
- ability to execute themselves and return a result set.
-
- """
-
- __visit_name__ = 'select'
-
- _prefixes = ()
- _hints = util.immutabledict()
- _distinct = False
-
- def __init__(self,
- columns,
- whereclause=None,
- from_obj=None,
- distinct=False,
- having=None,
- correlate=True,
- prefixes=None,
- **kwargs):
- """Construct a Select object.
-
- The public constructor for Select is the
- :func:`select` function; see that function for
- argument descriptions.
-
- Additional generative and mutator methods are available on the
- :class:`_SelectBase` superclass.
-
- """
- self._should_correlate = correlate
- if distinct is not False:
- if isinstance(distinct, basestring):
- util.warn_deprecated(
- "A string argument passed to the 'distinct' "
- "keyword argument of 'select()' is deprecated "
- "- please use 'prefixes' or 'prefix_with()' "
- "to specify additional prefixes")
- if prefixes:
- prefixes = util.to_list(prefixes) + [distinct]
- else:
- prefixes = [distinct]
- elif distinct is True:
- self._distinct = True
- else:
- self._distinct = [
- _literal_as_text(e)
- for e in util.to_list(distinct)
- ]
-
- self._correlate = set()
- self._froms = util.OrderedSet()
-
- try:
- cols_present = bool(columns)
- except TypeError:
- raise exc.ArgumentError("columns argument to select() must "
- "be a Python list or other iterable")
-
- if cols_present:
- self._raw_columns = []
- for c in columns:
- c = _literal_as_column(c)
- if isinstance(c, _ScalarSelect):
- c = c.self_group(against=operators.comma_op)
- self._raw_columns.append(c)
-
- self._froms.update(_from_objects(*self._raw_columns))
- else:
- self._raw_columns = []
-
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- self._froms.update(_from_objects(self._whereclause))
- else:
- self._whereclause = None
-
- if from_obj is not None:
- for f in util.to_list(from_obj):
- if _is_literal(f):
- self._froms.add(_TextClause(f))
- else:
- self._froms.add(f)
-
- if having is not None:
- self._having = _literal_as_text(having)
- else:
- self._having = None
-
- if prefixes:
- self._prefixes = tuple([_literal_as_text(p) for p in prefixes])
-
- _SelectBase.__init__(self, **kwargs)
-
- def _get_display_froms(self, existing_froms=None):
- """Return the full list of 'from' clauses to be displayed.
-
- Takes into account a set of existing froms which may be
- rendered in the FROM clause of enclosing selects; this Select
- may want to leave those absent if it is automatically
- correlating.
-
- """
- froms = self._froms
-
-        # a chain() iterator is always truthy; realize it as a set so
-        # that the emptiness check below is meaningful
-        toremove = set(itertools.chain(*[f._hide_froms for f in froms]))
-        if toremove:
-            froms = froms.difference(toremove)
-
- if len(froms) > 1 or self._correlate:
- if self._correlate:
- froms = froms.difference(_cloned_intersection(froms,
- self._correlate))
- if self._should_correlate and existing_froms:
- froms = froms.difference(_cloned_intersection(froms,
- existing_froms))
-
- if not len(froms):
- raise exc.InvalidRequestError("Select statement '%s"
- "' returned no FROM clauses due to "
- "auto-correlation; specify "
- "correlate(<tables>) to control "
- "correlation manually." % self)
-
- return froms
-
- def _scalar_type(self):
- elem = self._raw_columns[0]
- cols = list(elem._select_iterable)
- return cols[0].type
-
- @property
- def froms(self):
- """Return the displayed list of FromClause elements."""
-
- return self._get_display_froms()
-
- @_generative
- def with_hint(self, selectable, text, dialect_name='*'):
- """Add an indexing hint for the given selectable to this
- :class:`.Select`.
-
- The text of the hint is rendered in the appropriate
- location for the database backend in use, relative
- to the given :class:`.Table` or :class:`.Alias` passed as the
- *selectable* argument. The dialect implementation
- typically uses Python string substitution syntax
- with the token ``%(name)s`` to render the name of
- the table or alias. E.g. when using Oracle, the
- following::
-
- select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)")
-
- Would render SQL as::
-
- select /*+ index(mytable ix_mytable) */ ... from mytable
-
- The ``dialect_name`` option will limit the rendering of a particular
- hint to a particular backend. Such as, to add hints for both Oracle
- and Sybase simultaneously::
-
- select([mytable]).\\
- with_hint(mytable, "+ index(%(name)s ix_mytable)", 'oracle').\\
- with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
-
- """
- self._hints = self._hints.union({(selectable, dialect_name):text})
-
- @property
- def type(self):
- raise exc.InvalidRequestError("Select objects don't have a type. "
- "Call as_scalar() on this Select object "
- "to return a 'scalar' version of this Select.")
-
- @util.memoized_instancemethod
- def locate_all_froms(self):
- """return a Set of all FromClause elements referenced by this Select.
-
- This set is a superset of that returned by the ``froms`` property,
- which is specifically for those FromClause elements that would
- actually be rendered.
-
- """
- return self._froms.union(_from_objects(*list(self._froms)))
-
- @property
- def inner_columns(self):
- """an iterator of all ColumnElement expressions which would
- be rendered into the columns clause of the resulting SELECT statement.
-
- """
- return _select_iterables(self._raw_columns)
-
- def is_derived_from(self, fromclause):
- if self in fromclause._cloned_set:
- return True
-
- for f in self.locate_all_froms():
- if f.is_derived_from(fromclause):
- return True
- return False
-
- def _copy_internals(self, clone=_clone):
- self._reset_exported()
- from_cloned = dict((f, clone(f))
- for f in self._froms.union(self._correlate))
- self._froms = util.OrderedSet(from_cloned[f] for f in self._froms)
- self._correlate = set(from_cloned[f] for f in self._correlate)
- self._raw_columns = [clone(c) for c in self._raw_columns]
- for attr in '_whereclause', '_having', '_order_by_clause', \
- '_group_by_clause':
- if getattr(self, attr) is not None:
- setattr(self, attr, clone(getattr(self, attr)))
-
- def get_children(self, column_collections=True, **kwargs):
- """return child elements as per the ClauseElement specification."""
-
- return (column_collections and list(self.columns) or []) + \
- self._raw_columns + list(self._froms) + \
- [x for x in
- (self._whereclause, self._having,
- self._order_by_clause, self._group_by_clause)
- if x is not None]
-
- @_generative
- def column(self, column):
- """return a new select() construct with the given column expression
- added to its columns clause.
-
- """
-
- column = _literal_as_column(column)
-
- if isinstance(column, _ScalarSelect):
- column = column.self_group(against=operators.comma_op)
-
- self._raw_columns = self._raw_columns + [column]
- self._froms = self._froms.union(_from_objects(column))
-
- @_generative
- def with_only_columns(self, columns):
- """return a new select() construct with its columns clause replaced
- with the given columns.
-
- """
-
- self._raw_columns = [
- isinstance(c, _ScalarSelect) and
- c.self_group(against=operators.comma_op) or c
- for c in [_literal_as_column(c) for c in columns]
- ]
-
- @_generative
- def where(self, whereclause):
- """return a new select() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
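-        For example (an illustrative sketch; ``users`` is a hypothetical
-        table)::
-
-            stmt = select([users]).where(users.c.id > 5)
-            stmt = stmt.where(users.c.name == 'ed')  # joined via AND
-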
- """
-
- self.append_whereclause(whereclause)
-
- @_generative
- def having(self, having):
- """return a new select() construct with the given expression added to
- its HAVING clause, joined to the existing clause via AND, if any.
-
- """
- self.append_having(having)
-
- @_generative
- def distinct(self, *expr):
- """Return a new select() construct which will apply DISTINCT to its
- columns clause.
-
- :param \*expr: optional column expressions. When present,
-        the Postgresql dialect will render a ``DISTINCT ON (<expressions>)``
- construct.
-
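-        For example (an illustrative sketch; ``users`` is a hypothetical
-        table)::
-
-            select([users]).distinct()              # SELECT DISTINCT ...
-            select([users]).distinct(users.c.name)  # DISTINCT ON (users.name)
-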
- """
- if expr:
- expr = [_literal_as_text(e) for e in expr]
- if isinstance(self._distinct, list):
- self._distinct = self._distinct + expr
- else:
- self._distinct = expr
- else:
- self._distinct = True
-
- @_generative
- def prefix_with(self, *expr):
- """return a new select() construct which will apply the given
- expressions, typically strings, to the start of its columns clause,
-        not using any commas.  This is particularly useful for MySQL
- keywords.
-
- e.g.::
-
- select(['a', 'b']).prefix_with('HIGH_PRIORITY',
- 'SQL_SMALL_RESULT',
- 'ALL')
-
- Would render::
-
- SELECT HIGH_PRIORITY SQL_SMALL_RESULT ALL a, b
-
- """
- expr = tuple(_literal_as_text(e) for e in expr)
- self._prefixes = self._prefixes + expr
-
- @_generative
- def select_from(self, fromclause):
- """return a new select() construct with the given FROM expression
- applied to its list of FROM objects.
-
- """
- fromclause = _literal_as_text(fromclause)
- self._froms = self._froms.union([fromclause])
-
- @_generative
- def correlate(self, *fromclauses):
- """return a new select() construct which will correlate the given FROM
- clauses to that of an enclosing select(), if a match is found.
-
- By "match", the given fromclause must be present in this select's
- list of FROM objects and also present in an enclosing select's list of
- FROM objects.
-
- Calling this method turns off the select's default behavior of
- "auto-correlation". Normally, select() auto-correlates all of its FROM
- clauses to those of an embedded select when compiled.
-
- If the fromclause is None, correlation is disabled for the returned
- select().
-
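-        For example (an illustrative sketch; ``users`` and ``addresses``
-        are hypothetical tables)::
-
-            inner = select([addresses.c.email]).\\
-                        where(addresses.c.user_id == users.c.id).\\
-                        correlate(users)
-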
- """
- self._should_correlate = False
- if fromclauses == (None,):
- self._correlate = set()
- else:
- self._correlate = self._correlate.union(fromclauses)
-
- def append_correlation(self, fromclause):
- """append the given correlation expression to this select()
- construct."""
-
- self._should_correlate = False
- self._correlate = self._correlate.union([fromclause])
-
- def append_column(self, column):
- """append the given column expression to the columns clause of this
- select() construct.
-
- """
- column = _literal_as_column(column)
-
- if isinstance(column, _ScalarSelect):
- column = column.self_group(against=operators.comma_op)
-
- self._raw_columns = self._raw_columns + [column]
- self._froms = self._froms.union(_from_objects(column))
- self._reset_exported()
-
- def append_prefix(self, clause):
- """append the given columns clause prefix expression to this select()
- construct.
-
- """
- clause = _literal_as_text(clause)
- self._prefixes = self._prefixes + (clause,)
-
- def append_whereclause(self, whereclause):
- """append the given expression to this select() construct's WHERE
- criterion.
-
- The expression will be joined to existing WHERE criterion via AND.
-
- """
- whereclause = _literal_as_text(whereclause)
- self._froms = self._froms.union(_from_objects(whereclause))
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause, whereclause)
- else:
- self._whereclause = whereclause
-
- def append_having(self, having):
- """append the given expression to this select() construct's HAVING
- criterion.
-
- The expression will be joined to existing HAVING criterion via AND.
-
- """
- if self._having is not None:
- self._having = and_(self._having, _literal_as_text(having))
- else:
- self._having = _literal_as_text(having)
-
- def append_from(self, fromclause):
- """append the given FromClause expression to this select() construct's
- FROM clause.
-
- """
- if _is_literal(fromclause):
- fromclause = _TextClause(fromclause)
-
- self._froms = self._froms.union([fromclause])
-
- def __exportable_columns(self):
- for column in self._raw_columns:
- if isinstance(column, Selectable):
- for co in column.columns:
- yield co
- elif isinstance(column, ColumnElement):
- yield column
- else:
- continue
-
- def _populate_column_collection(self):
- for c in self.__exportable_columns():
- c._make_proxy(self, name=self.use_labels and c._label or None)
-
- def self_group(self, against=None):
- """return a 'grouping' construct as per the ClauseElement
- specification.
-
- This produces an element that can be embedded in an expression. Note
- that this method is called automatically as needed when constructing
- expressions.
-
- """
- if isinstance(against, CompoundSelect):
- return self
- return _FromGrouping(self)
-
- def union(self, other, **kwargs):
- """return a SQL UNION of this select() construct against the given
- selectable."""
-
- return union(self, other, **kwargs)
-
- def union_all(self, other, **kwargs):
- """return a SQL UNION ALL of this select() construct against the given
- selectable.
-
- """
- return union_all(self, other, **kwargs)
-
- def except_(self, other, **kwargs):
- """return a SQL EXCEPT of this select() construct against the given
- selectable."""
-
- return except_(self, other, **kwargs)
-
- def except_all(self, other, **kwargs):
- """return a SQL EXCEPT ALL of this select() construct against the
- given selectable.
-
- """
- return except_all(self, other, **kwargs)
-
- def intersect(self, other, **kwargs):
- """return a SQL INTERSECT of this select() construct against the given
- selectable.
-
- """
- return intersect(self, other, **kwargs)
-
- def intersect_all(self, other, **kwargs):
- """return a SQL INTERSECT ALL of this select() construct against the
- given selectable.
-
- """
- return intersect_all(self, other, **kwargs)
-
- def bind(self):
- if self._bind:
- return self._bind
- if not self._froms:
- for c in self._raw_columns:
- e = c.bind
- if e:
- self._bind = e
- return e
- else:
- e = list(self._froms)[0].bind
- if e:
- self._bind = e
- return e
-
- return None
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
-class UpdateBase(Executable, ClauseElement):
- """Form the base for ``INSERT``, ``UPDATE``, and ``DELETE`` statements."""
-
- __visit_name__ = 'update_base'
-
- _execution_options = \
- Executable._execution_options.union({'autocommit': True})
- kwargs = util.immutabledict()
-
- def _process_colparams(self, parameters):
- if isinstance(parameters, (list, tuple)):
- pp = {}
- for i, c in enumerate(self.table.c):
- pp[c.key] = parameters[i]
- return pp
- else:
- return parameters
-
- def params(self, *arg, **kw):
- raise NotImplementedError(
- "params() is not supported for INSERT/UPDATE/DELETE statements."
- " To set the values for an INSERT or UPDATE statement, use"
- " stmt.values(**parameters).")
-
- def bind(self):
- return self._bind or self.table.bind
-
- def _set_bind(self, bind):
- self._bind = bind
- bind = property(bind, _set_bind)
-
- _returning_re = re.compile(r'(?:firebird|postgres(?:ql)?)_returning')
- def _process_deprecated_kw(self, kwargs):
- for k in list(kwargs):
- m = self._returning_re.match(k)
- if m:
- self._returning = kwargs.pop(k)
- util.warn_deprecated(
- "The %r argument is deprecated. Please "
- "use statement.returning(col1, col2, ...)" % k
- )
- return kwargs
-
- @_generative
- def returning(self, *cols):
- """Add a RETURNING or equivalent clause to this statement.
-
- The given list of columns represent columns within the table that is
- the target of the INSERT, UPDATE, or DELETE. Each element can be any
- column expression. :class:`~sqlalchemy.schema.Table` objects will be
- expanded into their individual columns.
-
- Upon compilation, a RETURNING clause, or database equivalent,
- will be rendered within the statement. For INSERT and UPDATE,
- the values are the newly inserted/updated values. For DELETE,
- the values are those of the rows which were deleted.
-
- Upon execution, the values of the columns to be returned
- are made available via the result set and can be iterated
- using ``fetchone()`` and similar. For DBAPIs which do not
-        natively support returning values (e.g. cx_oracle),
- SQLAlchemy will approximate this behavior at the result level
- so that a reasonable amount of behavioral neutrality is
- provided.
-
- Note that not all databases/DBAPIs
- support RETURNING. For those backends with no support,
- an exception is raised upon compilation and/or execution.
- For those who do support it, the functionality across backends
- varies greatly, including restrictions on executemany()
- and other statements which return multiple rows. Please
- read the documentation notes for the database in use in
- order to determine the availability of RETURNING.
-
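-        E.g. (an illustrative sketch; ``users`` is a hypothetical table
-        and ``conn`` a :class:`~sqlalchemy.engine.base.Connection`)::
-
-            stmt = users.insert().returning(users.c.id)
-            result = conn.execute(stmt, name='foo')
-            new_id = result.fetchone()['id']
-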
- """
- self._returning = cols
-
-class ValuesBase(UpdateBase):
- """Supplies support for :meth:`.ValuesBase.values` to INSERT and UPDATE constructs."""
-
- __visit_name__ = 'values_base'
-
- def __init__(self, table, values):
- self.table = table
- self.parameters = self._process_colparams(values)
-
- @_generative
- def values(self, *args, **kwargs):
- """specify the VALUES clause for an INSERT statement, or the SET
- clause for an UPDATE.
-
- \**kwargs
- key=<somevalue> arguments
-
- \*args
- A single dictionary can be sent as the first positional
- argument. This allows non-string based keys, such as Column
- objects, to be used.
-
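-        E.g. (an illustrative sketch; ``users`` is a hypothetical
-        table)::
-
-            users.insert().values(name='ed')
-            users.update().values({users.c.name: 'ed'})
-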
- """
- if args:
- v = args[0]
- else:
- v = {}
-
- if self.parameters is None:
- self.parameters = self._process_colparams(v)
- self.parameters.update(kwargs)
- else:
- self.parameters = self.parameters.copy()
- self.parameters.update(self._process_colparams(v))
- self.parameters.update(kwargs)
-
-class Insert(ValuesBase):
- """Represent an INSERT construct.
-
- The :class:`.Insert` object is created using the :func:`insert()` function.
-
- """
- __visit_name__ = 'insert'
-
- _prefixes = ()
-
- def __init__(self,
- table,
- values=None,
- inline=False,
- bind=None,
- prefixes=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values)
- self._bind = bind
- self.select = None
- self.inline = inline
- self._returning = returning
- if prefixes:
- self._prefixes = tuple([_literal_as_text(p) for p in prefixes])
-
- if kwargs:
- self.kwargs = self._process_deprecated_kw(kwargs)
-
- def get_children(self, **kwargs):
- if self.select is not None:
- return self.select,
- else:
- return ()
-
- def _copy_internals(self, clone=_clone):
- # TODO: coverage
- self.parameters = self.parameters.copy()
-
- @_generative
- def prefix_with(self, clause):
- """Add a word or expression between INSERT and INTO. Generative.
-
- If multiple prefixes are supplied, they will be separated with
- spaces.
-
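-        E.g. (an illustrative sketch)::
-
-            table.insert().prefix_with("OR REPLACE")
-
-        would render ``INSERT OR REPLACE INTO ...`` on SQLite.
-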
- """
- clause = _literal_as_text(clause)
- self._prefixes = self._prefixes + (clause,)
-
-class Update(ValuesBase):
- """Represent an Update construct.
-
- The :class:`.Update` object is created using the :func:`update()` function.
-
- """
- __visit_name__ = 'update'
-
- def __init__(self,
- table,
- whereclause,
- values=None,
- inline=False,
- bind=None,
- returning=None,
- **kwargs):
- ValuesBase.__init__(self, table, values)
- self._bind = bind
- self._returning = returning
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
- self.inline = inline
-
- if kwargs:
- self.kwargs = self._process_deprecated_kw(kwargs)
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- def _copy_internals(self, clone=_clone):
- # TODO: coverage
- self._whereclause = clone(self._whereclause)
- self.parameters = self.parameters.copy()
-
- @_generative
- def where(self, whereclause):
- """return a new update() construct with the given expression added to
- its WHERE clause, joined to the existing clause via AND, if any.
-
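-        E.g. (an illustrative sketch; ``users`` is a hypothetical
-        table)::
-
-            users.update().where(users.c.id == 5).values(name='ed')
-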
- """
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
-
-class Delete(UpdateBase):
- """Represent a DELETE construct.
-
- The :class:`.Delete` object is created using the :func:`delete()` function.
-
- """
-
- __visit_name__ = 'delete'
-
- def __init__(self,
- table,
- whereclause,
- bind=None,
-                 returning=None,
- **kwargs):
- self._bind = bind
- self.table = table
- self._returning = returning
-
- if whereclause is not None:
- self._whereclause = _literal_as_text(whereclause)
- else:
- self._whereclause = None
-
- if kwargs:
- self.kwargs = self._process_deprecated_kw(kwargs)
-
- def get_children(self, **kwargs):
- if self._whereclause is not None:
- return self._whereclause,
- else:
- return ()
-
- @_generative
- def where(self, whereclause):
- """Add the given WHERE clause to a newly returned delete construct."""
-
- if self._whereclause is not None:
- self._whereclause = and_(self._whereclause,
- _literal_as_text(whereclause))
- else:
- self._whereclause = _literal_as_text(whereclause)
-
- def _copy_internals(self, clone=_clone):
- # TODO: coverage
- self._whereclause = clone(self._whereclause)
-
-class _IdentifiedClause(Executable, ClauseElement):
-
- __visit_name__ = 'identified'
- _execution_options = \
- Executable._execution_options.union({'autocommit': False})
- quote = None
-
- def __init__(self, ident):
- self.ident = ident
-
-class SavepointClause(_IdentifiedClause):
- __visit_name__ = 'savepoint'
-
-class RollbackToSavepointClause(_IdentifiedClause):
- __visit_name__ = 'rollback_to_savepoint'
-
-class ReleaseSavepointClause(_IdentifiedClause):
- __visit_name__ = 'release_savepoint'
-
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/functions.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/functions.py
deleted file mode 100755
index 71781665..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/functions.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# sql/functions.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy import types as sqltypes, schema
-from sqlalchemy.sql.expression import (
- ClauseList, Function, _literal_as_binds, text, _type_from_args
- )
-from sqlalchemy.sql import operators
-from sqlalchemy.sql.visitors import VisitableType
-
-class _GenericMeta(VisitableType):
- def __call__(self, *args, **kwargs):
- args = [_literal_as_binds(c) for c in args]
- return type.__call__(self, *args, **kwargs)
-
-class GenericFunction(Function):
- __metaclass__ = _GenericMeta
-
- def __init__(self, type_=None, args=(), **kwargs):
- self.packagenames = []
- self.name = self.__class__.__name__
- self._bind = kwargs.get('bind', None)
- self.clause_expr = ClauseList(
- operator=operators.comma_op,
- group_contents=True, *args).self_group()
- self.type = sqltypes.to_instance(
- type_ or getattr(self, '__return_type__', None))
-
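-# Editorial note (not part of the original source): GenericFunction
-# subclasses defined in this module are resolved through the ``func``
-# namespace, e.g. ``func.coalesce(5, 6)`` produces an instance of the
-# ``coalesce`` class below, with its return type derived from the
-# arguments.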
-
-class next_value(Function):
- """Represent the 'next value', given a :class:`.Sequence`
-    as its single argument.
-
- Compiles into the appropriate function on each backend,
- or will raise NotImplementedError if used on a backend
- that does not provide support for sequences.
-
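-    E.g. (an illustrative sketch; assumes the usual ``select`` and
-    ``func`` imports)::
-
-        seq = schema.Sequence('user_id_seq')
-        stmt = select([func.next_value(seq)])
-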
- """
- type = sqltypes.Integer()
- name = "next_value"
-
- def __init__(self, seq, **kw):
- assert isinstance(seq, schema.Sequence), \
- "next_value() accepts a Sequence object as input."
- self._bind = kw.get('bind', None)
- self.sequence = seq
-
- @property
- def _from_objects(self):
- return []
-
-class AnsiFunction(GenericFunction):
- def __init__(self, **kwargs):
- GenericFunction.__init__(self, **kwargs)
-
-class ReturnTypeFromArgs(GenericFunction):
- """Define a function whose return type is the same as its arguments."""
-
- def __init__(self, *args, **kwargs):
- kwargs.setdefault('type_', _type_from_args(args))
- GenericFunction.__init__(self, args=args, **kwargs)
-
-class coalesce(ReturnTypeFromArgs):
- pass
-
-class max(ReturnTypeFromArgs):
- pass
-
-class min(ReturnTypeFromArgs):
- pass
-
-class sum(ReturnTypeFromArgs):
- pass
-
-
-class now(GenericFunction):
- __return_type__ = sqltypes.DateTime
-
-class concat(GenericFunction):
- __return_type__ = sqltypes.String
- def __init__(self, *args, **kwargs):
- GenericFunction.__init__(self, args=args, **kwargs)
-
-class char_length(GenericFunction):
- __return_type__ = sqltypes.Integer
-
- def __init__(self, arg, **kwargs):
- GenericFunction.__init__(self, args=[arg], **kwargs)
-
-class random(GenericFunction):
- def __init__(self, *args, **kwargs):
- kwargs.setdefault('type_', None)
- GenericFunction.__init__(self, args=args, **kwargs)
-
-class count(GenericFunction):
- """The ANSI COUNT aggregate function. With no arguments, emits COUNT \*."""
-
- __return_type__ = sqltypes.Integer
-
- def __init__(self, expression=None, **kwargs):
- if expression is None:
- expression = text('*')
- GenericFunction.__init__(self, args=(expression,), **kwargs)
-
-class current_date(AnsiFunction):
- __return_type__ = sqltypes.Date
-
-class current_time(AnsiFunction):
- __return_type__ = sqltypes.Time
-
-class current_timestamp(AnsiFunction):
- __return_type__ = sqltypes.DateTime
-
-class current_user(AnsiFunction):
- __return_type__ = sqltypes.String
-
-class localtime(AnsiFunction):
- __return_type__ = sqltypes.DateTime
-
-class localtimestamp(AnsiFunction):
- __return_type__ = sqltypes.DateTime
-
-class session_user(AnsiFunction):
- __return_type__ = sqltypes.String
-
-class sysdate(AnsiFunction):
- __return_type__ = sqltypes.DateTime
-
-class user(AnsiFunction):
- __return_type__ = sqltypes.String
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/operators.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/operators.py
deleted file mode 100755
index 494f76f1..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/operators.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# sql/operators.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Defines operators used in SQL expressions."""
-
-from operator import (
- and_, or_, inv, add, mul, sub, mod, truediv, lt, le, ne, gt, ge, eq, neg
- )
-
-# Py2K
-from operator import (div,)
-# end Py2K
-
-from sqlalchemy.util import symbol
-
-
-def from_():
- raise NotImplementedError()
-
-def as_():
- raise NotImplementedError()
-
-def exists():
- raise NotImplementedError()
-
-def is_():
- raise NotImplementedError()
-
-def isnot():
- raise NotImplementedError()
-
-def collate():
- raise NotImplementedError()
-
-def op(a, opstring, b):
- return a.op(opstring)(b)
-
-def like_op(a, b, escape=None):
- return a.like(b, escape=escape)
-
-def notlike_op(a, b, escape=None):
- raise NotImplementedError()
-
-def ilike_op(a, b, escape=None):
- return a.ilike(b, escape=escape)
-
-def notilike_op(a, b, escape=None):
- raise NotImplementedError()
-
-def between_op(a, b, c):
- return a.between(b, c)
-
-def in_op(a, b):
- return a.in_(b)
-
-def notin_op(a, b):
- raise NotImplementedError()
-
-def distinct_op(a):
- return a.distinct()
-
-def startswith_op(a, b, escape=None):
- return a.startswith(b, escape=escape)
-
-def endswith_op(a, b, escape=None):
- return a.endswith(b, escape=escape)
-
-def contains_op(a, b, escape=None):
- return a.contains(b, escape=escape)
-
-def match_op(a, b):
- return a.match(b)
-
-def comma_op(a, b):
- raise NotImplementedError()
-
-def concat_op(a, b):
- return a.concat(b)
-
-def desc_op(a):
- return a.desc()
-
-def asc_op(a):
- return a.asc()
-
-def nullsfirst_op(a):
- return a.nullsfirst()
-
-def nullslast_op(a):
- return a.nullslast()
-
-_commutative = set([eq, ne, add, mul])
-
-def is_commutative(op):
- return op in _commutative
-
-_associative = _commutative.union([concat_op, and_, or_])
-
-
-_smallest = symbol('_smallest')
-_largest = symbol('_largest')
-
-_PRECEDENCE = {
- from_: 15,
- mul: 7,
- truediv: 7,
- # Py2K
- div: 7,
- # end Py2K
- mod: 7,
- neg: 7,
- add: 6,
- sub: 6,
- concat_op: 6,
- match_op: 6,
- ilike_op: 5,
- notilike_op: 5,
- like_op: 5,
- notlike_op: 5,
- in_op: 5,
- notin_op: 5,
- is_: 5,
- isnot: 5,
- eq: 5,
- ne: 5,
- gt: 5,
- lt: 5,
- ge: 5,
- le: 5,
- between_op: 5,
- distinct_op: 5,
- inv: 5,
- and_: 3,
- or_: 2,
- comma_op: -1,
- collate: 7,
- as_: -1,
- exists: 0,
- _smallest: -1000,
- _largest: 1000
-}
-
-def is_precedent(operator, against):
- if operator is against and operator in _associative:
- return False
- else:
- return (_PRECEDENCE.get(operator, _PRECEDENCE[_smallest]) <=
- _PRECEDENCE.get(against, _PRECEDENCE[_largest]))
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/util.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/util.py
deleted file mode 100755
index 1a3f7d2f..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/util.py
+++ /dev/null
@@ -1,717 +0,0 @@
-# sql/util.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from sqlalchemy import exc, schema, util, sql, types as sqltypes
-from sqlalchemy.util import topological
-from sqlalchemy.sql import expression, operators, visitors
-from itertools import chain
-
-"""Utility functions that build upon SQL and Schema constructs."""
-
-def sort_tables(tables):
- """sort a collection of Table objects in order of their foreign-key dependency."""
-
- tables = list(tables)
- tuples = []
- def visit_foreign_key(fkey):
- if fkey.use_alter:
- return
- parent_table = fkey.column.table
- if parent_table in tables:
- child_table = fkey.parent.table
- if parent_table is not child_table:
- tuples.append((parent_table, child_table))
-
- for table in tables:
- visitors.traverse(table,
- {'schema_visitor':True},
- {'foreign_key':visit_foreign_key})
-
- tuples.extend(
- [parent, table] for parent in table._extra_dependencies
- )
-
- return list(topological.sort(tuples, tables))
-
-def find_join_source(clauses, join_to):
- """Given a list of FROM clauses and a selectable,
- return the first index and element from the list of
- clauses which can be joined against the selectable. returns
- None, None if no match is found.
-
- e.g.::
-
- clause1 = table1.join(table2)
- clause2 = table4.join(table5)
-
- join_to = table2.join(table3)
-
- find_join_source([clause1, clause2], join_to) == clause1
-
- """
-
- selectables = list(expression._from_objects(join_to))
- for i, f in enumerate(clauses):
- for s in selectables:
- if f.is_derived_from(s):
- return i, f
-    return None, None
-
-def find_tables(clause, check_columns=False,
- include_aliases=False, include_joins=False,
- include_selects=False, include_crud=False):
- """locate Table objects within the given expression."""
-
- tables = []
- _visitors = {}
-
- if include_selects:
- _visitors['select'] = _visitors['compound_select'] = tables.append
-
- if include_joins:
- _visitors['join'] = tables.append
-
- if include_aliases:
- _visitors['alias'] = tables.append
-
- if include_crud:
- _visitors['insert'] = _visitors['update'] = \
- _visitors['delete'] = lambda ent: tables.append(ent.table)
-
- if check_columns:
- def visit_column(column):
- tables.append(column.table)
- _visitors['column'] = visit_column
-
- _visitors['table'] = tables.append
-
- visitors.traverse(clause, {'column_collections':False}, _visitors)
- return tables
-
-def find_columns(clause):
- """locate Column objects within the given expression."""
-
- cols = util.column_set()
- visitors.traverse(clause, {}, {'column':cols.add})
- return cols
-
-def clause_is_present(clause, search):
- """Given a target clause and a second to search within, return True
- if the target is plainly present in the search without any
- subqueries or aliases involved.
-
- Basically descends through Joins.
-
- """
-
- stack = [search]
- while stack:
- elem = stack.pop()
- if clause is elem:
- return True
- elif isinstance(elem, expression.Join):
- stack.extend((elem.left, elem.right))
- return False
-
-
-def bind_values(clause):
- """Return an ordered list of "bound" values in the given clause.
-
- E.g.::
-
- >>> expr = and_(
- ... table.c.foo==5, table.c.foo==7
- ... )
- >>> bind_values(expr)
- [5, 7]
- """
-
- v = []
- def visit_bindparam(bind):
- value = bind.value
-
- # evaluate callables
- if callable(value):
- value = value()
-
- v.append(value)
-
- visitors.traverse(clause, {}, {'bindparam':visit_bindparam})
- return v
-
-def _quote_ddl_expr(element):
- if isinstance(element, basestring):
- element = element.replace("'", "''")
- return "'%s'" % element
- else:
- return repr(element)
-
-def expression_as_ddl(clause):
- """Given a SQL expression, convert for usage in DDL, such as
- CREATE INDEX and CHECK CONSTRAINT.
-
- Converts bind params into quoted literals, column identifiers
- into detached column constructs so that the parent table
- identifier is not included.
-
- """
- def repl(element):
- if isinstance(element, expression._BindParamClause):
- return expression.literal_column(_quote_ddl_expr(element.value))
- elif isinstance(element, expression.ColumnClause) and \
- element.table is not None:
- return expression.column(element.name)
- else:
- return None
-
- return visitors.replacement_traverse(clause, {}, repl)
-
-def adapt_criterion_to_null(crit, nulls):
- """given criterion containing bind params, convert selected elements to IS NULL."""
-
- def visit_binary(binary):
- if isinstance(binary.left, expression._BindParamClause) and binary.left.key in nulls:
- # reverse order if the NULL is on the left side
- binary.left = binary.right
- binary.right = expression.null()
- binary.operator = operators.is_
- binary.negate = operators.isnot
- elif isinstance(binary.right, expression._BindParamClause) and binary.right.key in nulls:
- binary.right = expression.null()
- binary.operator = operators.is_
- binary.negate = operators.isnot
-
- return visitors.cloned_traverse(crit, {}, {'binary':visit_binary})
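-
-# Editorial sketch (not part of the original source): given
-# crit = (t.c.x == bindparam('x')) and nulls = set(['x']), the
-# criterion returned by adapt_criterion_to_null(crit, nulls)
-# renders as "t.x IS NULL".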
-
-def join_condition(a, b, ignore_nonexistent_tables=False, a_subset=None):
- """create a join condition between two tables or selectables.
-
- e.g.::
-
- join_condition(tablea, tableb)
-
- would produce an expression along the lines of::
-
- tablea.c.id==tableb.c.tablea_id
-
- The join is determined based on the foreign key relationships
- between the two selectables. If there are multiple ways
- to join, or no way to join, an error is raised.
-
- :param ignore_nonexistent_tables: Deprecated - this
- flag is no longer used. Only resolution errors regarding
- the two given tables are propagated.
-
- :param a_subset: An optional expression that is a sub-component
- of ``a``. An attempt will be made to join to just this sub-component
- first before looking at the full ``a`` construct, and if found
- will be successful even if there are other ways to join to ``a``.
-      This allows the "right side" of a join to be passed, thereby
- providing a "natural join".
-
- """
- crit = []
- constraints = set()
-
- for left in (a_subset, a):
- if left is None:
- continue
- for fk in sorted(
- b.foreign_keys,
- key=lambda fk:fk.parent._creation_order):
- try:
- col = fk.get_referent(left)
- except exc.NoReferenceError, nrte:
- if nrte.table_name == left.name:
- raise
- else:
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if left is not b:
- for fk in sorted(
- left.foreign_keys,
- key=lambda fk:fk.parent._creation_order):
- try:
- col = fk.get_referent(b)
- except exc.NoReferenceError, nrte:
- if nrte.table_name == b.name:
- raise
- else:
- # this is totally covered. can't get
- # coverage to mark it.
- continue
-
- if col is not None:
- crit.append(col == fk.parent)
- constraints.add(fk.constraint)
- if crit:
- break
-
- if len(crit) == 0:
- if isinstance(b, expression._FromGrouping):
- hint = " Perhaps you meant to convert the right side to a "\
- "subquery using alias()?"
- else:
- hint = ""
- raise exc.ArgumentError(
- "Can't find any foreign key relationships "
- "between '%s' and '%s'.%s" % (a.description, b.description, hint))
- elif len(constraints) > 1:
- raise exc.ArgumentError(
- "Can't determine join between '%s' and '%s'; "
- "tables have more than one foreign key "
- "constraint relationship between them. "
- "Please specify the 'onclause' of this "
- "join explicitly." % (a.description, b.description))
- elif len(crit) == 1:
- return (crit[0])
- else:
- return sql.and_(*crit)
-
-
-class Annotated(object):
- """clones a ClauseElement and applies an 'annotations' dictionary.
-
- Unlike regular clones, this clone also mimics __hash__() and
- __cmp__() of the original element so that it takes its place
- in hashed collections.
-
-    A reference to the original element is maintained, for the important
-    reason of keeping its hash value current.  If the original were
-    garbage collected, its hash value could be reused, causing conflicts.
-
- """
-
- def __new__(cls, *args):
- if not args:
- # clone constructor
- return object.__new__(cls)
- else:
- element, values = args
- # pull appropriate subclass from registry of annotated
- # classes
- try:
- cls = annotated_classes[element.__class__]
- except KeyError:
- cls = annotated_classes[element.__class__] = type.__new__(type,
- "Annotated%s" % element.__class__.__name__,
- (Annotated, element.__class__), {})
- return object.__new__(cls)
-
- def __init__(self, element, values):
- # force FromClause to generate their internal
- # collections into __dict__
- if isinstance(element, expression.FromClause):
- element.c
-
- self.__dict__ = element.__dict__.copy()
- self.__element = element
- self._annotations = values
-
- def _annotate(self, values):
- _values = self._annotations.copy()
- _values.update(values)
- clone = self.__class__.__new__(self.__class__)
- clone.__dict__ = self.__dict__.copy()
- clone._annotations = _values
- return clone
-
- def _deannotate(self):
- return self.__element
-
- def _compiler_dispatch(self, visitor, **kw):
- return self.__element.__class__._compiler_dispatch(self, visitor, **kw)
-
- @property
- def _constructor(self):
- return self.__element._constructor
-
- def _clone(self):
- clone = self.__element._clone()
- if clone is self.__element:
- # detect immutable, don't change anything
- return self
- else:
- # update the clone with any changes that have occurred
- # to this object's __dict__.
- clone.__dict__.update(self.__dict__)
- return Annotated(clone, self._annotations)
-
- def __hash__(self):
- return hash(self.__element)
-
- def __cmp__(self, other):
- return cmp(hash(self.__element), hash(other))
-
-# hard-generate Annotated subclasses. this technique
-# is used instead of on-the-fly types (i.e. type.__new__())
-# so that the resulting objects are pickleable.
-annotated_classes = {}
-
-for cls in expression.__dict__.values() + [schema.Column, schema.Table]:
- if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
- exec "class Annotated%s(Annotated, cls):\n" \
- " pass" % (cls.__name__, ) in locals()
- exec "annotated_classes[cls] = Annotated%s" % (cls.__name__)
-
-def _deep_annotate(element, annotations, exclude=None):
- """Deep copy the given ClauseElement, annotating each element with the given annotations dictionary.
-
- Elements within the exclude collection will be cloned but not annotated.
-
- """
- def clone(elem):
- # check if element is present in the exclude list.
- # take into account proxying relationships.
- if exclude and \
- hasattr(elem, 'proxy_set') and \
- elem.proxy_set.intersection(exclude):
- elem = elem._clone()
- elif annotations != elem._annotations:
- elem = elem._annotate(annotations.copy())
- elem._copy_internals(clone=clone)
- return elem
-
- if element is not None:
- element = clone(element)
- return element
-
-def _deep_deannotate(element):
- """Deep copy the given element, removing all annotations."""
-
- def clone(elem):
- elem = elem._deannotate()
- elem._copy_internals(clone=clone)
- return elem
-
- if element is not None:
- element = clone(element)
- return element
-
-
-def splice_joins(left, right, stop_on=None):
- if left is None:
- return right
-
- stack = [(right, None)]
-
- adapter = ClauseAdapter(left)
- ret = None
- while stack:
- (right, prevright) = stack.pop()
- if isinstance(right, expression.Join) and right is not stop_on:
- right = right._clone()
- right._reset_exported()
- right.onclause = adapter.traverse(right.onclause)
- stack.append((right.left, right))
- else:
- right = adapter.traverse(right)
- if prevright is not None:
- prevright.left = right
- if ret is None:
- ret = right
-
- return ret
-
-def reduce_columns(columns, *clauses, **kw):
- """given a list of columns, return a 'reduced' set based on natural equivalents.
-
- the set is reduced to the smallest list of columns which have no natural
- equivalent present in the list. A "natural equivalent" means that two columns
- will ultimately represent the same value because they are related by a foreign key.
-
- \*clauses is an optional list of join clauses which will be traversed
- to further identify columns that are "equivalent".
-
- \**kw may specify 'ignore_nonexistent_tables' to ignore foreign keys
- whose tables are not yet configured.
-
-    This function is primarily used to determine the minimal "primary key"
-    from a selectable, by reducing the set of primary key columns present
-    in the selectable to just those that are not repeated.
-
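-    E.g. (an illustrative sketch; ``parent`` and ``child`` are
-    hypothetical tables where ``child.c.parent_id`` references
-    ``parent.c.id``)::
-
-        reduce_columns(
-            [parent.c.id, child.c.parent_id],
-            parent.c.id == child.c.parent_id)
-
-    returns a column set containing just ``parent.c.id``.
-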
- """
- ignore_nonexistent_tables = kw.pop('ignore_nonexistent_tables', False)
-
- columns = util.ordered_column_set(columns)
-
- omit = util.column_set()
- for col in columns:
- for fk in chain(*[c.foreign_keys for c in col.proxy_set]):
- for c in columns:
- if c is col:
- continue
- try:
- fk_col = fk.column
- except exc.NoReferencedTableError:
- if ignore_nonexistent_tables:
- continue
- else:
- raise
- if fk_col.shares_lineage(c):
- omit.add(col)
- break
-
- if clauses:
- def visit_binary(binary):
- if binary.operator == operators.eq:
- cols = util.column_set(chain(*[c.proxy_set for c in columns.difference(omit)]))
- if binary.left in cols and binary.right in cols:
- for c in columns:
- if c.shares_lineage(binary.right):
- omit.add(c)
- break
- for clause in clauses:
- visitors.traverse(clause, {}, {'binary':visit_binary})
-
- return expression.ColumnSet(columns.difference(omit))
-
-def criterion_as_pairs(expression, consider_as_foreign_keys=None,
- consider_as_referenced_keys=None, any_operator=False):
- """traverse an expression and locate binary criterion pairs."""
-
- if consider_as_foreign_keys and consider_as_referenced_keys:
- raise exc.ArgumentError("Can only specify one of "
- "'consider_as_foreign_keys' or "
- "'consider_as_referenced_keys'")
-
- def visit_binary(binary):
- if not any_operator and binary.operator is not operators.eq:
- return
- if not isinstance(binary.left, sql.ColumnElement) or \
- not isinstance(binary.right, sql.ColumnElement):
- return
-
- if consider_as_foreign_keys:
- if binary.left in consider_as_foreign_keys and \
- (binary.right is binary.left or
- binary.right not in consider_as_foreign_keys):
- pairs.append((binary.right, binary.left))
- elif binary.right in consider_as_foreign_keys and \
- (binary.left is binary.right or
- binary.left not in consider_as_foreign_keys):
- pairs.append((binary.left, binary.right))
- elif consider_as_referenced_keys:
- if binary.left in consider_as_referenced_keys and \
- (binary.right is binary.left or
- binary.right not in consider_as_referenced_keys):
- pairs.append((binary.left, binary.right))
- elif binary.right in consider_as_referenced_keys and \
- (binary.left is binary.right or
- binary.left not in consider_as_referenced_keys):
- pairs.append((binary.right, binary.left))
- else:
- if isinstance(binary.left, schema.Column) and \
- isinstance(binary.right, schema.Column):
- if binary.left.references(binary.right):
- pairs.append((binary.right, binary.left))
- elif binary.right.references(binary.left):
- pairs.append((binary.left, binary.right))
- pairs = []
- visitors.traverse(expression, {}, {'binary':visit_binary})
- return pairs
-
-def folded_equivalents(join, equivs=None):
- """Return a list of uniquely named columns.
-
- The column list of the given Join will be narrowed
- down to a list of all equivalently-named,
- equated columns folded into one column, where 'equated' means they are
- equated to each other in the ON clause of this join.
-
- This function is used by Join.select(fold_equivalents=True).
-
- Deprecated. This function is used for a certain kind of
- "polymorphic_union" which is designed to achieve joined
- table inheritance where the base table has no "discriminator"
- column; [ticket:1131] will provide a better way to
- achieve this.
-
- """
- if equivs is None:
- equivs = set()
- def visit_binary(binary):
- if binary.operator == operators.eq and binary.left.name == binary.right.name:
- equivs.add(binary.right)
- equivs.add(binary.left)
- visitors.traverse(join.onclause, {}, {'binary':visit_binary})
- collist = []
- if isinstance(join.left, expression.Join):
- left = folded_equivalents(join.left, equivs)
- else:
- left = list(join.left.columns)
- if isinstance(join.right, expression.Join):
- right = folded_equivalents(join.right, equivs)
- else:
- right = list(join.right.columns)
- used = set()
- for c in left + right:
- if c in equivs:
- if c.name not in used:
- collist.append(c)
- used.add(c.name)
- else:
- collist.append(c)
- return collist
-
-class AliasedRow(object):
- """Wrap a RowProxy with a translation map.
-
- This object allows a set of keys to be translated
- to those present in a RowProxy.
-
- """
- def __init__(self, row, map):
- # AliasedRow objects don't nest, so un-nest
- # if another AliasedRow was passed
- if isinstance(row, AliasedRow):
- self.row = row.row
- else:
- self.row = row
- self.map = map
-
- def __contains__(self, key):
- return self.map[key] in self.row
-
- def has_key(self, key):
- return key in self
-
- def __getitem__(self, key):
- return self.row[self.map[key]]
-
- def keys(self):
- return self.row.keys()
-
-
-class ClauseAdapter(visitors.ReplacingCloningVisitor):
- """Clones and modifies clauses based on column correspondence.
-
- E.g.::
-
- table1 = Table('sometable', metadata,
- Column('col1', Integer),
- Column('col2', Integer)
- )
- table2 = Table('someothertable', metadata,
- Column('col1', Integer),
- Column('col2', Integer)
- )
-
- condition = table1.c.col1 == table2.c.col1
-
- make an alias of table1::
-
- s = table1.alias('foo')
-
- calling ``ClauseAdapter(s).traverse(condition)`` converts
- condition to read::
-
- s.c.col1 == table2.c.col1
-
- """
- def __init__(self, selectable, equivalents=None, include=None, exclude=None):
- self.__traverse_options__ = {'column_collections':False, 'stop_on':[selectable]}
- self.selectable = selectable
- self.include = include
- self.exclude = exclude
- self.equivalents = util.column_dict(equivalents or {})
-
- def _corresponding_column(self, col, require_embedded, _seen=util.EMPTY_SET):
- newcol = self.selectable.corresponding_column(col, require_embedded=require_embedded)
-
- if newcol is None and col in self.equivalents and col not in _seen:
- for equiv in self.equivalents[col]:
- newcol = self._corresponding_column(equiv, require_embedded=require_embedded, _seen=_seen.union([col]))
- if newcol is not None:
- return newcol
- return newcol
-
- def replace(self, col):
- if isinstance(col, expression.FromClause):
- if self.selectable.is_derived_from(col):
- return self.selectable
-
- if not isinstance(col, expression.ColumnElement):
- return None
-
- if self.include and col not in self.include:
- return None
- elif self.exclude and col in self.exclude:
- return None
-
- return self._corresponding_column(col, True)
-
-class ColumnAdapter(ClauseAdapter):
- """Extends ClauseAdapter with extra utility functions.
-
- Provides the ability to "wrap" this ClauseAdapter
- around another, a columns dictionary which returns
- adapted elements given an original, and an
- adapted_row() factory.
-
- """
- def __init__(self, selectable, equivalents=None,
- chain_to=None, include=None,
- exclude=None, adapt_required=False):
- ClauseAdapter.__init__(self, selectable, equivalents, include, exclude)
- if chain_to:
- self.chain(chain_to)
- self.columns = util.populate_column_dict(self._locate_col)
- self.adapt_required = adapt_required
-
- def wrap(self, adapter):
- ac = self.__class__.__new__(self.__class__)
- ac.__dict__ = self.__dict__.copy()
- ac._locate_col = ac._wrap(ac._locate_col, adapter._locate_col)
- ac.adapt_clause = ac._wrap(ac.adapt_clause, adapter.adapt_clause)
- ac.adapt_list = ac._wrap(ac.adapt_list, adapter.adapt_list)
- ac.columns = util.populate_column_dict(ac._locate_col)
- return ac
-
- adapt_clause = ClauseAdapter.traverse
- adapt_list = ClauseAdapter.copy_and_process
-
- def _wrap(self, local, wrapped):
- def locate(col):
- col = local(col)
- return wrapped(col)
- return locate
-
- def _locate_col(self, col):
- c = self._corresponding_column(col, True)
- if c is None:
- c = self.adapt_clause(col)
-
- # anonymize labels in case they have a hardcoded name
- if isinstance(c, expression._Label):
- c = c.label(None)
-
- # adapt_required indicates that if we got the same column
- # back which we put in (i.e. it passed through),
- # it's not correct. this is used by eagerloading which
- # knows that all columns and expressions need to be adapted
- # to a result row, and a "passthrough" is definitely targeting
- # the wrong column.
- if self.adapt_required and c is col:
- return None
-
- return c
-
- def adapted_row(self, row):
- return AliasedRow(row, self.columns)
-
- def __getstate__(self):
- d = self.__dict__.copy()
- del d['columns']
- return d
-
- def __setstate__(self, state):
- self.__dict__.update(state)
- self.columns = util.PopulateDict(self._locate_col)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/visitors.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/visitors.py
deleted file mode 100755
index 0c6be97d..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/sql/visitors.py
+++ /dev/null
@@ -1,266 +0,0 @@
-# sql/visitors.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Visitor/traversal interface and library functions.
-
-SQLAlchemy schema and expression constructs rely on a Python-centric
-version of the classic "visitor" pattern as the primary way in which
-they apply functionality. The most common use of this pattern
-is statement compilation, where individual expression classes match
-up to rendering methods that produce a string result. Beyond this,
-the visitor system is also used to inspect expressions for various
-information and patterns, as well as for usage in
-some kinds of expression transformation. Other kinds of transformation
-use a non-visitor traversal system.
-
-For many examples of how the visit system is used, see the
-sqlalchemy.sql.util and the sqlalchemy.sql.compiler modules.
-For an introduction to clause adaptation, see
-http://techspot.zzzeek.org/?p=19 .
-
-"""
-
-from collections import deque
-import re
-from sqlalchemy import util
-import operator
-
-__all__ = ['VisitableType', 'Visitable', 'ClauseVisitor',
- 'CloningVisitor', 'ReplacingCloningVisitor', 'iterate',
- 'iterate_depthfirst', 'traverse_using', 'traverse',
- 'cloned_traverse', 'replacement_traverse']
-
-class VisitableType(type):
- """Metaclass which checks for a `__visit_name__` attribute and
- applies `_compiler_dispatch` method to classes.
-
- """
-
- def __init__(cls, clsname, bases, clsdict):
- if cls.__name__ == 'Visitable' or not hasattr(cls, '__visit_name__'):
- super(VisitableType, cls).__init__(clsname, bases, clsdict)
- return
-
- _generate_dispatch(cls)
-
- super(VisitableType, cls).__init__(clsname, bases, clsdict)
-
-def _generate_dispatch(cls):
- # set up an optimized visit dispatch function
- # for use by the compiler
- if '__visit_name__' in cls.__dict__:
- visit_name = cls.__visit_name__
- if isinstance(visit_name, str):
- getter = operator.attrgetter("visit_%s" % visit_name)
- def _compiler_dispatch(self, visitor, **kw):
- return getter(visitor)(self, **kw)
- else:
- def _compiler_dispatch(self, visitor, **kw):
- return getattr(visitor, 'visit_%s' % self.__visit_name__)(self, **kw)
-
- cls._compiler_dispatch = _compiler_dispatch
-
-class Visitable(object):
- """Base class for visitable objects, applies the
- ``VisitableType`` metaclass.
-
- """
-
- __metaclass__ = VisitableType
-
-class ClauseVisitor(object):
- """Base class for visitor objects which can traverse using
- the traverse() function.
-
- """
-
- __traverse_options__ = {}
-
- def traverse_single(self, obj, **kw):
- for v in self._visitor_iterator:
- meth = getattr(v, "visit_%s" % obj.__visit_name__, None)
- if meth:
- return meth(obj, **kw)
-
- def iterate(self, obj):
- """traverse the given expression structure, returning an iterator of all elements."""
-
- return iterate(obj, self.__traverse_options__)
-
- def traverse(self, obj):
- """traverse and visit the given expression structure."""
-
- return traverse(obj, self.__traverse_options__, self._visitor_dict)
-
- @util.memoized_property
- def _visitor_dict(self):
- visitors = {}
-
- for name in dir(self):
- if name.startswith('visit_'):
- visitors[name[6:]] = getattr(self, name)
- return visitors
-
- @property
- def _visitor_iterator(self):
- """iterate through this visitor and each 'chained' visitor."""
-
- v = self
- while v:
- yield v
- v = getattr(v, '_next', None)
-
- def chain(self, visitor):
- """'chain' an additional ClauseVisitor onto this ClauseVisitor.
-
- the chained visitor will receive all visit events after this one.
-
- """
- tail = list(self._visitor_iterator)[-1]
- tail._next = visitor
- return self
-
-class CloningVisitor(ClauseVisitor):
- """Base class for visitor objects which can traverse using
- the cloned_traverse() function.
-
- """
-
- def copy_and_process(self, list_):
- """Apply cloned traversal to the given list of elements, and return the new list."""
-
- return [self.traverse(x) for x in list_]
-
- def traverse(self, obj):
- """traverse and visit the given expression structure."""
-
- return cloned_traverse(obj, self.__traverse_options__, self._visitor_dict)
-
-class ReplacingCloningVisitor(CloningVisitor):
- """Base class for visitor objects which can traverse using
- the replacement_traverse() function.
-
- """
-
- def replace(self, elem):
- """receive pre-copied elements during a cloning traversal.
-
- If the method returns a new element, the element is used
- instead of creating a simple copy of the element. Traversal
- will halt on the newly returned element if it is re-encountered.
- """
- return None
-
- def traverse(self, obj):
- """traverse and visit the given expression structure."""
-
- def replace(elem):
- for v in self._visitor_iterator:
- e = v.replace(elem)
- if e is not None:
- return e
- return replacement_traverse(obj, self.__traverse_options__, replace)
-
-def iterate(obj, opts):
- """traverse the given expression structure, returning an iterator.
-
- traversal is configured to be breadth-first.
-
- """
- stack = deque([obj])
- while stack:
- t = stack.popleft()
- yield t
- for c in t.get_children(**opts):
- stack.append(c)
-
-def iterate_depthfirst(obj, opts):
- """traverse the given expression structure, returning an iterator.
-
- traversal is configured to be depth-first.
-
- """
- stack = deque([obj])
- traversal = deque()
- while stack:
- t = stack.pop()
- traversal.appendleft(t)
- for c in t.get_children(**opts):
- stack.append(c)
- return iter(traversal)
-
-def traverse_using(iterator, obj, visitors):
- """visit the given expression structure using the given iterator of objects."""
-
- for target in iterator:
- meth = visitors.get(target.__visit_name__, None)
- if meth:
- meth(target)
- return obj
-
-def traverse(obj, opts, visitors):
- """traverse and visit the given expression structure using the default iterator."""
-
- return traverse_using(iterate(obj, opts), obj, visitors)
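-
-# Editorial sketch (not part of the original source): collecting all
-# bind parameters within an expression:
-#
-#     binds = []
-#     traverse(expr, {}, {'bindparam': binds.append})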
-
-def traverse_depthfirst(obj, opts, visitors):
- """traverse and visit the given expression structure using the depth-first iterator."""
-
- return traverse_using(iterate_depthfirst(obj, opts), obj, visitors)
-
-def cloned_traverse(obj, opts, visitors):
- """clone the given expression structure, allowing modifications by visitors."""
-
- cloned = util.column_dict()
-
- def clone(element):
- if element not in cloned:
- cloned[element] = element._clone()
- return cloned[element]
-
- obj = clone(obj)
- stack = [obj]
-
- while stack:
- t = stack.pop()
- if t in cloned:
- continue
- t._copy_internals(clone=clone)
-
- meth = visitors.get(t.__visit_name__, None)
- if meth:
- meth(t)
-
- for c in t.get_children(**opts):
- stack.append(c)
- return obj
-
-def replacement_traverse(obj, opts, replace):
- """clone the given expression structure, allowing element replacement by a given replacement function."""
-
- cloned = util.column_dict()
- stop_on = util.column_set(opts.get('stop_on', []))
-
- def clone(element):
- newelem = replace(element)
- if newelem is not None:
- stop_on.add(newelem)
- return newelem
-
- if element not in cloned:
- cloned[element] = element._clone()
- return cloned[element]
-
- obj = clone(obj)
- stack = [obj]
- while stack:
- t = stack.pop()
- if t in stop_on:
- continue
- t._copy_internals(clone=clone)
- for c in t.get_children(**opts):
- stack.append(c)
- return obj
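-
-# A minimal usage sketch of the module-level functions above, assuming a
-# working SQLAlchemy 0.7 install (table and column names are illustrative):
-#
-#   from sqlalchemy import MetaData, Table, Column, Integer
-#   from sqlalchemy.sql import visitors
-#
-#   t = Table('t', MetaData(), Column('x', Integer), Column('y', Integer))
-#   expr = (t.c.x == 5) & (t.c.y == 10)
-#
-#   for elem in visitors.iterate(expr, {}):     # breadth-first walk
-#       print type(elem).__name__
-#
-#   cols = []
-#   visitors.traverse(expr, {}, {'column': cols.append})
-#   # cols now holds t.c.x and t.c.y, since Column.__visit_name__ is 'column'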
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/types.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/types.py
deleted file mode 100755
index e8d0b6f2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/types.py
+++ /dev/null
@@ -1,2140 +0,0 @@
-# sqlalchemy/types.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""defines genericized SQL types, each represented by a subclass of
-:class:`~sqlalchemy.types.AbstractType`. Dialects define further subclasses of these
-types.
-
-For more information see the SQLAlchemy documentation on types.
-
-"""
-__all__ = [ 'TypeEngine', 'TypeDecorator', 'AbstractType', 'UserDefinedType',
- 'INT', 'CHAR', 'VARCHAR', 'NCHAR', 'NVARCHAR','TEXT', 'Text',
- 'FLOAT', 'NUMERIC', 'REAL', 'DECIMAL', 'TIMESTAMP', 'DATETIME',
- 'CLOB', 'BLOB', 'BOOLEAN', 'SMALLINT', 'INTEGER', 'DATE', 'TIME',
- 'String', 'Integer', 'SmallInteger', 'BigInteger', 'Numeric',
- 'Float', 'DateTime', 'Date', 'Time', 'LargeBinary', 'Binary',
- 'Boolean', 'Unicode', 'MutableType', 'Concatenable',
- 'UnicodeText','PickleType', 'Interval', 'Enum' ]
-
-import inspect
-import datetime as dt
-import codecs
-
-from sqlalchemy import exc, schema
-from sqlalchemy.sql import expression, operators
-from sqlalchemy.util import pickle
-from sqlalchemy.util.compat import decimal
-from sqlalchemy.sql.visitors import Visitable
-from sqlalchemy import util
-from sqlalchemy import processors, events
-import collections
-default = util.importlater("sqlalchemy.engine", "default")
-
-NoneType = type(None)
-if util.jython:
- import array
-
-class AbstractType(Visitable):
- """Base for all types - not needed except for backwards
- compatibility."""
-
-class TypeEngine(AbstractType):
- """Base for built-in types."""
-
- def copy_value(self, value):
- return value
-
- def bind_processor(self, dialect):
- """Return a conversion function for processing bind values.
-
- Returns a callable which will receive a bind parameter value
- as the sole positional argument and will return a value to
- send to the DB-API.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- """
- return None
-
- def result_processor(self, dialect, coltype):
- """Return a conversion function for processing result row values.
-
- Returns a callable which will receive a result row column
- value as the sole positional argument and will return a value
- to return to the user.
-
- If processing is not necessary, the method should return ``None``.
-
- :param dialect: Dialect instance in use.
-
- :param coltype: DBAPI coltype argument received in cursor.description.
-
- """
- return None
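-
- # A hedged sketch of the contract above (method shown out of context, as
- # a dialect-specific type might write it): return a callable to convert
- # each value, or None when no conversion is needed.
- #
- #   def result_processor(self, dialect, coltype):
- #       def process(value):
- #           if value is not None:
- #               return value.rstrip()    # e.g. trim CHAR padding
- #           return None
- #       return process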
-
- def compare_values(self, x, y):
- """Compare two values for equality."""
-
- return x == y
-
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- This allows systems like the ORM to know if a column value can
- be considered 'not changed' by comparing the identity of
- objects alone. Values such as dicts and lists which
- are serialized into strings are examples of "mutable"
- column structures.
-
- .. note:: This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- When this method is overridden, :meth:`copy_value` should
- also be supplied. The :class:`.MutableType` mixin
- is recommended as a helper.
-
- """
- return False
-
- def get_dbapi_type(self, dbapi):
- """Return the corresponding type object from the underlying DB-API, if
- any.
-
- This can be useful for calling ``setinputsizes()``, for example.
-
- """
- return None
-
- def _adapt_expression(self, op, othertype):
- """evaluate the return type of <self> <op> <othertype>,
- and apply any adaptations to the given operator.
-
- """
- return op, self
-
- @util.memoized_property
- def _type_affinity(self):
- """Return a rudimental 'affinity' value expressing the general class
- of type."""
-
- typ = None
- for t in self.__class__.__mro__:
- if t is TypeEngine or t is UserDefinedType:
- return typ
- elif issubclass(t, TypeEngine):
- typ = t
- else:
- return self.__class__
-
- def dialect_impl(self, dialect):
- """Return a dialect-specific implementation for this :class:`.TypeEngine`."""
-
- try:
- return dialect._type_memos[self]['impl']
- except KeyError:
- return self._dialect_info(dialect)['impl']
-
- def _cached_bind_processor(self, dialect):
- """Return a dialect-specific bind processor for this type."""
-
- try:
- return dialect._type_memos[self]['bind']
- except KeyError:
- d = self._dialect_info(dialect)
- d['bind'] = bp = d['impl'].bind_processor(dialect)
- return bp
-
- def _cached_result_processor(self, dialect, coltype):
- """Return a dialect-specific result processor for this type."""
-
- try:
- return dialect._type_memos[self][coltype]
- except KeyError:
- d = self._dialect_info(dialect)
- # key assumption: DBAPI type codes are
- # constants. Else this dictionary would
- # grow unbounded.
- d[coltype] = rp = d['impl'].result_processor(dialect, coltype)
- return rp
-
- def _dialect_info(self, dialect):
- """Return a dialect-specific registry which
- caches a dialect-specific implementation, bind processing
- function, and one or more result processing functions."""
-
- if self in dialect._type_memos:
- return dialect._type_memos[self]
- else:
- impl = self._gen_dialect_impl(dialect)
- if impl is self:
- impl = self.adapt(type(self))
- # this can't be self, else we create a cycle
- assert impl is not self
- dialect._type_memos[self] = d = {'impl':impl}
- return d
-
- def _gen_dialect_impl(self, dialect):
- return dialect.type_descriptor(self)
-
- def adapt(self, cls, **kw):
- """Produce an "adapted" form of this type, given an "impl" class
- to work with.
-
- This method is used internally to associate generic
- types with "implementation" types that are specific to a particular
- dialect.
- """
- return util.constructor_copy(self, cls, **kw)
-
- def _coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- Given an operator and value, gives the type a chance
- to return a type which the value should be coerced into.
-
- The default behavior here is conservative; if the right-hand
- side is already coerced into a SQL type based on its
- Python type, it is usually left alone.
-
- End-user functionality extension here should generally be via
- :class:`.TypeDecorator`, which provides more liberal behavior in that
- it defaults to coercing the other side of the expression into this
- type, thus applying special Python conversions above and beyond those
- needed by the DBAPI to both sides. It also provides the public method
- :meth:`.TypeDecorator.coerce_compared_value` which is intended for
- end-user customization of this behavior.
-
- """
- _coerced_type = _type_map.get(type(value), NULLTYPE)
- if _coerced_type is NULLTYPE or _coerced_type._type_affinity \
- is self._type_affinity:
- return self
- else:
- return _coerced_type
-
- def _compare_type_affinity(self, other):
- return self._type_affinity is other._type_affinity
-
- def compile(self, dialect=None):
- """Produce a string-compiled form of this :class:`.TypeEngine`.
-
- When called with no arguments, uses a "default" dialect
- to produce a string result.
-
- :param dialect: a :class:`.Dialect` instance.
-
- """
- # arg, return value is inconsistent with
- # ClauseElement.compile()....this is a mistake.
-
- if not dialect:
- dialect = self._default_dialect
-
- return dialect.type_compiler.process(self)
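-
- # A short sketch: with no dialect given, compile() falls back to the
- # default dialect resolved below.
- #
- #   from sqlalchemy.types import String
- #   String(50).compile()                          # -> 'VARCHAR(50)'
- #   from sqlalchemy.dialects import mysql
- #   String(50).compile(dialect=mysql.dialect())   # backend-specific rendering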
-
- @property
- def _default_dialect(self):
- if self.__class__.__module__.startswith("sqlalchemy.dialects"):
- tokens = self.__class__.__module__.split(".")[0:3]
- mod = ".".join(tokens)
- return getattr(__import__(mod).dialects, tokens[-1]).dialect()
- else:
- return default.DefaultDialect()
-
- def __str__(self):
- # Py3K
- #return unicode(self.compile())
- # Py2K
- return unicode(self.compile()).\
- encode('ascii', 'backslashreplace')
- # end Py2K
-
- def __init__(self, *args, **kwargs):
- """Support implementations that were passing arguments"""
- if args or kwargs:
- util.warn_deprecated("Passing arguments to type object "
- "constructor %s is deprecated" % self.__class__)
-
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
- ", ".join("%s=%r" % (k, getattr(self, k, None))
- for k in inspect.getargspec(self.__init__)[0][1:]))
-
-
-class UserDefinedType(TypeEngine):
- """Base for user defined types.
-
- This should be the base of new types. Note that
- for most cases, :class:`.TypeDecorator` is probably
- more appropriate::
-
- import sqlalchemy.types as types
-
- class MyType(types.UserDefinedType):
- def __init__(self, precision = 8):
- self.precision = precision
-
- def get_col_spec(self):
- return "MYTYPE(%s)" % self.precision
-
- def bind_processor(self, dialect):
- def process(value):
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- def process(value):
- return value
- return process
-
- Once the type is made, it's immediately usable::
-
- table = Table('foo', meta,
- Column('id', Integer, primary_key=True),
- Column('data', MyType(16))
- )
-
- """
- __visit_name__ = "user_defined"
-
- def _adapt_expression(self, op, othertype):
- """evaluate the return type of <self> <op> <othertype>,
- and apply any adaptations to the given operator.
-
- """
- return self.adapt_operator(op), self
-
- def adapt_operator(self, op):
- """A hook which allows the given operator to be adapted
- to something new.
-
- See also UserDefinedType._adapt_expression(), an as-yet-
- semi-public method with greater capability in this regard.
-
- """
- return op
-
-class TypeDecorator(TypeEngine):
- """Allows the creation of types which add additional functionality
- to an existing type.
-
- This method is preferred to direct subclassing of SQLAlchemy's
- built-in types as it ensures that all required functionality of
- the underlying type is kept in place.
-
- Typical usage::
-
- import sqlalchemy.types as types
-
- class MyType(types.TypeDecorator):
- '''Prefixes Unicode values with "PREFIX:" on the way in and
- strips it off on the way out.
- '''
-
- impl = types.Unicode
-
- def process_bind_param(self, value, dialect):
- return "PREFIX:" + value
-
- def process_result_value(self, value, dialect):
- return value[7:]
-
- def copy(self):
- return MyType(self.impl.length)
-
- The class-level "impl" variable is required, and can reference any
- TypeEngine class. Alternatively, the load_dialect_impl() method
- can be used to provide different type classes based on the dialect
- given; in this case, the "impl" variable can reference
- ``TypeEngine`` as a placeholder.
-
- Types that receive a Python type that isn't similar to the ultimate type
- used may want to define the :meth:`TypeDecorator.coerce_compared_value`
- method. This is used to give the expression system a hint when coercing
- Python objects into bind parameters within expressions. Consider this
- expression::
-
- mytable.c.somecol + datetime.date(2009, 5, 15)
-
- Above, if "somecol" is an ``Integer`` variant, it makes sense that
- we're doing date arithmetic, where above is usually interpreted
- by databases as adding a number of days to the given date.
- The expression system does the right thing by not attempting to
- coerce the "date()" value into an integer-oriented bind parameter.
-
- However, in the case of ``TypeDecorator``, we are usually changing an
- incoming Python type to something new - ``TypeDecorator`` by default will
- "coerce" the non-typed side to be the same type as itself. Such as below,
- we define an "epoch" type that stores a date value as an integer::
-
- class MyEpochType(types.TypeDecorator):
- impl = types.Integer
-
- epoch = datetime.date(1970, 1, 1)
-
- def process_bind_param(self, value, dialect):
- return (value - self.epoch).days
-
- def process_result_value(self, value, dialect):
- return self.epoch + timedelta(days=value)
-
- Our expression of ``somecol + date`` with the above type will coerce the
- "date" on the right side to also be treated as ``MyEpochType``.
-
- This behavior can be overridden via the
- :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type
- that should be used for the value of the expression. Below we set it such
- that an integer value will be treated as an ``Integer``, and any other
- value is assumed to be a date and will be treated as a ``MyEpochType``::
-
- def coerce_compared_value(self, op, value):
- if isinstance(value, int):
- return Integer()
- else:
- return self
-
- """
-
- __visit_name__ = "type_decorator"
-
- def __init__(self, *args, **kwargs):
- """Construct a :class:`.TypeDecorator`.
-
- Arguments sent here are passed to the constructor
- of the class assigned to the ``impl`` class level attribute,
- where the ``self.impl`` attribute is assigned an instance
- of the implementation type. If ``impl`` at the class level
- is already an instance, then it's assigned to ``self.impl``
- as is.
-
- Subclasses can override this to customize the generation
- of ``self.impl``.
-
- """
- if not hasattr(self.__class__, 'impl'):
- raise AssertionError("TypeDecorator implementations "
- "require a class-level variable "
- "'impl' which refers to the class of "
- "type being decorated")
- self.impl = to_instance(self.__class__.impl, *args, **kwargs)
-
-
- def _gen_dialect_impl(self, dialect):
- adapted = dialect.type_descriptor(self)
- if adapted is not self:
- return adapted
-
- # otherwise adapt the impl type, link
- # to a copy of this TypeDecorator and return
- # that.
- typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect)
- tt = self.copy()
- if not isinstance(tt, self.__class__):
- raise AssertionError('Type object %s does not properly '
- 'implement the copy() method, it must '
- 'return an object of type %s' % (self,
- self.__class__))
- tt.impl = typedesc
- return tt
-
- @util.memoized_property
- def _type_affinity(self):
- return self.impl._type_affinity
-
- def type_engine(self, dialect):
- """Return a dialect-specific :class:`.TypeEngine` instance for this :class:`.TypeDecorator`.
-
- In most cases this returns a dialect-adapted form of
- the :class:`.TypeEngine` type represented by ``self.impl``.
- Makes usage of :meth:`dialect_impl` but also traverses
- into wrapped :class:`.TypeDecorator` instances.
- Behavior can be customized here by overriding :meth:`load_dialect_impl`.
-
- """
- adapted = dialect.type_descriptor(self)
- if type(adapted) is not type(self):
- return adapted
- elif isinstance(self.impl, TypeDecorator):
- return self.impl.type_engine(dialect)
- else:
- return self.load_dialect_impl(dialect)
-
- def load_dialect_impl(self, dialect):
- """Return a :class:`.TypeEngine` object corresponding to a dialect.
-
- This is an end-user override hook that can be used to provide
- differing types depending on the given dialect. It is used
- by the :class:`.TypeDecorator` implementation of :meth:`type_engine`
- to help determine what type should ultimately be returned
- for a given :class:`.TypeDecorator`.
-
- By default returns ``self.impl``.
-
- """
- return self.impl
-
- def __getattr__(self, key):
- """Proxy all other undefined accessors to the underlying
- implementation."""
-
- return getattr(self.impl, key)
-
- def process_bind_param(self, value, dialect):
- """Receive a bound parameter value to be converted.
-
- Subclasses override this method to return the
- value that should be passed along to the underlying
- :class:`.TypeEngine` object, and from there to the
- DBAPI ``execute()`` method.
-
- :param value: the value. Can be None.
- :param dialect: the :class:`.Dialect` in use.
-
- """
- raise NotImplementedError()
-
- def process_result_value(self, value, dialect):
- """Receive a result-row column value to be converted.
-
- Subclasses override this method to return the
- value that should be passed back to the application,
- given a value that is already processed by
- the underlying :class:`.TypeEngine` object, originally
- from the DBAPI cursor method ``fetchone()`` or similar.
-
- :param value: the value. Can be None.
- :param dialect: the :class:`.Dialect` in use.
-
- """
- raise NotImplementedError()
-
- def bind_processor(self, dialect):
- """Provide a bound value processing function for the given :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for bound value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_bind_param` here.
-
- User-defined code can override this method directly,
- though it's likely best to use :meth:`process_bind_param` so that
- the processing provided by ``self.impl`` is maintained.
-
- """
- if self.__class__.process_bind_param.func_code \
- is not TypeDecorator.process_bind_param.func_code:
- process_param = self.process_bind_param
- impl_processor = self.impl.bind_processor(dialect)
- if impl_processor:
- def process(value):
- return impl_processor(process_param(value, dialect))
-
- else:
- def process(value):
- return process_param(value, dialect)
-
- return process
- else:
- return self.impl.bind_processor(dialect)
-
- def result_processor(self, dialect, coltype):
- """Provide a result value processing function for the given :class:`.Dialect`.
-
- This is the method that fulfills the :class:`.TypeEngine`
- contract for result value conversion. :class:`.TypeDecorator`
- will wrap a user-defined implementation of
- :meth:`process_result_value` here.
-
- User-defined code can override this method directly,
- though it's likely best to use :meth:`process_result_value` so that
- the processing provided by ``self.impl`` is maintained.
-
- """
- if self.__class__.process_result_value.func_code \
- is not TypeDecorator.process_result_value.func_code:
- process_value = self.process_result_value
- impl_processor = self.impl.result_processor(dialect,
- coltype)
- if impl_processor:
- def process(value):
- return process_value(impl_processor(value), dialect)
-
- else:
- def process(value):
- return process_value(value, dialect)
-
- return process
- else:
- return self.impl.result_processor(dialect, coltype)
-
- def coerce_compared_value(self, op, value):
- """Suggest a type for a 'coerced' Python value in an expression.
-
- By default, returns self. This method is called by
- the expression system when an object using this type is
- on the left or right side of an expression against a plain Python
- object which does not yet have a SQLAlchemy type assigned::
-
- expr = table.c.somecolumn + 35
-
- Where above, if ``somecolumn`` uses this type, this method will
- be called with the operator ``operator.add``
- and the value ``35``. The return value is whatever SQLAlchemy type should
- be used for ``35`` for this particular operation.
-
- """
- return self
-
- def _coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine._coerce_compared_value` for a description."""
-
- return self.coerce_compared_value(op, value)
-
- def copy(self):
- """Produce a copy of this :class:`.TypeDecorator` instance.
-
- This is a shallow copy and is provided to fulfill part of
- the :class:`.TypeEngine` contract. It usually does not
- need to be overridden unless the user-defined :class:`.TypeDecorator`
- has local state that should be deep-copied.
-
- """
- instance = self.__class__.__new__(self.__class__)
- instance.__dict__.update(self.__dict__)
- return instance
-
- def get_dbapi_type(self, dbapi):
- """Return the DBAPI type object represented by this :class:`.TypeDecorator`.
-
- By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the
- underlying "impl".
- """
- return self.impl.get_dbapi_type(dbapi)
-
- def copy_value(self, value):
- """Given a value, produce a copy of it.
-
- By default this calls upon :meth:`.TypeEngine.copy_value`
- of the underlying "impl".
-
- :meth:`.copy_value` will return the object
- itself, assuming "mutability" is not enabled.
- Only the :class:`.MutableType` mixin provides a copy
- function that actually produces a new object.
- The copying function is used by the ORM when
- "mutable" types are used, to memoize the original
- version of an object as loaded from the database,
- which is then compared to the possibly mutated
- version to check for changes.
-
- Modern implementations should use the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel` for intercepting in-place
- changes to values.
-
- """
- return self.impl.copy_value(value)
-
- def compare_values(self, x, y):
- """Given two values, compare them for equality.
-
- By default this calls upon :meth:`.TypeEngine.compare_values`
- of the underlying "impl", which in turn usually
- uses the Python equals operator ``==``.
-
- This function is used by the ORM to compare
- an original-loaded value with an intercepted
- "changed" value, to determine if a net change
- has occurred.
-
- """
- return self.impl.compare_values(x, y)
-
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- This allows systems like the ORM to know if a column value can
- be considered 'not changed' by comparing the identity of
- objects alone. Values such as dicts and lists which
- are serialized into strings are examples of "mutable"
- column structures.
-
- .. note:: This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- """
- return self.impl.is_mutable()
-
- def _adapt_expression(self, op, othertype):
- op, typ = self.impl._adapt_expression(op, othertype)
- if typ is self.impl:
- return op, self
- else:
- return op, typ
-
-class MutableType(object):
- """A mixin that marks a :class:`.TypeEngine` as representing
- a mutable Python object type. This functionality is used
- only by the ORM.
-
- .. note:: :class:`.MutableType` is superseded as of SQLAlchemy 0.7
- by the ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`. This extension provides an event
- driven approach to in-place mutation detection that does not
- incur the severe performance penalty of the :class:`.MutableType`
- approach.
-
- "mutable" means that changes can occur in place to a value
- of this type. Examples include Python lists, dictionaries,
- and sets, as well as user-defined objects. The primary
- need for identification of "mutable" types is by the ORM,
- which applies special rules to such values in order to guarantee
- that changes are detected. These rules may have a significant
- performance impact, described below.
-
- A :class:`.MutableType` usually allows a flag called
- ``mutable`` to enable or disable the "mutability" flag,
- represented on this class by :meth:`is_mutable`. Examples
- include :class:`.PickleType` and
- :class:`~sqlalchemy.dialects.postgresql.base.ARRAY`. Setting
- this flag to ``True`` enables mutability-specific behavior
- by the ORM.
-
- The :meth:`copy_value` and :meth:`compare_values` functions
- represent a copy and compare function for values of this
- type - implementing subclasses should override these
- appropriately.
-
- .. warning:: The usage of mutable types has significant performance
- implications when using the ORM. In order to detect changes, the
- ORM must create a copy of the value when it is first
- accessed, so that changes to the current value can be compared
- against the "clean" database-loaded value. Additionally, when the
- ORM checks to see if any data requires flushing, it must scan
- through all instances in the session which are known to have
- "mutable" attributes and compare the current value of each
- one to its "clean"
- value. So for example, if the Session contains 6000 objects (a
- fairly large amount) and autoflush is enabled, every individual
- execution of :class:`.Query` will require a full scan of that subset of
- the 6000 objects that have mutable attributes, possibly resulting
- in tens of thousands of additional method calls for every query.
-
- As of SQLAlchemy 0.7, the ``sqlalchemy.ext.mutable`` extension is provided,
- which allows an event-driven approach to in-place mutation detection. This
- approach should now be favored over the usage of :class:`.MutableType`
- with ``mutable=True``. ``sqlalchemy.ext.mutable`` is described in
- :ref:`mutable_toplevel`.
-
- """
-
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- For :class:`.MutableType`, this method is set to
- return ``True``.
-
- """
- return True
-
- def copy_value(self, value):
- """Unimplemented."""
- raise NotImplementedError()
-
- def compare_values(self, x, y):
- """Compare *x* == *y*."""
- return x == y
-
-def to_instance(typeobj, *arg, **kw):
- if typeobj is None:
- return NULLTYPE
-
- if util.callable(typeobj):
- return typeobj(*arg, **kw)
- else:
- return typeobj
-
-def adapt_type(typeobj, colspecs):
- if isinstance(typeobj, type):
- typeobj = typeobj()
- for t in typeobj.__class__.__mro__[0:-1]:
- try:
- impltype = colspecs[t]
- break
- except KeyError:
- pass
- else:
- # couldn't adapt - so just return the type itself
- # (it may be a user-defined type)
- return typeobj
- # if we adapted the given generic type to a database-specific type,
- # but it turns out the originally given "generic" type
- # is actually a subclass of our resulting type, then we were already
- # given a more specific type than that required; so use that.
- if (issubclass(typeobj.__class__, impltype)):
- return typeobj
- return typeobj.adapt(impltype)
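-
-# A small sketch of the resolution above; the colspecs mapping and the
-# dialect type name are illustrative only:
-#
-#   colspecs = {String: SomeDialectString}    # as a dialect might supply
-#   adapt_type(String(30), colspecs)          # -> SomeDialectString instance;
-#                                             #    length=30 carries over via adapt()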
-
-
-
-
-class NullType(TypeEngine):
- """An unknown type.
-
- NullTypes will stand in if :class:`~sqlalchemy.Table` reflection
- encounters a column data type unknown to SQLAlchemy. The
- resulting columns are nearly fully usable: the DB-API adapter will
- handle all translation to and from the database data type.
-
- NullType does not have sufficient information to participate in a
- ``CREATE TABLE`` statement and will raise an exception if
- encountered during a :meth:`~sqlalchemy.Table.create` operation.
-
- """
- __visit_name__ = 'null'
-
- def _adapt_expression(self, op, othertype):
- if isinstance(othertype, NullType) or not operators.is_commutative(op):
- return op, self
- else:
- return othertype._adapt_expression(op, self)
-
-NullTypeEngine = NullType
-
-class Concatenable(object):
- """A mixin that marks a type as supporting 'concatenation',
- typically strings."""
-
- def _adapt_expression(self, op, othertype):
- if op is operators.add and issubclass(othertype._type_affinity,
- (Concatenable, NullType)):
- return operators.concat_op, self
- else:
- return op, self
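-
- # A quick sketch of the effect under the default dialect: because String
- # (defined below) is Concatenable, "+" between two string-typed columns
- # compiles to the concatenation operator rather than addition:
- #
- #   from sqlalchemy import MetaData, Table, Column
- #   t = Table('t', MetaData(), Column('a', String(10)), Column('b', String(10)))
- #   str(t.c.a + t.c.b)    # -> 't.a || t.b'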
-
-class _DateAffinity(object):
- """Mixin date/time specific expression adaptations.
-
- Rules are implemented within Date, Time, Interval, DateTime, Numeric,
- Integer. Based on
- http://www.postgresql.org/docs/current/static/functions-datetime.html.
-
- """
-
- @property
- def _expression_adaptations(self):
- raise NotImplementedError()
-
- _blank_dict = util.immutabledict()
- def _adapt_expression(self, op, othertype):
- othertype = othertype._type_affinity
- return op, \
- self._expression_adaptations.get(op, self._blank_dict).\
- get(othertype, NULLTYPE)
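-
- # A sketch of the lookup above, using the Date/Integer rules defined
- # later in this module: for a Date column d and Integer column n, "d + n"
- # resolves through Date._expression_adaptations[operators.add][Integer]:
- #
- #   from sqlalchemy import MetaData, Table, Column
- #   t = Table('t', MetaData(), Column('d', Date), Column('n', Integer))
- #   type((t.c.d + t.c.n).type)    # -> <class 'sqlalchemy.types.Date'>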
-
-class String(Concatenable, TypeEngine):
- """The base for all string and character types.
-
- In SQL, corresponds to VARCHAR. Can also take Python unicode objects
- and encode to the database's encoding in bind params (and the reverse for
- result sets.)
-
- The `length` field is usually required when the `String` type is
- used within a CREATE TABLE statement, as VARCHAR requires a length
- on most databases.
-
- """
-
- __visit_name__ = 'string'
-
- def __init__(self, length=None, convert_unicode=False,
- assert_unicode=None, unicode_error=None,
- _warn_on_bytestring=False
- ):
- """
- Create a string-holding type.
-
- :param length: optional, a length for the column for use in
- DDL statements. May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued. Whether the value is
- interpreted as bytes or characters is database specific.
-
- :param convert_unicode: defaults to False. If True, the
- type will do what is necessary in order to accept
- Python Unicode objects as bind parameters, and to return
- Python Unicode objects in result rows. This may
- require SQLAlchemy to explicitly coerce incoming Python
- unicodes into an encoding, and from an encoding
- back to Unicode, or it may not require any interaction
- from SQLAlchemy at all, depending on the DBAPI in use.
-
- When SQLAlchemy performs the encoding/decoding,
- the encoding used is configured via
- :attr:`~sqlalchemy.engine.base.Dialect.encoding`, which
- defaults to `utf-8`.
-
- The "convert_unicode" behavior can also be turned on
- for all String types by setting
- :attr:`sqlalchemy.engine.base.Dialect.convert_unicode`
- on create_engine().
-
- To instruct SQLAlchemy to perform Unicode encoding/decoding
- even on a platform that already handles Unicode natively,
- set convert_unicode='force'. This will incur significant
- performance overhead when fetching unicode result columns.
-
- :param assert_unicode: Deprecated. A warning is raised in all cases
- when a non-Unicode object is passed when SQLAlchemy would coerce
- into an encoding (note: but **not** when the DBAPI handles unicode
- objects natively). To suppress or raise this warning to an error,
- use the Python warnings filter documented at:
- http://docs.python.org/library/warnings.html
-
- :param unicode_error: Optional, a method to use to handle Unicode
- conversion errors. Behaves like the 'errors' keyword argument to
- the standard library's string.decode() functions. This flag
- requires that `convert_unicode` is set to `"force"` - otherwise,
- SQLAlchemy is not guaranteed to handle the task of unicode
- conversion. Note that this flag adds significant performance
- overhead to row-fetching operations for backends that already
- return unicode objects natively (which most DBAPIs do). This
- flag should only be used as an absolute last resort for reading
- strings from a column with varied or corrupted encodings,
- which only applies to databases that accept invalid encodings
- in the first place (i.e. MySQL, *not* PG, SQLite, etc.)
-
- """
- if unicode_error is not None and convert_unicode != 'force':
- raise exc.ArgumentError("convert_unicode must be 'force' "
- "when unicode_error is set.")
-
- if assert_unicode:
- util.warn_deprecated('assert_unicode is deprecated. '
- 'SQLAlchemy emits a warning in all '
- 'cases where it would otherwise like '
- 'to encode a Python unicode object '
- 'into a specific encoding but a plain '
- 'bytestring is received. This does '
- '*not* apply to DBAPIs that coerce '
- 'Unicode natively.')
- self.length = length
- self.convert_unicode = convert_unicode
- self.unicode_error = unicode_error
- self._warn_on_bytestring = _warn_on_bytestring
-
- def bind_processor(self, dialect):
- if self.convert_unicode or dialect.convert_unicode:
- if dialect.supports_unicode_binds and \
- self.convert_unicode != 'force':
- if self._warn_on_bytestring:
- def process(value):
- # Py3K
- #if isinstance(value, bytes):
- # Py2K
- if isinstance(value, str):
- # end Py2K
- util.warn("Unicode type received non-unicode bind "
- "param value.")
- return value
- return process
- else:
- return None
- else:
- encoder = codecs.getencoder(dialect.encoding)
- warn_on_bytestring = self._warn_on_bytestring
- def process(value):
- if isinstance(value, unicode):
- return encoder(value, self.unicode_error)[0]
- elif warn_on_bytestring and value is not None:
- util.warn("Unicode type received non-unicode bind "
- "param value")
- return value
- return process
- else:
- return None
-
- def result_processor(self, dialect, coltype):
- wants_unicode = self.convert_unicode or dialect.convert_unicode
- needs_convert = wants_unicode and \
- (dialect.returns_unicode_strings is not True or
- self.convert_unicode == 'force')
-
- if needs_convert:
- to_unicode = processors.to_unicode_processor_factory(
- dialect.encoding, self.unicode_error)
-
- if dialect.returns_unicode_strings:
- # we wouldn't be here unless convert_unicode='force'
- # was specified, or the driver has erratic unicode-returning
- # habits. since we will be getting back unicode
- # in most cases, we check for it (decode will fail).
- def process(value):
- if isinstance(value, unicode):
- return value
- else:
- return to_unicode(value)
- return process
- else:
- # here, we assume that the object is not unicode,
- # avoiding expensive isinstance() check.
- return to_unicode
- else:
- return None
-
- def get_dbapi_type(self, dbapi):
- return dbapi.STRING
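-
- # Typical construction, as a sketch (column names illustrative):
- #
- #   from sqlalchemy import Column
- #   Column('name', String(30))                         # VARCHAR(30) in DDL
- #   Column('memo', String(200, convert_unicode=True))  # rows decoded to unicode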
-
-class Text(String):
- """A variably sized string type.
-
- In SQL, usually corresponds to CLOB or TEXT. Can also take Python
- unicode objects and encode to the database's encoding in bind
- params (and the reverse for result sets.)
-
- """
- __visit_name__ = 'text'
-
-class Unicode(String):
- """A variable length Unicode string.
-
- The ``Unicode`` type is a :class:`.String` which converts Python
- ``unicode`` objects (i.e., strings that are defined as
- ``u'somevalue'``) into encoded bytestrings when passing the value
- to the database driver, and similarly decodes values from the
- database back into Python ``unicode`` objects.
-
- It's roughly equivalent to using a ``String`` object with
- ``convert_unicode=True``, however
- the type has other significances in that it implies the usage
- of a unicode-capable type being used on the backend, such as NVARCHAR.
- This may affect what type is emitted when issuing CREATE TABLE
- and also may affect some DBAPI-specific details, such as type
- information passed along to ``setinputsizes()``.
-
- When using the ``Unicode`` type, it is only appropriate to pass
- Python ``unicode`` objects, and not plain ``str``. If a
- bytestring (``str``) is passed, a runtime warning is issued. If
- you notice your application raising these warnings but you're not
- sure where, the Python ``warnings`` filter can be used to turn
- these warnings into exceptions which will illustrate a stack
- trace::
-
- import warnings
- warnings.simplefilter('error')
-
- Bytestrings sent to and received from the database are encoded
- using the dialect's
- :attr:`~sqlalchemy.engine.base.Dialect.encoding`, which defaults
- to `utf-8`.
-
- """
-
- __visit_name__ = 'unicode'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a Unicode-converting String type.
-
- :param length: optional, a length for the column for use in
- DDL statements. May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued. Whether the value is
- interpreted as bytes or characters is database specific.
-
- :param \**kwargs: passed through to the underlying ``String``
- type.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(Unicode, self).__init__(length=length, **kwargs)
-
-class UnicodeText(Text):
- """An unbounded-length Unicode string.
-
- See :class:`.Unicode` for details on the unicode
- behavior of this object.
-
- Like ``Unicode``, usage of the ``UnicodeText`` type implies a
- unicode-capable type being used on the backend, such as NCLOB.
-
- """
-
- __visit_name__ = 'unicode_text'
-
- def __init__(self, length=None, **kwargs):
- """
- Create a Unicode-converting Text type.
-
- :param length: optional, a length for the column for use in
- DDL statements. May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued. Whether the value is
- interpreted as bytes or characters is database specific.
-
- """
- kwargs.setdefault('convert_unicode', True)
- kwargs.setdefault('_warn_on_bytestring', True)
- super(UnicodeText, self).__init__(length=length, **kwargs)
-
-
-class Integer(_DateAffinity, TypeEngine):
- """A type for ``int`` integers."""
-
- __visit_name__ = 'integer'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- @util.memoized_property
- def _expression_adaptations(self):
- # TODO: need a dictionary object that will
- # handle operators generically here, this is incomplete
- return {
- operators.add:{
- Date:Date,
- Integer:Integer,
- Numeric:Numeric,
- },
- operators.mul:{
- Interval:Interval,
- Integer:Integer,
- Numeric:Numeric,
- },
- # Py2K
- operators.div:{
- Integer:Integer,
- Numeric:Numeric,
- },
- # end Py2K
- operators.truediv:{
- Integer:Integer,
- Numeric:Numeric,
- },
- operators.sub:{
- Integer:Integer,
- Numeric:Numeric,
- },
- }
-
-class SmallInteger(Integer):
- """A type for smaller ``int`` integers.
-
- Typically generates a ``SMALLINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'small_integer'
-
-
-class BigInteger(Integer):
- """A type for bigger ``int`` integers.
-
- Typically generates a ``BIGINT`` in DDL, and otherwise acts like
- a normal :class:`.Integer` on the Python side.
-
- """
-
- __visit_name__ = 'big_integer'
-
-
-class Numeric(_DateAffinity, TypeEngine):
- """A type for fixed precision numbers.
-
- Typically generates DECIMAL or NUMERIC. Returns
- ``decimal.Decimal`` objects by default, applying
- conversion as needed.
-
- .. note:: The `cdecimal <http://pypi.python.org/pypi/cdecimal/>`_ library
- is a high performing alternative to Python's built-in
- ``decimal.Decimal`` type, which performs very poorly in high volume
- situations. SQLAlchemy 0.7 is tested against ``cdecimal`` and supports
- it fully. The type is not necessarily supported by DBAPI
- implementations however, most of which contain an import for plain
- ``decimal`` in their source code, even though some such as psycopg2
- provide hooks for alternate adapters. SQLAlchemy imports ``decimal``
- globally as well. While the alternate ``Decimal`` class can be patched
- into SQLA's ``decimal`` module, overall the most straightforward and
- foolproof way to use "cdecimal" given current DBAPI and Python support
- is to patch it directly into sys.modules before anything else is
- imported::
-
- import sys
- import cdecimal
- sys.modules["decimal"] = cdecimal
-
- While the global patch is a little ugly, it's particularly
- important to use just one decimal library at a time since
- Python Decimal and cdecimal Decimal objects
- are not currently compatible *with each other*::
-
- >>> import cdecimal
- >>> import decimal
- >>> decimal.Decimal("10") == cdecimal.Decimal("10")
- False
-
- SQLAlchemy will provide more natural support of
- cdecimal if and when it becomes a standard part of Python
- installations and is supported by all DBAPIs.
-
- """
-
- __visit_name__ = 'numeric'
-
- def __init__(self, precision=None, scale=None, asdecimal=True):
- """
- Construct a Numeric.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param scale: the numeric scale for use in DDL ``CREATE TABLE``.
-
- :param asdecimal: default True. Determines whether result
- values are returned as Python Decimal objects, or
- as floats. Different DBAPIs send one or the other based on
- datatypes - the Numeric type will ensure that return values
- are one or the other across DBAPIs consistently.
-
- When using the ``Numeric`` type, care should be taken to ensure
- that the asdecimal setting is appropriate for the DBAPI in use -
- when Numeric applies a conversion from Decimal->float or float->
- Decimal, this conversion incurs an additional performance overhead
- for all result columns received.
-
- DBAPIs that return Decimal natively (e.g. psycopg2) will have
- better accuracy and higher performance with a setting of ``True``,
- as the native translation to Decimal reduces the number of floating-
- point issues at play, and the Numeric type itself doesn't need
- to apply any further conversions. However, another DBAPI which
- returns floats natively *will* incur an additional conversion
- overhead, and is still subject to floating point data loss - in
- which case ``asdecimal=False`` will at least remove the extra
- conversion overhead.
-
- """
- self.precision = precision
- self.scale = scale
- self.asdecimal = asdecimal
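-
- # Typical construction, as a sketch (column names illustrative):
- #
- #   from sqlalchemy import Column
- #   Column('amount', Numeric(10, 2))            # NUMERIC(10, 2); rows as Decimal
- #   Column('ratio', Numeric(asdecimal=False))   # rows returned as floats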
-
- def get_dbapi_type(self, dbapi):
- return dbapi.NUMBER
-
- def bind_processor(self, dialect):
- if dialect.supports_native_decimal:
- return None
- else:
- return processors.to_float
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- if dialect.supports_native_decimal:
- # we're a "numeric", DBAPI will give us Decimal directly
- return None
- else:
- util.warn('Dialect %s+%s does *not* support Decimal '
- 'objects natively, and SQLAlchemy must '
- 'convert from floating point - rounding '
- 'errors and other issues may occur. Please '
- 'consider storing Decimal numbers as strings '
- 'or integers on this platform for lossless '
- 'storage.' % (dialect.name, dialect.driver))
-
- # we're a "numeric", DBAPI returns floats, convert.
- if self.scale is not None:
- return processors.to_decimal_processor_factory(
- decimal.Decimal, self.scale)
- else:
- return processors.to_decimal_processor_factory(
- decimal.Decimal)
- else:
- if dialect.supports_native_decimal:
- return processors.to_float
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul:{
- Interval:Interval,
- Numeric:Numeric,
- Integer:Numeric,
- },
- # Py2K
- operators.div:{
- Numeric:Numeric,
- Integer:Numeric,
- },
- # end Py2K
- operators.truediv:{
- Numeric:Numeric,
- Integer:Numeric,
- },
- operators.add:{
- Numeric:Numeric,
- Integer:Numeric,
- },
- operators.sub:{
- Numeric:Numeric,
- Integer:Numeric,
- }
- }
-
-class Float(Numeric):
- """A type for ``float`` numbers.
-
- Returns Python ``float`` objects by default, applying
- conversion as needed.
-
- """
-
- __visit_name__ = 'float'
-
- scale = None
-
- def __init__(self, precision=None, asdecimal=False, **kwargs):
- """
- Construct a Float.
-
- :param precision: the numeric precision for use in DDL ``CREATE
- TABLE``.
-
- :param asdecimal: the same flag as that of :class:`.Numeric`, but
- defaults to ``False``. Note that setting this flag to ``True``
- results in floating point conversion.
-
- """
- self.precision = precision
- self.asdecimal = asdecimal
-
- def result_processor(self, dialect, coltype):
- if self.asdecimal:
- return processors.to_decimal_processor_factory(decimal.Decimal)
- else:
- return None
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.mul:{
- Interval:Interval,
- Numeric:Float,
- },
- # Py2K
- operators.div:{
- Numeric:Float,
- },
- # end Py2K
- operators.truediv:{
- Numeric:Float,
- },
- operators.add:{
- Numeric:Float,
- },
- operators.sub:{
- Numeric:Float,
- }
- }
-
-
-class DateTime(_DateAffinity, TypeEngine):
- """A type for ``datetime.datetime()`` objects.
-
- Date and time types return objects from the Python ``datetime``
- module. Most DBAPIs have built in support for the datetime
- module, with the noted exception of SQLite. In the case of
- SQLite, date and time types are stored as strings which are then
- converted back to datetime objects when rows are returned.
-
- """
-
- __visit_name__ = 'datetime'
-
- def __init__(self, timezone=False):
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add:{
- Interval:DateTime,
- },
- operators.sub:{
- Interval:DateTime,
- DateTime:Interval,
- },
- }
-
-
-class Date(_DateAffinity, TypeEngine):
- """A type for ``datetime.date()`` objects."""
-
- __visit_name__ = 'date'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add:{
- Integer:Date,
- Interval:DateTime,
- Time:DateTime,
- },
- operators.sub:{
- # date - integer = date
- Integer:Date,
-
- # date - date = integer.
- Date:Integer,
-
- Interval:DateTime,
-
- # date - datetime = interval,
- # this one is not in the PG docs
- # but works
- DateTime:Interval,
- },
- }
-
-
-class Time(_DateAffinity, TypeEngine):
- """A type for ``datetime.time()`` objects."""
-
- __visit_name__ = 'time'
-
- def __init__(self, timezone=False):
- self.timezone = timezone
-
- def get_dbapi_type(self, dbapi):
- return dbapi.DATETIME
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add:{
- Date:DateTime,
- Interval:Time
- },
- operators.sub:{
- Time:Interval,
- Interval:Time,
- },
- }
-
-
-class _Binary(TypeEngine):
- """Define base behavior for binary types."""
-
- def __init__(self, length=None):
- self.length = length
-
- # Python 3 - sqlite3 doesn't need the `Binary` conversion
- # here, though pg8000 does to indicate "bytea"
- def bind_processor(self, dialect):
- DBAPIBinary = dialect.dbapi.Binary
- def process(value):
- x = self
- if value is not None:
- return DBAPIBinary(value)
- else:
- return None
- return process
-
- # Python 3 has native bytes() type
- # both sqlite3 and pg8000 seem to return it
- # (i.e. and not 'memoryview')
- # Py2K
- def result_processor(self, dialect, coltype):
- if util.jython:
- def process(value):
- if value is not None:
- if isinstance(value, array.array):
- return value.tostring()
- return str(value)
- else:
- return None
- else:
- process = processors.to_str
- return process
- # end Py2K
-
- def _coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine._coerce_compared_value` for a description."""
-
- if isinstance(value, basestring):
- return self
- else:
- return super(_Binary, self)._coerce_compared_value(op, value)
-
- def get_dbapi_type(self, dbapi):
- return dbapi.BINARY
-
-class LargeBinary(_Binary):
- """A type for large binary byte data.
-
- The Binary type generates BLOB or BYTEA when tables are created,
- and also converts incoming values using the ``Binary`` callable
- provided by each DB-API.
-
- """
-
- __visit_name__ = 'large_binary'
-
- def __init__(self, length=None):
- """
- Construct a LargeBinary type.
-
- :param length: optional, a length for the column for use in
- DDL statements, for those BLOB types that accept a length
- (i.e. MySQL). It does *not* produce a small BINARY/VARBINARY
- type - use the BINARY/VARBINARY types specifically for those.
- May be safely omitted if no ``CREATE
- TABLE`` will be issued. Certain databases may require a
- *length* for use in DDL, and will raise an exception when
- the ``CREATE TABLE`` DDL is issued.
-
- """
- _Binary.__init__(self, length=length)
-
-class Binary(LargeBinary):
- """Deprecated. Renamed to LargeBinary."""
-
- def __init__(self, *arg, **kw):
- util.warn_deprecated('The Binary type has been renamed to '
- 'LargeBinary.')
- LargeBinary.__init__(self, *arg, **kw)
-
-class SchemaType(events.SchemaEventTarget):
- """Mark a type as possibly requiring schema-level DDL for usage.
-
- Supports types that must be explicitly created/dropped (i.e. PG ENUM type)
- as well as types that are complemented by table or schema level
- constraints, triggers, and other rules.
-
- :class:`.SchemaType` classes can also be targets for the
- :meth:`.DDLEvents.before_parent_attach` and :meth:`.DDLEvents.after_parent_attach`
- events, where the events fire off surrounding the association of
- the type object with a parent :class:`.Column`.
-
- """
-
- def __init__(self, **kw):
- self.name = kw.pop('name', None)
- self.quote = kw.pop('quote', None)
- self.schema = kw.pop('schema', None)
- self.metadata = kw.pop('metadata', None)
- if self.metadata:
- self.metadata.append_ddl_listener('before-create',
- util.portable_instancemethod(self._on_metadata_create))
- self.metadata.append_ddl_listener('after-drop',
- util.portable_instancemethod(self._on_metadata_drop))
-
- def _set_parent(self, column):
- column._on_table_attach(util.portable_instancemethod(self._set_table))
-
- def _set_table(self, column, table):
- table.append_ddl_listener('before-create',
- util.portable_instancemethod(
- self._on_table_create))
- table.append_ddl_listener('after-drop',
- util.portable_instancemethod(
- self._on_table_drop))
- if self.metadata is None:
- table.metadata.append_ddl_listener('before-create',
- util.portable_instancemethod(self._on_metadata_create))
- table.metadata.append_ddl_listener('after-drop',
- util.portable_instancemethod(self._on_metadata_drop))
-
- @property
- def bind(self):
- return self.metadata and self.metadata.bind or None
-
- def create(self, bind=None, checkfirst=False):
- """Issue CREATE ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.create(bind=bind, checkfirst=checkfirst)
-
- def drop(self, bind=None, checkfirst=False):
- """Issue DROP ddl for this type, if applicable."""
-
- if bind is None:
- bind = schema._bind_or_error(self)
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t.drop(bind=bind, checkfirst=checkfirst)
-
- def _on_table_create(self, event, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_create(event, target, bind, **kw)
-
- def _on_table_drop(self, event, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_table_drop(event, target, bind, **kw)
-
- def _on_metadata_create(self, event, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_create(event, target, bind, **kw)
-
- def _on_metadata_drop(self, event, target, bind, **kw):
- t = self.dialect_impl(bind.dialect)
- if t.__class__ is not self.__class__ and isinstance(t, SchemaType):
- t._on_metadata_drop(event, target, bind, **kw)
-
-class Enum(String, SchemaType):
- """Generic Enum Type.
-
- The Enum type provides a set of possible string values which the
- column is constrained towards.
-
- By default, uses the backend's native ENUM type if available,
- else uses VARCHAR + a CHECK constraint.
- """
-
- __visit_name__ = 'enum'
-
- def __init__(self, *enums, **kw):
- """Construct an enum.
-
- Keyword arguments which don't apply to a specific backend are ignored
- by that backend.
-
- :param \*enums: string or unicode enumeration labels. If unicode
- labels are present, the `convert_unicode` flag is auto-enabled.
-
- :param convert_unicode: Enable unicode-aware bind parameter and
- result-set processing for this Enum's data. This is set
- automatically based on the presence of unicode label strings.
-
- :param metadata: Associate this type directly with a ``MetaData``
- object. For types that exist on the target database as an
- independent schema construct (Postgresql), this type will be
- created and dropped within ``create_all()`` and ``drop_all()``
- operations. If the type is not associated with any ``MetaData``
- object, it will associate itself with each ``Table`` in which it is
- used, and will be created when any of those individual tables are
- created, after a check is performed for its existence. The type is
- only dropped when ``drop_all()`` is called for that ``Table``
- object's metadata, however.
-
- :param name: The name of this type. This is required for Postgresql
- and any future supported database which requires an explicitly
- named type, or an explicitly named constraint in order to generate
- the type and/or a table that uses it.
-
- :param native_enum: Use the database's native ENUM type when
- available. Defaults to True. When False, uses VARCHAR + check
- constraint for all backends.
-
- :param schema: Schema name of this type. For types that exist on the
- target database as an independent schema construct (Postgresql),
- this parameter specifies the named schema in which the type is
- present.
-
- :param quote: Force quoting to be on or off on the type's name. If
- left as the default of `None`, the usual schema-level "case
- sensitive"/"reserved name" rules are used to determine if this
- type's name should be quoted.
-
- """
- self.enums = enums
- self.native_enum = kw.pop('native_enum', True)
- convert_unicode = kw.pop('convert_unicode', None)
- if convert_unicode is None:
- for e in enums:
- if isinstance(e, unicode):
- convert_unicode = True
- break
- else:
- convert_unicode = False
-
- if self.enums:
- length = max(len(x) for x in self.enums)
- else:
- length = 0
- String.__init__(self,
- length=length,
- convert_unicode=convert_unicode,
- )
- SchemaType.__init__(self, **kw)
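-
- # A minimal construction sketch (names illustrative): on Postgresql this
- # emits a native CREATE TYPE ... AS ENUM; elsewhere it falls back to
- # VARCHAR(6) plus a CHECK constraint, per _should_create_constraint below.
- #
- #   from sqlalchemy import Column
- #   Column('status', Enum('new', 'open', 'closed', name='status_types'))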
-
- def _should_create_constraint(self, compiler):
- return not self.native_enum or \
- not compiler.dialect.supports_native_enum
-
- def _set_table(self, column, table):
- if self.native_enum:
- SchemaType._set_table(self, column, table)
-
- e = schema.CheckConstraint(
- column.in_(self.enums),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- def adapt(self, impltype, **kw):
- if issubclass(impltype, Enum):
- return impltype(name=self.name,
- quote=self.quote,
- schema=self.schema,
- metadata=self.metadata,
- convert_unicode=self.convert_unicode,
- native_enum=self.native_enum,
- *self.enums,
- **kw
- )
- else:
- return super(Enum, self).adapt(impltype, **kw)
-
-class PickleType(MutableType, TypeDecorator):
- """Holds Python objects, which are serialized using pickle.
-
- PickleType builds upon the Binary type to apply Python's
- ``pickle.dumps()`` to incoming objects, and ``pickle.loads()`` on
- the way out, allowing any pickleable Python object to be stored as
- a serialized binary field.
-
- """
-
- impl = LargeBinary
-
- def __init__(self, protocol=pickle.HIGHEST_PROTOCOL,
- pickler=None, mutable=False, comparator=None):
- """
- Construct a PickleType.
-
- :param protocol: defaults to ``pickle.HIGHEST_PROTOCOL``.
-
- :param pickler: defaults to the ``cPickle`` module, or ``pickle`` if
- cPickle is not available. May be any object with
- pickle-compatible ``dumps`` and ``loads`` methods.
-
- :param mutable: defaults to False; implements
- :meth:`AbstractType.is_mutable`. When ``True``, incoming
- objects will be compared against copies of themselves
- using the Python "equals" operator, unless the
- ``comparator`` argument is present. See
- :class:`.MutableType` for details on "mutable" type
- behavior. (default changed from ``True`` in
- 0.7.0).
-
- .. note:: This functionality is now superseded by the
- ``sqlalchemy.ext.mutable`` extension described in
- :ref:`mutable_toplevel`.
-
- :param comparator: a 2-arg callable predicate used
- to compare values of this type. If left as ``None``,
- the Python "equals" operator is used to compare values.
-
- """
- self.protocol = protocol
- self.pickler = pickler or pickle
- self.mutable = mutable
- self.comparator = comparator
- super(PickleType, self).__init__()
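-
- # A short usage sketch (column names illustrative); see the mutability
- # caveats in MutableType above:
- #
- #   from sqlalchemy import Column
- #   Column('prefs', PickleType(mutable=True))   # in-place dict changes tracked
- #   Column('snapshot', PickleType())            # default: no mutation tracking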
-
- def __reduce__(self):
- return PickleType, (self.protocol,
- None,
- self.mutable,
- self.comparator)
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- dumps = self.pickler.dumps
- protocol = self.protocol
- if impl_processor:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = dumps(value, protocol)
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- loads = self.pickler.loads
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return loads(value)
- else:
- def process(value):
- if value is None:
- return None
- return loads(value)
- return process
-
- def copy_value(self, value):
- if self.mutable:
- return self.pickler.loads(
- self.pickler.dumps(value, self.protocol))
- else:
- return value
-
- def compare_values(self, x, y):
- if self.comparator:
- return self.comparator(x, y)
- else:
- return x == y
-
- def is_mutable(self):
- """Return True if the target Python type is 'mutable'.
-
- When this method is overridden, :meth:`copy_value` should
- also be supplied. The :class:`.MutableType` mixin
- is recommended as a helper.
-
- """
- return self.mutable
-
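A minimal round-trip sketch of the two processors above, assuming the 0.7 package removed by this diff is importable; the engine URL, table, and column names are illustrative::

    from sqlalchemy import (create_engine, MetaData, Table, Column,
                            Integer, PickleType)

    engine = create_engine('sqlite://')
    metadata = MetaData()
    blobs = Table('blobs', metadata,
                  Column('id', Integer, primary_key=True),
                  Column('data', PickleType()))
    metadata.create_all(engine)

    # bind_processor pickles on the way in; result_processor
    # unpickles on the way out
    engine.execute(blobs.insert(), data={'a': [1, 2, 3]})
    row = engine.execute(blobs.select()).fetchone()
    assert row['data'] == {'a': [1, 2, 3]}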
-
-class Boolean(TypeEngine, SchemaType):
- """A bool datatype.
-
- Boolean typically uses BOOLEAN or SMALLINT on the DDL side, and on
- the Python side deals in ``True`` or ``False``.
-
- """
-
- __visit_name__ = 'boolean'
-
- def __init__(self, create_constraint=True, name=None):
- """Construct a Boolean.
-
- :param create_constraint: defaults to True. If the boolean
- is generated as an int/smallint, also create a CHECK constraint
- on the table that ensures 1 or 0 as a value.
-
- :param name: if a CHECK constraint is generated, specify
- the name of the constraint.
-
- """
- self.create_constraint = create_constraint
- self.name = name
-
- def _should_create_constraint(self, compiler):
- return not compiler.dialect.supports_native_boolean
-
- def _set_table(self, column, table):
- if not self.create_constraint:
- return
-
- e = schema.CheckConstraint(
- column.in_([0, 1]),
- name=self.name,
- _create_rule=util.portable_instancemethod(
- self._should_create_constraint)
- )
- table.append_constraint(e)
-
- def bind_processor(self, dialect):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.boolean_to_int
-
- def result_processor(self, dialect, coltype):
- if dialect.supports_native_boolean:
- return None
- else:
- return processors.int_to_boolean
-
-class Interval(_DateAffinity, TypeDecorator):
- """A type for ``datetime.timedelta()`` objects.
-
- The Interval type deals with ``datetime.timedelta`` objects. In
- PostgreSQL, the native ``INTERVAL`` type is used; for others, the
- value is stored as a datetime which is relative to the "epoch"
- (Jan. 1, 1970).
-
- Note that the ``Interval`` type does not currently provide date arithmetic
- operations on platforms which do not support interval types natively. Such
- operations usually require transformation of both sides of the expression
- (such as, conversion of both sides into integer epoch values first) which
- currently is a manual procedure (such as via
- :attr:`~sqlalchemy.sql.expression.func`).
-
- """
-
- impl = DateTime
- epoch = dt.datetime.utcfromtimestamp(0)
-
- def __init__(self, native=True,
- second_precision=None,
- day_precision=None):
- """Construct an Interval object.
-
- :param native: when True, use the actual
- INTERVAL type provided by the database, if
- supported (currently Postgresql, Oracle).
- Otherwise, represent the interval data as
- an epoch value regardless.
-
- :param second_precision: For native interval types
- which support a "fractional seconds precision" parameter,
- i.e. Oracle and Postgresql
-
- :param day_precision: for native interval types which
- support a "day precision" parameter, i.e. Oracle.
-
- """
- super(Interval, self).__init__()
- self.native = native
- self.second_precision = second_precision
- self.day_precision = day_precision
-
- def adapt(self, cls, **kw):
- if self.native and hasattr(cls, '_adapt_from_generic_interval'):
- return cls._adapt_from_generic_interval(self, **kw)
- else:
- return self.__class__(
- native=self.native,
- second_precision=self.second_precision,
- day_precision=self.day_precision,
- **kw)
-
- def bind_processor(self, dialect):
- impl_processor = self.impl.bind_processor(dialect)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- if value is not None:
- value = epoch + value
- return impl_processor(value)
- else:
- def process(value):
- if value is not None:
- value = epoch + value
- return value
- return process
-
- def result_processor(self, dialect, coltype):
- impl_processor = self.impl.result_processor(dialect, coltype)
- epoch = self.epoch
- if impl_processor:
- def process(value):
- value = impl_processor(value)
- if value is None:
- return None
- return value - epoch
- else:
- def process(value):
- if value is None:
- return None
- return value - epoch
- return process
-
- @util.memoized_property
- def _expression_adaptations(self):
- return {
- operators.add: {
- Date: DateTime,
- Interval: Interval,
- DateTime: DateTime,
- Time: Time,
- },
- operators.sub: {
- Interval: Interval
- },
- operators.mul: {
- Numeric: Interval
- },
- operators.truediv: {
- Numeric: Interval
- },
- # Py2K
- operators.div: {
- Numeric: Interval
- }
- # end Py2K
- }
-
- @property
- def _type_affinity(self):
- return Interval
-
- def _coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine._coerce_compared_value` for a description."""
-
- return self.impl._coerce_compared_value(op, value)
-
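The epoch arithmetic performed by the two Interval processors above reduces to plain ``datetime`` math; a standalone sketch using only the standard library::

    import datetime as dt

    epoch = dt.datetime.utcfromtimestamp(0)
    delta = dt.timedelta(days=2, hours=3)

    stored = epoch + delta           # what a non-native backend persists
    assert stored - epoch == delta   # what result_processor hands back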
-
-class REAL(Float):
- """The SQL REAL type."""
-
- __visit_name__ = 'REAL'
-
-class FLOAT(Float):
- """The SQL FLOAT type."""
-
- __visit_name__ = 'FLOAT'
-
-class NUMERIC(Numeric):
- """The SQL NUMERIC type."""
-
- __visit_name__ = 'NUMERIC'
-
-
-class DECIMAL(Numeric):
- """The SQL DECIMAL type."""
-
- __visit_name__ = 'DECIMAL'
-
-
-class INTEGER(Integer):
- """The SQL INT or INTEGER type."""
-
- __visit_name__ = 'INTEGER'
-INT = INTEGER
-
-
-class SMALLINT(SmallInteger):
- """The SQL SMALLINT type."""
-
- __visit_name__ = 'SMALLINT'
-
-
-class BIGINT(BigInteger):
- """The SQL BIGINT type."""
-
- __visit_name__ = 'BIGINT'
-
-class TIMESTAMP(DateTime):
- """The SQL TIMESTAMP type."""
-
- __visit_name__ = 'TIMESTAMP'
-
- def get_dbapi_type(self, dbapi):
- return dbapi.TIMESTAMP
-
-class DATETIME(DateTime):
- """The SQL DATETIME type."""
-
- __visit_name__ = 'DATETIME'
-
-
-class DATE(Date):
- """The SQL DATE type."""
-
- __visit_name__ = 'DATE'
-
-
-class TIME(Time):
- """The SQL TIME type."""
-
- __visit_name__ = 'TIME'
-
-class TEXT(Text):
- """The SQL TEXT type."""
-
- __visit_name__ = 'TEXT'
-
-class CLOB(Text):
- """The CLOB type.
-
- This type is found in Oracle and Informix.
- """
-
- __visit_name__ = 'CLOB'
-
-class VARCHAR(String):
- """The SQL VARCHAR type."""
-
- __visit_name__ = 'VARCHAR'
-
-class NVARCHAR(Unicode):
- """The SQL NVARCHAR type."""
-
- __visit_name__ = 'NVARCHAR'
-
-class CHAR(String):
- """The SQL CHAR type."""
-
- __visit_name__ = 'CHAR'
-
-
-class NCHAR(Unicode):
- """The SQL NCHAR type."""
-
- __visit_name__ = 'NCHAR'
-
-
-class BLOB(LargeBinary):
- """The SQL BLOB type."""
-
- __visit_name__ = 'BLOB'
-
-class BINARY(_Binary):
- """The SQL BINARY type."""
-
- __visit_name__ = 'BINARY'
-
-class VARBINARY(_Binary):
- """The SQL VARBINARY type."""
-
- __visit_name__ = 'VARBINARY'
-
-
-class BOOLEAN(Boolean):
- """The SQL BOOLEAN type."""
-
- __visit_name__ = 'BOOLEAN'
-
-NULLTYPE = NullType()
-BOOLEANTYPE = Boolean()
-STRINGTYPE = String()
-
-_type_map = {
- str: String(),
- # Py3K
- #bytes : LargeBinary(),
- # Py2K
- unicode: Unicode(),
- # end Py2K
- int: Integer(),
- float: Numeric(),
- bool: BOOLEANTYPE,
- decimal.Decimal: Numeric(),
- dt.date: Date(),
- dt.datetime: DateTime(),
- dt.time: Time(),
- dt.timedelta: Interval(),
- NoneType: NULLTYPE
-}
-
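``_type_map`` is what gives a literal Python value a SQL type when none is supplied; a toy sketch of that lookup (the helper name here is hypothetical, and the real call sites live in the expression layer)::

    import datetime as dt

    toy_map = {int: 'Integer', str: 'String', dt.date: 'Date'}

    def infer_type(value):
        # unknown Python types fall back to a NULL type, as the real
        # lookup falls back to NULLTYPE
        return toy_map.get(type(value), 'NullType')

    assert infer_type(5) == 'Integer'
    assert infer_type(object()) == 'NullType'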
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/__init__.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/__init__.py
deleted file mode 100755
index 93c418ed..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/__init__.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# util/__init__.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from compat import callable, cmp, reduce, defaultdict, py25_dict, \
- threading, py3k, jython, pypy, win32, set_types, buffer, pickle, \
- update_wrapper, partial, md5_hex, decode_slice, dottedgetter
-
-from _collections import NamedTuple, ImmutableContainer, immutabledict, \
- Properties, OrderedProperties, ImmutableProperties, OrderedDict, \
- OrderedSet, IdentitySet, OrderedIdentitySet, column_set, \
- column_dict, ordered_column_set, populate_column_dict, unique_list, \
- UniqueAppender, PopulateDict, EMPTY_SET, to_list, to_set, \
- to_column_set, update_copy, flatten_iterator, WeakIdentityMapping, \
- LRUCache, ScopedRegistry, ThreadLocalRegistry
-
-from langhelpers import iterate_attributes, class_hierarchy, \
- portable_instancemethod, unbound_method_to_callable, \
- getargspec_init, format_argspec_init, format_argspec_plus, \
- get_func_kwargs, get_cls_kwargs, decorator, as_interface, \
- memoized_property, memoized_instancemethod, \
- reset_memoized, group_expirable_memoized_property, importlater, \
- monkeypatch_proxied_specials, asbool, bool_or_str, coerce_kw_type,\
- duck_type_collection, assert_arg_type, symbol, dictlike_iteritems,\
- classproperty, set_creation_order, warn_exception, warn, NoneType,\
- constructor_copy, methods_equivalent, chop_traceback, asint
-
-from deprecations import warn_deprecated, warn_pending_deprecation, \
- deprecated, pending_deprecation
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/_collections.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/_collections.py
deleted file mode 100755
index 3adbf991..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/_collections.py
+++ /dev/null
@@ -1,897 +0,0 @@
-# util/_collections.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Collection classes and helpers."""
-
-import sys
-import itertools
-import weakref
-import operator
-from langhelpers import symbol
-from compat import time_func, threading
-
-EMPTY_SET = frozenset()
-
-
-class NamedTuple(tuple):
- """tuple() subclass that adds labeled names.
-
- Is also pickleable.
-
- """
-
- def __new__(cls, vals, labels=None):
- t = tuple.__new__(cls, vals)
- if labels:
- t.__dict__.update(zip(labels, vals))
- t._labels = labels
- return t
-
- def keys(self):
- return [l for l in self._labels if l is not None]
-
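A usage sketch, assuming the class is importable as ``sqlalchemy.util.NamedTuple`` per the re-exports in util/__init__.py above::

    from sqlalchemy.util import NamedTuple

    nt = NamedTuple([1, 'two'], labels=['id', 'name'])
    assert nt[0] == 1                   # still an ordinary tuple
    assert nt.name == 'two'             # labels become attributes
    assert nt.keys() == ['id', 'name']  # None labels would be skipped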
-class ImmutableContainer(object):
- def _immutable(self, *arg, **kw):
- raise TypeError("%s object is immutable" % self.__class__.__name__)
-
- __delitem__ = __setitem__ = __setattr__ = _immutable
-
-class immutabledict(ImmutableContainer, dict):
-
- clear = pop = popitem = setdefault = \
- update = ImmutableContainer._immutable
-
- def __new__(cls, *args):
- new = dict.__new__(cls)
- dict.__init__(new, *args)
- return new
-
- def __init__(self, *args):
- pass
-
- def __reduce__(self):
- return immutabledict, (dict(self), )
-
- def union(self, d):
- if not self:
- return immutabledict(d)
- else:
- d2 = immutabledict(self)
- dict.update(d2, d)
- return d2
-
- def __repr__(self):
- return "immutabledict(%s)" % dict.__repr__(self)
-
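A sketch of the semantics, under the same importability assumption::

    from sqlalchemy.util import immutabledict

    d = immutabledict({'a': 1})
    d2 = d.union({'b': 2})            # union returns a new immutabledict
    assert d2['b'] == 2 and 'b' not in d
    try:
        d['c'] = 3
    except TypeError:
        pass                          # all mutation raises TypeError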
-class Properties(object):
- """Provide a __getattr__/__setattr__ interface over a dict."""
-
- def __init__(self, data):
- self.__dict__['_data'] = data
-
- def __len__(self):
- return len(self._data)
-
- def __iter__(self):
- return self._data.itervalues()
-
- def __add__(self, other):
- return list(self) + list(other)
-
- def __setitem__(self, key, object):
- self._data[key] = object
-
- def __getitem__(self, key):
- return self._data[key]
-
- def __delitem__(self, key):
- del self._data[key]
-
- def __setattr__(self, key, object):
- self._data[key] = object
-
- def __getstate__(self):
- return {'_data': self.__dict__['_data']}
-
- def __setstate__(self, state):
- self.__dict__['_data'] = state['_data']
-
- def __getattr__(self, key):
- try:
- return self._data[key]
- except KeyError:
- raise AttributeError(key)
-
- def __contains__(self, key):
- return key in self._data
-
- def as_immutable(self):
- """Return an immutable proxy for this :class:`.Properties`."""
-
- return ImmutableProperties(self._data)
-
- def update(self, value):
- self._data.update(value)
-
- def get(self, key, default=None):
- if key in self:
- return self[key]
- else:
- return default
-
- def keys(self):
- return self._data.keys()
-
- def has_key(self, key):
- return key in self._data
-
- def clear(self):
- self._data.clear()
-
-class OrderedProperties(Properties):
- """Provide a __getattr__/__setattr__ interface with an OrderedDict
- as backing store."""
- def __init__(self):
- Properties.__init__(self, OrderedDict())
-
-
-class ImmutableProperties(ImmutableContainer, Properties):
- """Provide immutable dict/object attribute to an underlying dictionary."""
-
-
-class OrderedDict(dict):
- """A dict that returns keys/values/items in the order they were added."""
-
- def __init__(self, ____sequence=None, **kwargs):
- self._list = []
- if ____sequence is None:
- if kwargs:
- self.update(**kwargs)
- else:
- self.update(____sequence, **kwargs)
-
- def clear(self):
- self._list = []
- dict.clear(self)
-
- def copy(self):
- return self.__copy__()
-
- def __copy__(self):
- return OrderedDict(self)
-
- def sort(self, *arg, **kw):
- self._list.sort(*arg, **kw)
-
- def update(self, ____sequence=None, **kwargs):
- if ____sequence is not None:
- if hasattr(____sequence, 'keys'):
- for key in ____sequence.keys():
- self.__setitem__(key, ____sequence[key])
- else:
- for key, value in ____sequence:
- self[key] = value
- if kwargs:
- self.update(kwargs)
-
- def setdefault(self, key, value):
- if key not in self:
- self.__setitem__(key, value)
- return value
- else:
- return self.__getitem__(key)
-
- def __iter__(self):
- return iter(self._list)
-
- def values(self):
- return [self[key] for key in self._list]
-
- def itervalues(self):
- return iter([self[key] for key in self._list])
-
- def keys(self):
- return list(self._list)
-
- def iterkeys(self):
- return iter(self.keys())
-
- def items(self):
- return [(key, self[key]) for key in self.keys()]
-
- def iteritems(self):
- return iter(self.items())
-
- def __setitem__(self, key, object):
- if key not in self:
- try:
- self._list.append(key)
- except AttributeError:
- # work around Python pickle loads() with
- # dict subclass (seems to ignore __setstate__?)
- self._list = [key]
- dict.__setitem__(self, key, object)
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- self._list.remove(key)
-
- def pop(self, key, *default):
- present = key in self
- value = dict.pop(self, key, *default)
- if present:
- self._list.remove(key)
- return value
-
- def popitem(self):
- item = dict.popitem(self)
- self._list.remove(item[0])
- return item
-
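Behavioral sketch: insertion order is preserved, and re-assigning an existing key keeps its original position::

    from sqlalchemy.util import OrderedDict

    od = OrderedDict()
    od['b'] = 1
    od['a'] = 2
    od['b'] = 3                       # does not move 'b' to the end
    assert od.keys() == ['b', 'a']
    assert od.values() == [3, 2]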
-class OrderedSet(set):
- def __init__(self, d=None):
- set.__init__(self)
- self._list = []
- if d is not None:
- self.update(d)
-
- def add(self, element):
- if element not in self:
- self._list.append(element)
- set.add(self, element)
-
- def remove(self, element):
- set.remove(self, element)
- self._list.remove(element)
-
- def insert(self, pos, element):
- if element not in self:
- self._list.insert(pos, element)
- set.add(self, element)
-
- def discard(self, element):
- if element in self:
- self._list.remove(element)
- set.remove(self, element)
-
- def clear(self):
- set.clear(self)
- self._list = []
-
- def __getitem__(self, key):
- return self._list[key]
-
- def __iter__(self):
- return iter(self._list)
-
- def __add__(self, other):
- return self.union(other)
-
- def __repr__(self):
- return '%s(%r)' % (self.__class__.__name__, self._list)
-
- __str__ = __repr__
-
- def update(self, iterable):
- for e in iterable:
- if e not in self:
- self._list.append(e)
- set.add(self, e)
- return self
-
- __ior__ = update
-
- def union(self, other):
- result = self.__class__(self)
- result.update(other)
- return result
-
- __or__ = union
-
- def intersection(self, other):
- other = set(other)
- return self.__class__(a for a in self if a in other)
-
- __and__ = intersection
-
- def symmetric_difference(self, other):
- other = set(other)
- result = self.__class__(a for a in self if a not in other)
- result.update(a for a in other if a not in self)
- return result
-
- __xor__ = symmetric_difference
-
- def difference(self, other):
- other = set(other)
- return self.__class__(a for a in self if a not in other)
-
- __sub__ = difference
-
- def intersection_update(self, other):
- other = set(other)
- set.intersection_update(self, other)
- self._list = [a for a in self._list if a in other]
- return self
-
- __iand__ = intersection_update
-
- def symmetric_difference_update(self, other):
- set.symmetric_difference_update(self, other)
- self._list = [a for a in self._list if a in self]
- self._list += [a for a in other._list if a in self]
- return self
-
- __ixor__ = symmetric_difference_update
-
- def difference_update(self, other):
- set.difference_update(self, other)
- self._list = [a for a in self._list if a in self]
- return self
-
- __isub__ = difference_update
-
-
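A sketch of what OrderedSet adds over the builtin set::

    from sqlalchemy.util import OrderedSet

    s = OrderedSet([3, 1, 2, 1])
    assert list(s) == [3, 1, 2]                  # dedupes, keeps order
    assert list(s.union([0, 3])) == [3, 1, 2, 0]
    assert s[0] == 3                             # indexable, unlike set()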
-class IdentitySet(object):
- """A set that considers only object id() for uniqueness.
-
- This strategy has edge cases for builtin types: it's possible to have
- two 'foo' strings in one of these sets, for example. Use sparingly.
-
- """
-
- _working_set = set
-
- def __init__(self, iterable=None):
- self._members = dict()
- if iterable:
- for o in iterable:
- self.add(o)
-
- def add(self, value):
- self._members[id(value)] = value
-
- def __contains__(self, value):
- return id(value) in self._members
-
- def remove(self, value):
- del self._members[id(value)]
-
- def discard(self, value):
- try:
- self.remove(value)
- except KeyError:
- pass
-
- def pop(self):
- try:
- pair = self._members.popitem()
- return pair[1]
- except KeyError:
- raise KeyError('pop from an empty set')
-
- def clear(self):
- self._members.clear()
-
- def __cmp__(self, other):
- raise TypeError('cannot compare sets using cmp()')
-
- def __eq__(self, other):
- if isinstance(other, IdentitySet):
- return self._members == other._members
- else:
- return False
-
- def __ne__(self, other):
- if isinstance(other, IdentitySet):
- return self._members != other._members
- else:
- return True
-
- def issubset(self, iterable):
- other = type(self)(iterable)
-
- if len(self) > len(other):
- return False
- for m in itertools.ifilterfalse(other._members.__contains__,
- self._members.iterkeys()):
- return False
- return True
-
- def __le__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.issubset(other)
-
- def __lt__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return len(self) < len(other) and self.issubset(other)
-
- def issuperset(self, iterable):
- other = type(self)(iterable)
-
- if len(self) < len(other):
- return False
-
- for m in itertools.ifilterfalse(self._members.__contains__,
- other._members.iterkeys()):
- return False
- return True
-
- def __ge__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.issuperset(other)
-
- def __gt__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return len(self) > len(other) and self.issuperset(other)
-
- def union(self, iterable):
- result = type(self)()
- # testlib.pragma exempt:__hash__
- result._members.update(
- self._working_set(self._member_id_tuples()).union(_iter_id(iterable)))
- return result
-
- def __or__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.union(other)
-
- def update(self, iterable):
- self._members = self.union(iterable)._members
-
- def __ior__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- self.update(other)
- return self
-
- def difference(self, iterable):
- result = type(self)()
- # testlib.pragma exempt:__hash__
- result._members.update(
- self._working_set(self._member_id_tuples()).difference(_iter_id(iterable)))
- return result
-
- def __sub__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.difference(other)
-
- def difference_update(self, iterable):
- self._members = self.difference(iterable)._members
-
- def __isub__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- self.difference_update(other)
- return self
-
- def intersection(self, iterable):
- result = type(self)()
- # testlib.pragma exempt:__hash__
- result._members.update(
- self._working_set(self._member_id_tuples()).intersection(_iter_id(iterable)))
- return result
-
- def __and__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.intersection(other)
-
- def intersection_update(self, iterable):
- self._members = self.intersection(iterable)._members
-
- def __iand__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- self.intersection_update(other)
- return self
-
- def symmetric_difference(self, iterable):
- result = type(self)()
- # testlib.pragma exempt:__hash__
- result._members.update(
- self._working_set(self._member_id_tuples()).symmetric_difference(_iter_id(iterable)))
- return result
-
- def _member_id_tuples(self):
- return ((id(v), v) for v in self._members.itervalues())
-
- def __xor__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- return self.symmetric_difference(other)
-
- def symmetric_difference_update(self, iterable):
- self._members = self.symmetric_difference(iterable)._members
-
- def __ixor__(self, other):
- if not isinstance(other, IdentitySet):
- return NotImplemented
- self.symmetric_difference_update(other)
- return self
-
- def copy(self):
- return type(self)(self._members.itervalues())
-
- __copy__ = copy
-
- def __len__(self):
- return len(self._members)
-
- def __iter__(self):
- return self._members.itervalues()
-
- def __hash__(self):
- raise TypeError('set objects are unhashable')
-
- def __repr__(self):
- return '%s(%r)' % (type(self).__name__, self._members.values())
-
-
-class OrderedIdentitySet(IdentitySet):
- class _working_set(OrderedSet):
- # a testing pragma: exempt the OIDS working set from the test suite's
- # "never call the user's __hash__" assertions. this is a big hammer,
- # but it's safe here: IDS operates on (id, instance) tuples in the
- # working set.
- __sa_hash_exempt__ = True
-
- def __init__(self, iterable=None):
- IdentitySet.__init__(self)
- self._members = OrderedDict()
- if iterable:
- for o in iterable:
- self.add(o)
-
-
-if sys.version_info >= (2, 5):
- class PopulateDict(dict):
- """A dict which populates missing values via a creation function.
-
- Note the creation function takes a key, unlike
- collections.defaultdict.
-
- """
-
- def __init__(self, creator):
- self.creator = creator
-
- def __missing__(self, key):
- self[key] = val = self.creator(key)
- return val
-else:
- class PopulateDict(dict):
- """A dict which populates missing values via a creation function."""
-
- def __init__(self, creator):
- self.creator = creator
-
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- self[key] = value = self.creator(key)
- return value
-
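Usage sketch; unlike ``collections.defaultdict``, the creation function receives the missing key::

    from sqlalchemy.util import PopulateDict

    cache = PopulateDict(lambda key: key.upper())
    assert cache['abc'] == 'ABC'          # created on first access
    assert dict(cache) == {'abc': 'ABC'}  # and stored for next time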
-# define collections that are capable of storing
-# ColumnElement objects as hashable keys/elements.
-column_set = set
-column_dict = dict
-ordered_column_set = OrderedSet
-populate_column_dict = PopulateDict
-
-def unique_list(seq, hashfunc=None):
- seen = {}
- if not hashfunc:
- return [x for x in seq
- if x not in seen
- and not seen.__setitem__(x, True)]
- else:
- return [x for x in seq
- if hashfunc(x) not in seen
- and not seen.__setitem__(hashfunc(x), True)]
-
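A sketch of the order-preserving deduplication, with ``hashfunc`` covering unhashable elements::

    from sqlalchemy.util import unique_list

    assert unique_list([3, 1, 3, 2, 1]) == [3, 1, 2]
    # hashfunc maps each element to a hashable key for the "seen" check
    assert unique_list([[1], [2], [1]], hashfunc=tuple) == [[1], [2]]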
-class UniqueAppender(object):
- """Appends items to a collection ensuring uniqueness.
-
- Additional appends() of the same object are ignored. Membership is
- determined by identity (``is``), not equality (``==``).
- """
-
- def __init__(self, data, via=None):
- self.data = data
- self._unique = {}
- if via:
- self._data_appender = getattr(data, via)
- elif hasattr(data, 'append'):
- self._data_appender = data.append
- elif hasattr(data, 'add'):
- self._data_appender = data.add
-
- def append(self, item):
- id_ = id(item)
- if id_ not in self._unique:
- self._data_appender(item)
- self._unique[id_] = True
-
- def __iter__(self):
- return iter(self.data)
-
-def to_list(x, default=None):
- if x is None:
- return default
- if not isinstance(x, (list, tuple)):
- return [x]
- else:
- return x
-
-def to_set(x):
- if x is None:
- return set()
- if not isinstance(x, set):
- return set(to_list(x))
- else:
- return x
-
-def to_column_set(x):
- if x is None:
- return column_set()
- if not isinstance(x, column_set):
- return column_set(to_list(x))
- else:
- return x
-
-def update_copy(d, _new=None, **kw):
- """Copy the given dict and update with the given values."""
-
- d = d.copy()
- if _new:
- d.update(_new)
- d.update(**kw)
- return d
-
-def flatten_iterator(x):
- """Given an iterator of which further sub-elements may also be
- iterators, flatten the sub-elements into a single iterator.
-
- """
- for elem in x:
- if not isinstance(elem, basestring) and hasattr(elem, '__iter__'):
- for y in flatten_iterator(elem):
- yield y
- else:
- yield elem
-
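A sketch; note that strings are yielded whole rather than descended into::

    from sqlalchemy.util import flatten_iterator

    nested = [1, [2, [3, 4]], 'ab']
    assert list(flatten_iterator(nested)) == [1, 2, 3, 4, 'ab']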
-class WeakIdentityMapping(weakref.WeakKeyDictionary):
- """A WeakKeyDictionary with an object identity index.
-
- Adds a .by_id dictionary to a regular WeakKeyDictionary. Trades
- performance during mutation operations for accelerated lookups by id().
-
- The usual cautions about weak dictionaries and iteration also apply to
- this subclass.
-
- """
- _none = symbol('none')
-
- def __init__(self):
- weakref.WeakKeyDictionary.__init__(self)
- self.by_id = {}
- self._weakrefs = {}
-
- def __setitem__(self, object, value):
- oid = id(object)
- self.by_id[oid] = value
- if oid not in self._weakrefs:
- self._weakrefs[oid] = self._ref(object)
- weakref.WeakKeyDictionary.__setitem__(self, object, value)
-
- def __delitem__(self, object):
- del self._weakrefs[id(object)]
- del self.by_id[id(object)]
- weakref.WeakKeyDictionary.__delitem__(self, object)
-
- def setdefault(self, object, default=None):
- value = weakref.WeakKeyDictionary.setdefault(self, object, default)
- oid = id(object)
- if value is default:
- self.by_id[oid] = default
- if oid not in self._weakrefs:
- self._weakrefs[oid] = self._ref(object)
- return value
-
- def pop(self, object, default=_none):
- if default is self._none:
- value = weakref.WeakKeyDictionary.pop(self, object)
- else:
- value = weakref.WeakKeyDictionary.pop(self, object, default)
- if id(object) in self.by_id:
- del self._weakrefs[id(object)]
- del self.by_id[id(object)]
- return value
-
- def popitem(self):
- item = weakref.WeakKeyDictionary.popitem(self)
- oid = id(item[0])
- del self._weakrefs[oid]
- del self.by_id[oid]
- return item
-
- def clear(self):
- # Py2K
- # in 3k, MutableMapping calls popitem()
- self._weakrefs.clear()
- self.by_id.clear()
- # end Py2K
- weakref.WeakKeyDictionary.clear(self)
-
- def update(self, *a, **kw):
- raise NotImplementedError
-
- def _cleanup(self, wr, key=None):
- if key is None:
- key = wr.key
- try:
- del self._weakrefs[key]
- except (KeyError, AttributeError): # pragma: no cover
- pass # pragma: no cover
- try:
- del self.by_id[key]
- except (KeyError, AttributeError): # pragma: no cover
- pass # pragma: no cover
-
- class _keyed_weakref(weakref.ref):
- def __init__(self, object, callback):
- weakref.ref.__init__(self, object, callback)
- self.key = id(object)
-
- def _ref(self, object):
- return self._keyed_weakref(object, self._cleanup)
-
-
-class LRUCache(dict):
- """Dictionary with 'squishy' removal of least
- recently used items.
-
- """
- def __init__(self, capacity=100, threshold=.5):
- self.capacity = capacity
- self.threshold = threshold
-
- def __getitem__(self, key):
- item = dict.__getitem__(self, key)
- item[2] = time_func()
- return item[1]
-
- def values(self):
- return [i[1] for i in dict.values(self)]
-
- def setdefault(self, key, value):
- if key in self:
- return self[key]
- else:
- self[key] = value
- return value
-
- def __setitem__(self, key, value):
- item = dict.get(self, key)
- if item is None:
- item = [key, value, time_func()]
- dict.__setitem__(self, key, item)
- else:
- item[1] = value
- self._manage_size()
-
- def _manage_size(self):
- while len(self) > self.capacity + self.capacity * self.threshold:
- bytime = sorted(dict.values(self),
- key=operator.itemgetter(2),
- reverse=True)
- for item in bytime[self.capacity:]:
- try:
- del self[item[0]]
- except KeyError:
- # if we couldn't find a key, most
- # likely some other thread broke in
- # on us. loop around and try again
- break
-
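A sketch of the "squishy" pruning: nothing is evicted until the size exceeds ``capacity * (1 + threshold)``, at which point the least recently used items are trimmed back down to ``capacity``::

    from sqlalchemy.util import LRUCache

    cache = LRUCache(capacity=2, threshold=.5)
    for k in 'abcd':
        cache[k] = k.upper()
    # the fourth insert pushed the size past 2 * 1.5 == 3, triggering
    # a prune back down to capacity
    assert len(cache) == 2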
-
-class ScopedRegistry(object):
- """A Registry that can store one or multiple instances of a single
- class on the basis of a "scope" function.
-
- The object implements ``__call__`` as the "getter", so by
- calling ``myregistry()`` the contained object is returned
- for the current scope.
-
- :param createfunc:
- a callable that returns a new object to be placed in the registry
-
- :param scopefunc:
- a callable that will return a key to store/retrieve an object.
- """
-
- def __init__(self, createfunc, scopefunc):
- """Construct a new :class:`.ScopedRegistry`.
-
- :param createfunc: A creation function that will generate
- a new value for the current scope, if none is present.
-
- :param scopefunc: A function that returns a hashable
- token representing the current scope (such as, current
- thread identifier).
-
- """
- self.createfunc = createfunc
- self.scopefunc = scopefunc
- self.registry = {}
-
- def __call__(self):
- key = self.scopefunc()
- try:
- return self.registry[key]
- except KeyError:
- return self.registry.setdefault(key, self.createfunc())
-
- def has(self):
- """Return True if an object is present in the current scope."""
-
- return self.scopefunc() in self.registry
-
- def set(self, obj):
- """Set the value for the current scope."""
-
- self.registry[self.scopefunc()] = obj
-
- def clear(self):
- """Clear the current scope, if any."""
-
- try:
- del self.registry[self.scopefunc()]
- except KeyError:
- pass
-
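A usage sketch with a hand-rolled scope function (a thread identifier would be the typical choice in practice)::

    from sqlalchemy.util import ScopedRegistry

    scope = ['request-1']
    registry = ScopedRegistry(createfunc=list, scopefunc=lambda: scope[0])

    a = registry()                 # creates a new list for 'request-1'
    assert registry() is a         # same scope -> same object
    scope[0] = 'request-2'
    assert registry() is not a     # new scope key -> new object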
-class ThreadLocalRegistry(ScopedRegistry):
- """A :class:`.ScopedRegistry` that uses a ``threading.local()``
- variable for storage.
-
- """
- def __init__(self, createfunc):
- self.createfunc = createfunc
- self.registry = threading.local()
-
- def __call__(self):
- try:
- return self.registry.value
- except AttributeError:
- val = self.registry.value = self.createfunc()
- return val
-
- def has(self):
- return hasattr(self.registry, "value")
-
- def set(self, obj):
- self.registry.value = obj
-
- def clear(self):
- try:
- del self.registry.value
- except AttributeError:
- pass
-
-def _iter_id(iterable):
- """Generator: ((id(o), o) for o in iterable)."""
-
- for item in iterable:
- yield id(item), item
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/compat.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/compat.py
deleted file mode 100755
index 0fb00450..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/compat.py
+++ /dev/null
@@ -1,211 +0,0 @@
-# util/compat.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Handle Python version/platform incompatibilities."""
-
-import sys
-
-# Py2K
-import __builtin__
-# end Py2K
-
-try:
- import threading
-except ImportError:
- import dummy_threading as threading
-
-py32 = sys.version_info >= (3, 2)
-py3k = getattr(sys, 'py3kwarning', False) or sys.version_info >= (3, 0)
-jython = sys.platform.startswith('java')
-pypy = hasattr(sys, 'pypy_version_info')
-win32 = sys.platform.startswith('win')
-
-if py3k:
- set_types = set
-elif sys.version_info < (2, 6):
- import sets
- set_types = set, sets.Set
-else:
- # 2.6 deprecates sets.Set, but we still need to be able to detect them
- # in user code and as return values from DB-APIs
- ignore = ('ignore', None, DeprecationWarning, None, 0)
- import warnings
- try:
- warnings.filters.insert(0, ignore)
- except Exception:
- import sets
- else:
- import sets
- warnings.filters.remove(ignore)
-
- set_types = set, sets.Set
-
-if py3k:
- import pickle
-else:
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
-
-# a controversial feature, required by MySQLdb currently
-def buffer(x):
- return x
-
-# Py2K
-buffer = getattr(__builtin__, 'buffer', buffer)
-# end Py2K
-
-try:
- from functools import update_wrapper
-except ImportError:
- def update_wrapper(wrapper, wrapped,
- assigned=('__doc__', '__module__', '__name__'),
- updated=('__dict__',)):
- for attr in assigned:
- setattr(wrapper, attr, getattr(wrapped, attr))
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, ()))
- return wrapper
-
-try:
- from functools import partial
-except ImportError:
- def partial(func, *args, **keywords):
- def newfunc(*fargs, **fkeywords):
- newkeywords = keywords.copy()
- newkeywords.update(fkeywords)
- return func(*(args + fargs), **newkeywords)
- return newfunc
-
-
-if py3k:
- # they're bringing it back in 3.2. brilliant !
- def callable(fn):
- return hasattr(fn, '__call__')
- def cmp(a, b):
- return (a > b) - (a < b)
-
- from functools import reduce
-else:
- callable = __builtin__.callable
- cmp = __builtin__.cmp
- reduce = __builtin__.reduce
-
-try:
- from collections import defaultdict
-except ImportError:
- class defaultdict(dict):
- def __init__(self, default_factory=None, *a, **kw):
- if (default_factory is not None and
- not hasattr(default_factory, '__call__')):
- raise TypeError('first argument must be callable')
- dict.__init__(self, *a, **kw)
- self.default_factory = default_factory
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- return self.__missing__(key)
- def __missing__(self, key):
- if self.default_factory is None:
- raise KeyError(key)
- self[key] = value = self.default_factory()
- return value
- def __reduce__(self):
- if self.default_factory is None:
- args = tuple()
- else:
- args = self.default_factory,
- return type(self), args, None, None, self.iteritems()
- def copy(self):
- return self.__copy__()
- def __copy__(self):
- return type(self)(self.default_factory, self)
- def __deepcopy__(self, memo):
- import copy
- return type(self)(self.default_factory,
- copy.deepcopy(self.items()))
- def __repr__(self):
- return 'defaultdict(%s, %s)' % (self.default_factory,
- dict.__repr__(self))
-
-
-# find or create a dict implementation that supports __missing__
-class _probe(dict):
- def __missing__(self, key):
- return 1
-
-try:
- try:
- _probe()['missing']
- py25_dict = dict
- except KeyError:
- class py25_dict(dict):
- def __getitem__(self, key):
- try:
- return dict.__getitem__(self, key)
- except KeyError:
- try:
- missing = self.__missing__
- except AttributeError:
- raise KeyError(key)
- else:
- return missing(key)
-finally:
- del _probe
-
-
-try:
- import hashlib
- _md5 = hashlib.md5
-except ImportError:
- import md5
- _md5 = md5.new
-
-def md5_hex(x):
- # Py3K
- #x = x.encode('utf-8')
- m = _md5()
- m.update(x)
- return m.hexdigest()
-
-import time
-if win32 or jython:
- time_func = time.clock
-else:
- time_func = time.time
-
-if sys.version_info >= (2, 5):
- def decode_slice(slc):
- """decode a slice object as sent to __getitem__.
-
- takes into account the 2.5 __index__() method, basically.
-
- """
- ret = []
- for x in slc.start, slc.stop, slc.step:
- if hasattr(x, '__index__'):
- x = x.__index__()
- ret.append(x)
- return tuple(ret)
-else:
- def decode_slice(slc):
- return (slc.start, slc.stop, slc.step)
-
-if sys.version_info >= (2, 6):
- from operator import attrgetter as dottedgetter
-else:
- def dottedgetter(attr):
- def g(obj):
- for name in attr.split("."):
- obj = getattr(obj, name)
- return obj
- return g
-
-
-import decimal
-
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/deprecations.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/deprecations.py
deleted file mode 100755
index d9018a26..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/deprecations.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# util/deprecations.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Helpers related to deprecation of functions, methods, classes, and other
-functionality."""
-
-from sqlalchemy import exc
-import warnings
-import re
-from langhelpers import decorator
-
-def warn_deprecated(msg, stacklevel=3):
- warnings.warn(msg, exc.SADeprecationWarning, stacklevel=stacklevel)
-
-def warn_pending_deprecation(msg, stacklevel=3):
- warnings.warn(msg, exc.SAPendingDeprecationWarning, stacklevel=stacklevel)
-
-def deprecated(version, message=None, add_deprecation_to_docstring=True):
- """Decorates a function and issues a deprecation warning on use.
-
- :param message:
- If provided, issue message in the warning. A sensible default
- is used if not provided.
-
- :param add_deprecation_to_docstring:
- Default True. If False, the wrapped function's __doc__ is left
- as-is. If True, the 'message' is prepended to the docs if
- provided, or sensible default if message is omitted.
-
- """
-
- if add_deprecation_to_docstring:
- header = ".. deprecated:: %s %s" % \
- (version, (message or ''))
- else:
- header = None
-
- if message is None:
- message = "Call to deprecated function %(func)s"
-
- def decorate(fn):
- return _decorate_with_warning(
- fn, exc.SADeprecationWarning,
- message % dict(func=fn.__name__), header)
- return decorate
-
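A usage sketch; the function name and message below are hypothetical, and the behavior shown (a warning on call, plus a ``.. deprecated::`` note prepended to the docstring) follows the code above::

    import warnings
    from sqlalchemy.util import deprecated

    @deprecated('0.7', message="frobnicate() is going away")
    def frobnicate():
        """Frobnicate things."""
        return 42

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        assert frobnicate() == 42
    assert len(caught) == 1                    # SADeprecationWarning
    assert '.. deprecated:: 0.7' in frobnicate.__doc__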
-def pending_deprecation(version, message=None,
- add_deprecation_to_docstring=True):
- """Decorates a function and issues a pending deprecation warning on use.
-
- :param version:
- An approximate future version at which point the pending deprecation
- will become deprecated. Not used in messaging.
-
- :param message:
- If provided, issue message in the warning. A sensible default
- is used if not provided.
-
- :param add_deprecation_to_docstring:
- Default True. If False, the wrapped function's __doc__ is left
- as-is. If True, the 'message' is prepended to the docs if
- provided, or sensible default if message is omitted.
- """
-
- if add_deprecation_to_docstring:
- header = ".. deprecated:: %s (pending) %s" % \
- (version, (message or ''))
- else:
- header = None
-
- if message is None:
- message = "Call to deprecated function %(func)s"
-
- def decorate(fn):
- return _decorate_with_warning(
- fn, exc.SAPendingDeprecationWarning,
- message % dict(func=fn.__name__), header)
- return decorate
-
-def _sanitize_restructured_text(text):
- def repl(m):
- type_, name = m.group(1, 2)
- if type_ in ("func", "meth"):
- name += "()"
- return name
- return re.sub(r'\:(\w+)\:`~?\.?(.+?)`', repl, text)
-
-
-def _decorate_with_warning(func, wtype, message, docstring_header=None):
- """Wrap a function with a warnings.warn and augmented docstring."""
-
- message = _sanitize_restructured_text(message)
-
- @decorator
- def warned(fn, *args, **kwargs):
- warnings.warn(wtype(message), stacklevel=3)
- return fn(*args, **kwargs)
-
- doc = func.__doc__ is not None and func.__doc__ or ''
- if docstring_header is not None:
- docstring_header %= dict(func=func.__name__)
- docs = doc and doc.expandtabs().split('\n') or []
- indent = ''
- for line in docs[1:]:
- text = line.lstrip()
- if text:
- indent = line[0:len(line) - len(text)]
- break
- point = min(len(docs), 1)
- docs.insert(point, '\n' + indent + docstring_header.rstrip())
- doc = '\n'.join(docs)
-
- decorated = warned(func)
- decorated.__doc__ = doc
- return decorated
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/langhelpers.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/langhelpers.py
deleted file mode 100755
index ba612bc2..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/langhelpers.py
+++ /dev/null
@@ -1,791 +0,0 @@
-# util/langhelpers.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Routines to help with the creation, loading and introspection of
-modules, classes, hierarchies, attributes, functions, and methods.
-
-"""
-import itertools
-import inspect
-import operator
-import re
-import sys
-import types
-import warnings
-from compat import update_wrapper, set_types, threading
-from sqlalchemy import exc
-
-def _unique_symbols(used, *bases):
- used = set(used)
- for base in bases:
- pool = itertools.chain((base,),
- itertools.imap(lambda i: base + str(i),
- xrange(1000)))
- for sym in pool:
- if sym not in used:
- used.add(sym)
- yield sym
- break
- else:
- raise NameError("exhausted namespace for symbol base %s" % base)
-
-def decorator(target):
- """A signature-matching decorator factory."""
-
- def decorate(fn):
- if not inspect.isfunction(fn):
- raise Exception("not a decoratable function")
- spec = inspect.getargspec(fn)
- names = tuple(spec[0]) + spec[1:3] + (fn.func_name,)
- targ_name, fn_name = _unique_symbols(names, 'target', 'fn')
-
- metadata = dict(target=targ_name, fn=fn_name)
- metadata.update(format_argspec_plus(spec, grouped=False))
-
- code = 'lambda %(args)s: %(target)s(%(fn)s, %(apply_kw)s)' % (
- metadata)
- decorated = eval(code, {targ_name:target, fn_name:fn})
- decorated.func_defaults = getattr(fn, 'im_func', fn).func_defaults
- return update_wrapper(decorated, fn)
- return update_wrapper(decorate, target)
-
-
-
-def get_cls_kwargs(cls):
- """Return the full set of inherited kwargs for the given `cls`.
-
- Probes a class's __init__ method, collecting all named arguments. If the
- __init__ defines a \**kwargs catch-all, then the constructor is presumed to
- pass along unrecognized keywords to its base classes, and the collection
- process is repeated recursively on each of the bases.
-
- Uses a subset of inspect.getargspec() to cut down on method overhead.
- No anonymous tuple arguments, please!
-
- """
-
- for c in cls.__mro__:
- if '__init__' in c.__dict__:
- stack = set([c])
- break
- else:
- return []
-
- args = set()
- while stack:
- class_ = stack.pop()
- ctr = class_.__dict__.get('__init__', False)
- if (not ctr or
- not isinstance(ctr, types.FunctionType) or
- not isinstance(ctr.func_code, types.CodeType)):
- stack.update(class_.__bases__)
- continue
-
- # this is shorthand for
- # names, _, has_kw, _ = inspect.getargspec(ctr)
-
- names, has_kw = inspect_func_args(ctr)
- args.update(names)
- if has_kw:
- stack.update(class_.__bases__)
- args.discard('self')
- return args
-
-try:
- from inspect import CO_VARKEYWORDS
- def inspect_func_args(fn):
- co = fn.func_code
- nargs = co.co_argcount
- names = co.co_varnames
- args = list(names[:nargs])
- has_kw = bool(co.co_flags & CO_VARKEYWORDS)
- return args, has_kw
-except ImportError:
- def inspect_func_args(fn):
- names, _, has_kw, _ = inspect.getargspec(fn)
- return names, bool(has_kw)
-
-def get_func_kwargs(func):
- """Return the set of legal kwargs for the given `func`.
-
- Uses getargspec so is safe to call for methods, functions,
- etc.
-
- """
-
- return inspect.getargspec(func)[0]
-
-def format_argspec_plus(fn, grouped=True):
- """Returns a dictionary of formatted, introspected function arguments.
-
- An enhanced variant of inspect.formatargspec to support code generation.
-
- fn
- An inspectable callable or tuple of inspect getargspec() results.
- grouped
- Defaults to True; include (parens, around, argument) lists
-
- Returns:
-
- args
- Full inspect.formatargspec for fn
- self_arg
- The name of the first positional argument, varargs[0], or None
- if the function defines no positional arguments.
- apply_pos
- args, re-written in calling rather than receiving syntax. Arguments are
- passed positionally.
- apply_kw
- Like apply_pos, except keyword-ish args are passed as keywords.
-
- Example::
-
- >>> format_argspec_plus(lambda self, a, b, c=3, **d: 123)
- {'args': '(self, a, b, c=3, **d)',
- 'self_arg': 'self',
- 'apply_kw': '(self, a, b, c=c, **d)',
- 'apply_pos': '(self, a, b, c, **d)'}
-
- """
- spec = callable(fn) and inspect.getargspec(fn) or fn
- args = inspect.formatargspec(*spec)
- if spec[0]:
- self_arg = spec[0][0]
- elif spec[1]:
- self_arg = '%s[0]' % spec[1]
- else:
- self_arg = None
- apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2])
- defaulted_vals = spec[3] is not None and spec[0][0-len(spec[3]):] or ()
- apply_kw = inspect.formatargspec(spec[0], spec[1], spec[2], defaulted_vals,
- formatvalue=lambda x: '=' + x)
- if grouped:
- return dict(args=args, self_arg=self_arg,
- apply_pos=apply_pos, apply_kw=apply_kw)
- else:
- return dict(args=args[1:-1], self_arg=self_arg,
- apply_pos=apply_pos[1:-1], apply_kw=apply_kw[1:-1])
-
-def format_argspec_init(method, grouped=True):
- """format_argspec_plus with considerations for typical __init__ methods
-
- Wraps format_argspec_plus with error handling strategies for typical
- __init__ cases::
-
- object.__init__ -> (self)
- other unreflectable (usually C) -> (self, *args, **kwargs)
-
- """
- try:
- return format_argspec_plus(method, grouped=grouped)
- except TypeError:
- self_arg = 'self'
- if method is object.__init__:
- args = grouped and '(self)' or 'self'
- else:
- args = (grouped and '(self, *args, **kwargs)'
- or 'self, *args, **kwargs')
- return dict(self_arg='self', args=args, apply_pos=args, apply_kw=args)
-
-def getargspec_init(method):
- """inspect.getargspec with considerations for typical __init__ methods
-
- Wraps inspect.getargspec with error handling for typical __init__ cases::
-
- object.__init__ -> (self)
- other unreflectable (usually C) -> (self, *args, **kwargs)
-
- """
- try:
- return inspect.getargspec(method)
- except TypeError:
- if method is object.__init__:
- return (['self'], None, None, None)
- else:
- return (['self'], 'args', 'kwargs', None)
-
-
-def unbound_method_to_callable(func_or_cls):
- """Adjust the incoming callable such that a 'self' argument is not required."""
-
- if isinstance(func_or_cls, types.MethodType) and not func_or_cls.im_self:
- return func_or_cls.im_func
- else:
- return func_or_cls
-
-class portable_instancemethod(object):
- """Turn an instancemethod into a (parent, name) pair
- to produce a serializable callable.
-
- """
- def __init__(self, meth):
- self.target = meth.im_self
- self.name = meth.__name__
-
- def __call__(self, *arg, **kw):
- return getattr(self.target, self.name)(*arg, **kw)
-
-def class_hierarchy(cls):
- """Return an unordered sequence of all classes related to cls.
-
- Traverses diamond hierarchies.
-
- Fibs slightly: subclasses of builtin types are not returned. Thus
- class_hierarchy(class A(object)) returns (A, object), not A plus every
- class systemwide that derives from object.
-
- Old-style classes are discarded and hierarchies rooted on them
- will not be descended.
-
- """
- # Py2K
- if isinstance(cls, types.ClassType):
- return list()
- # end Py2K
- hier = set([cls])
- process = list(cls.__mro__)
- while process:
- c = process.pop()
- # Py2K
- if isinstance(c, types.ClassType):
- continue
- for b in (_ for _ in c.__bases__
- if _ not in hier and not isinstance(_, types.ClassType)):
- # end Py2K
- # Py3K
- #for b in (_ for _ in c.__bases__
- # if _ not in hier):
- process.append(b)
- hier.add(b)
- # Py3K
- #if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
- # continue
- # Py2K
- if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
- continue
- # end Py2K
- for s in [_ for _ in c.__subclasses__() if _ not in hier]:
- process.append(s)
- hier.add(s)
- return list(hier)
-
-def iterate_attributes(cls):
- """iterate all the keys and attributes associated
- with a class, without using getattr().
-
- Does not use getattr() so that class-sensitive
- descriptors (i.e. property.__get__()) are not called.
-
- """
- keys = dir(cls)
- for key in keys:
- for c in cls.__mro__:
- if key in c.__dict__:
- yield (key, c.__dict__[key])
- break
-
-def monkeypatch_proxied_specials(into_cls, from_cls, skip=None, only=None,
- name='self.proxy', from_instance=None):
- """Automates delegation of __specials__ for a proxying type."""
-
- if only:
- dunders = only
- else:
- if skip is None:
- skip = ('__slots__', '__del__', '__getattribute__',
- '__metaclass__', '__getstate__', '__setstate__')
- dunders = [m for m in dir(from_cls)
- if (m.startswith('__') and m.endswith('__') and
- not hasattr(into_cls, m) and m not in skip)]
- for method in dunders:
- try:
- fn = getattr(from_cls, method)
- if not hasattr(fn, '__call__'):
- continue
- fn = getattr(fn, 'im_func', fn)
- except AttributeError:
- continue
- try:
- spec = inspect.getargspec(fn)
- fn_args = inspect.formatargspec(spec[0])
- d_args = inspect.formatargspec(spec[0][1:])
- except TypeError:
- fn_args = '(self, *args, **kw)'
- d_args = '(*args, **kw)'
-
- py = ("def %(method)s%(fn_args)s: "
- "return %(name)s.%(method)s%(d_args)s" % locals())
-
- env = from_instance is not None and {name: from_instance} or {}
- exec py in env
- try:
- env[method].func_defaults = fn.func_defaults
- except AttributeError:
- pass
- setattr(into_cls, method, env[method])
-
-
-def methods_equivalent(meth1, meth2):
- """Return True if the two methods are the same implementation."""
-
- # Py3K
- #return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
- # Py2K
- return getattr(meth1, 'im_func', meth1) is getattr(meth2, 'im_func', meth2)
- # end Py2K
-
-def as_interface(obj, cls=None, methods=None, required=None):
- """Ensure basic interface compliance for an instance or dict of callables.
-
- Checks that ``obj`` implements public methods of ``cls`` or has members
- listed in ``methods``. If ``required`` is not supplied, implementing at
- least one interface method is sufficient. Methods present on ``obj`` that
- are not in the interface are ignored.
-
- If ``obj`` is a dict and ``dict`` does not meet the interface
- requirements, the keys of the dictionary are inspected. Keys present in
- ``obj`` that are not in the interface will raise TypeErrors.
-
- Raises TypeError if ``obj`` does not meet the interface criteria.
-
- In all passing cases, an object with callable members is returned. In the
- simple case, ``obj`` is returned as-is; if dict processing kicks in then
- an anonymous class is returned.
-
- obj
- A type, instance, or dictionary of callables.
- cls
- Optional, a type. All public methods of cls are considered the
- interface. An ``obj`` instance of cls will always pass, ignoring
- ``required``.
- methods
- Optional, a sequence of method names to consider as the interface.
- required
- Optional, a sequence of mandatory implementations. If omitted, an
- ``obj`` that provides at least one interface method is considered
- sufficient. As a convenience, required may be a type, in which case
- all public methods of the type are required.
-
- """
- if not cls and not methods:
- raise TypeError('a class or collection of method names are required')
-
- if isinstance(cls, type) and isinstance(obj, cls):
- return obj
-
- interface = set(methods or [m for m in dir(cls) if not m.startswith('_')])
- implemented = set(dir(obj))
-
- complies = operator.ge
- if isinstance(required, type):
- required = interface
- elif not required:
- required = set()
- complies = operator.gt
- else:
- required = set(required)
-
- if complies(implemented.intersection(interface), required):
- return obj
-
- # No dict duck typing here.
- if not type(obj) is dict:
- qualifier = complies is operator.gt and 'any of' or 'all of'
- raise TypeError("%r does not implement %s: %s" % (
- obj, qualifier, ', '.join(interface)))
-
- class AnonymousInterface(object):
- """A callable-holding shell."""
-
- if cls:
- AnonymousInterface.__name__ = 'Anonymous' + cls.__name__
- found = set()
-
- for method, impl in dictlike_iteritems(obj):
- if method not in interface:
- raise TypeError("%r: unknown in this interface" % method)
- if not callable(impl):
- raise TypeError("%r=%r is not callable" % (method, impl))
- setattr(AnonymousInterface, method, staticmethod(impl))
- found.add(method)
-
- if complies(found, required):
- return AnonymousInterface
-
- raise TypeError("dictionary does not contain required keys %s" %
- ', '.join(required - found))
-
-
-class memoized_property(object):
- """A read-only @property that is only evaluated once."""
- def __init__(self, fget, doc=None):
- self.fget = fget
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- obj.__dict__[self.__name__] = result = self.fget(obj)
- return result
-
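A usage sketch: the getter runs once per instance, after which the computed value in ``obj.__dict__`` shadows this (non-data) descriptor::

    from sqlalchemy.util import memoized_property

    class Widget(object):
        @memoized_property
        def total(self):
            return 1 + 1              # evaluated on first access only

    w = Widget()
    assert w.total == 2
    assert 'total' in w.__dict__      # later reads bypass the descriptor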
-
-class memoized_instancemethod(object):
- """Decorate a method to memoize its return value.
-
- Best applied to no-arg methods: memoization is not sensitive to
- argument values, and will always return the same value even when
- called with different arguments.
-
- """
- def __init__(self, fget, doc=None):
- self.fget = fget
- self.__doc__ = doc or fget.__doc__
- self.__name__ = fget.__name__
-
- def __get__(self, obj, cls):
- if obj is None:
- return self
- def oneshot(*args, **kw):
- result = self.fget(obj, *args, **kw)
- memo = lambda *a, **kw: result
- memo.__name__ = self.__name__
- memo.__doc__ = self.__doc__
- obj.__dict__[self.__name__] = memo
- return result
- oneshot.__name__ = self.__name__
- oneshot.__doc__ = self.__doc__
- return oneshot
-
-def reset_memoized(instance, name):
- instance.__dict__.pop(name, None)
-
-
-class group_expirable_memoized_property(object):
- """A family of @memoized_properties that can be expired in tandem."""
-
- def __init__(self):
- self.attributes = []
-
- def expire_instance(self, instance):
- """Expire all memoized properties for *instance*."""
- stash = instance.__dict__
- for attribute in self.attributes:
- stash.pop(attribute, None)
-
- def __call__(self, fn):
- self.attributes.append(fn.__name__)
- return memoized_property(fn)
-
-class importlater(object):
- """Deferred import object.
-
- e.g.::
-
- somesubmod = importlater("mypackage.somemodule", "somesubmod")
-
- is equivalent to::
-
- from mypackage.somemodule import somesubmod
-
- except evaluated upon attribute access to "somesubmod".
-
- """
- def __init__(self, path, addtl=None):
- self._il_path = path
- self._il_addtl = addtl
-
- @memoized_property
- def module(self):
- if self._il_addtl:
- m = __import__(self._il_path, globals(), locals(),
- [self._il_addtl])
- try:
- return getattr(m, self._il_addtl)
- except AttributeError:
- raise ImportError(
- "Module %s has no attribute '%s'" %
- (self._il_path, self._il_addtl)
- )
- else:
- m = __import__(self._il_path)
- for token in self._il_path.split(".")[1:]:
- m = getattr(m, token)
- return m
-
- def __getattr__(self, key):
- try:
- attr = getattr(self.module, key)
- except AttributeError:
- raise AttributeError(
- "Module %s has no attribute '%s'" %
- (self._il_path, key)
- )
- self.__dict__[key] = attr
- return attr
-
-# from paste.deploy.converters
-def asbool(obj):
- if isinstance(obj, (str, unicode)):
- obj = obj.strip().lower()
- if obj in ['true', 'yes', 'on', 'y', 't', '1']:
- return True
- elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
- return False
- else:
- raise ValueError("String is not true/false: %r" % obj)
- return bool(obj)
-
-def bool_or_str(*text):
- """Return a callable that will evaluate a string as
- boolean, or one of a set of "alternate" string values.
-
- """
- def bool_or_value(obj):
- if obj in text:
- return obj
- else:
- return asbool(obj)
- return bool_or_value
-
-def asint(value):
- """Coerce to integer."""
-
- if value is None:
- return value
- return int(value)
-
-
-def coerce_kw_type(kw, key, type_, flexi_bool=True):
- """If 'key' is present in dict 'kw', coerce its value to type 'type\_' if
- necessary. If 'flexi_bool' is True, boolean coercion goes through
- asbool(), so strings such as '0', 'false' and 'no' are considered
- false.
- """
-
- if key in kw and type(kw[key]) is not type_ and kw[key] is not None:
- if type_ is bool and flexi_bool:
- kw[key] = asbool(kw[key])
- else:
- kw[key] = type_(kw[key])
-
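# A sketch with hypothetical keyword names:
kw = {'pool_size': '5', 'echo': '0', 'timeout': None}
coerce_kw_type(kw, 'pool_size', int)    # '5' -> 5
coerce_kw_type(kw, 'echo', bool)        # '0' -> False, via asbool()
coerce_kw_type(kw, 'timeout', int)      # None values are left untouched
coerce_kw_type(kw, 'missing', int)      # absent keys are ignored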
-
-def constructor_copy(obj, cls, **kw):
- """Instantiate cls using the __dict__ of obj as constructor arguments.
-
- Uses inspect to match the named arguments of ``cls``.
-
- """
-
- names = get_cls_kwargs(cls)
- kw.update((k, obj.__dict__[k]) for k in names if k in obj.__dict__)
- return cls(**kw)
-
-
-def duck_type_collection(specimen, default=None):
- """Given an instance or class, guess if it is or is acting as one of
- the basic collection types: list, set and dict. If the __emulates__
- property is present, return that preferentially.
- """
-
- if hasattr(specimen, '__emulates__'):
- # canonicalize set vs sets.Set to a standard: the builtin set
- if (specimen.__emulates__ is not None and
- issubclass(specimen.__emulates__, set_types)):
- return set
- else:
- return specimen.__emulates__
-
- # classes are tested with issubclass(), instances with isinstance()
- isa = isinstance(specimen, type) and issubclass or isinstance
- if isa(specimen, list):
- return list
- elif isa(specimen, set_types):
- return set
- elif isa(specimen, dict):
- return dict
-
- if hasattr(specimen, 'append'):
- return list
- elif hasattr(specimen, 'add'):
- return set
- elif hasattr(specimen, 'set'):
- return dict
- else:
- return default
-
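# A sketch of the guessing rules; MyColl and Appendable are hypothetical:
duck_type_collection([])             # list
duck_type_collection(dict)           # classes work too -> dict

class MyColl(object):
    __emulates__ = set               # an explicit declaration wins
duck_type_collection(MyColl())       # set

class Appendable(object):
    def append(self, item):
        pass
duck_type_collection(Appendable())   # list, inferred from 'append'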
-def assert_arg_type(arg, argtype, name):
- if isinstance(arg, argtype):
- return arg
- else:
- if isinstance(argtype, tuple):
- raise exc.ArgumentError(
- "Argument '%s' is expected to be one of type %s, got '%s'" %
- (name, ' or '.join("'%s'" % a for a in argtype), type(arg)))
- else:
- raise exc.ArgumentError(
- "Argument '%s' is expected to be of type '%s', got '%s'" %
- (name, argtype, type(arg)))
-
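# Behavior sketch:
assert_arg_type(5, int, 'count')           # returns 5 unchanged
assert_arg_type(5, (int, float), 'n')      # a tuple of types is accepted
# assert_arg_type('5', int, 'count')       # would raise exc.ArgumentError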
-
-def dictlike_iteritems(dictlike):
- """Return a (key, value) iterator for almost any dict-like object."""
-
- # Py3K
- #if hasattr(dictlike, 'items'):
- # return dictlike.items()
- # Py2K
- if hasattr(dictlike, 'iteritems'):
- return dictlike.iteritems()
- elif hasattr(dictlike, 'items'):
- return iter(dictlike.items())
- # end Py2K
-
- getter = getattr(dictlike, '__getitem__', getattr(dictlike, 'get', None))
- if getter is None:
- raise TypeError(
- "Object '%r' is not dict-like" % dictlike)
-
- if hasattr(dictlike, 'iterkeys'):
- def iterator():
- for key in dictlike.iterkeys():
- yield key, getter(key)
- return iterator()
- elif hasattr(dictlike, 'keys'):
- return iter((key, getter(key)) for key in dictlike.keys())
- else:
- raise TypeError(
- "Object '%r' is not dict-like" % dictlike)
-
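# A sketch with a minimal dict-like object (Settings is hypothetical):
list(dictlike_iteritems({'a': 1}))      # [('a', 1)] -- real dicts just work

class Settings(object):
    def keys(self):
        return ['x']
    def __getitem__(self, key):
        return 1

list(dictlike_iteritems(Settings()))    # [('x', 1)], via keys() + getter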
-
-class classproperty(property):
- """A decorator that behaves like @property except that operates
- on classes rather than instances.
-
- This decorator receives special handling when used with the
- declarative module, but note that the
- :class:`~.sqlalchemy.ext.declarative.declared_attr`
- decorator should be used for this purpose with declarative.
-
- """
-
- def __init__(self, fget, *arg, **kw):
- super(classproperty, self).__init__(fget, *arg, **kw)
- self.__doc__ = fget.__doc__
-
- def __get__(desc, self, cls):
- return desc.fget(cls)
-
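# A small sketch (Widget is hypothetical): the getter receives the
# class itself, so no instance is needed:
class Widget(object):
    @classproperty
    def registry_key(cls):
        return cls.__name__.lower()

Widget.registry_key     # 'widget'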
-
-class _symbol(object):
- def __init__(self, name, doc=None):
- """Construct a new named symbol."""
- assert isinstance(name, str)
- self.name = name
- if doc:
- self.__doc__ = doc
- def __reduce__(self):
- return symbol, (self.name,)
- def __repr__(self):
- return "<symbol '%s>" % self.name
-
-_symbol.__name__ = 'symbol'
-
-
-class symbol(object):
- """A constant symbol.
-
- >>> symbol('foo') is symbol('foo')
- True
- >>> symbol('foo')
- <symbol 'foo'>
-
- A slight refinement of the MAGICCOOKIE=object() pattern. The primary
- advantage of symbol() is its repr(). They are also singletons.
-
- Repeated calls of symbol('name') will all return the same instance.
-
- The optional ``doc`` argument assigns to ``__doc__``. This
- is strictly so that Sphinx autoattr picks up the docstring we want
- (it doesn't appear to pick up the in-module docstring if the datamember
- is in a different module - autoattribute also blows up completely).
- If Sphinx fixes/improves this then we would no longer need
- ``doc`` here.
-
- """
- symbols = {}
- _lock = threading.Lock()
-
- def __new__(cls, name, doc=None):
- cls._lock.acquire()
- try:
- sym = cls.symbols.get(name)
- if sym is None:
- cls.symbols[name] = sym = _symbol(name, doc)
- return sym
- finally:
- cls._lock.release()
-
-
-_creation_order = 1
-def set_creation_order(instance):
- """Assign a '_creation_order' sequence to the given instance.
-
- This allows multiple instances to be sorted in order of creation
- (typically within a single thread; the counter is not particularly
- threadsafe).
-
- """
- global _creation_order
- instance._creation_order = _creation_order
- _creation_order += 1
-
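# A sketch (Column is hypothetical): instances become sortable by the
# order in which they were constructed:
class Column(object):
    def __init__(self):
        set_creation_order(self)

a, b, c = Column(), Column(), Column()
sorted([c, a, b], key=lambda col: col._creation_order)   # [a, b, c]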
-def warn_exception(func, *args, **kwargs):
- """Execute the given function, catching all exceptions and
- converting them to a warning."""
- try:
- return func(*args, **kwargs)
- except Exception:
- warn("%s('%s') ignored" % sys.exc_info()[0:2])
-
-
-def warn(msg, stacklevel=3):
- """Issue a warning.
-
- If msg is a string, :class:`.exc.SAWarning` is used as
- the category.
-
- .. note:: This function is swapped out when the test suite
- runs, with a compatible version that uses
- warnings.warn_explicit, so that the warnings registry can
- be controlled.
-
- """
- if isinstance(msg, basestring):
- warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
- else:
- warnings.warn(msg, stacklevel=stacklevel)
-
-_SQLA_RE = re.compile(r'sqlalchemy/([a-z_]+/){0,2}[a-z_]+\.py')
-_UNITTEST_RE = re.compile(r'unit(?:2|test2?/)')
-def chop_traceback(tb, exclude_prefix=_UNITTEST_RE, exclude_suffix=_SQLA_RE):
- """Chop extraneous lines off beginning and end of a traceback.
-
- :param tb:
- a list of traceback lines as returned by ``traceback.format_stack()``
-
- :param exclude_prefix:
- a regular expression object matching lines to skip at beginning of ``tb``
-
- :param exclude_suffix:
- a regular expression object matching lines to skip at end of ``tb``
- """
- start = 0
- end = len(tb) - 1
- while start <= end and exclude_prefix.search(tb[start]):
- start += 1
- while start <= end and exclude_suffix.search(tb[end]):
- end -= 1
- return tb[start:end+1]
-
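# A sketch using the module-level defaults (_UNITTEST_RE / _SQLA_RE):
import traceback

full = traceback.format_stack()
trimmed = chop_traceback(full)   # leading unittest frames and trailing
                                 # sqlalchemy-internal frames are removed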
-NoneType = type(None)
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/queue.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/queue.py
deleted file mode 100755
index db717595..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/queue.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# util/queue.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""An adaptation of Py2.3/2.4's Queue module which supports reentrant
-behavior, using RLock instead of Lock for its mutex object.
-
-This is to support the connection pool's usage of weakref callbacks to return
-connections to the underlying Queue, which can in extremely
-rare cases be invoked within the ``get()`` method of the Queue itself,
-producing a ``put()`` inside the ``get()`` and therefore a reentrant
-condition."""
-
-from collections import deque
-from time import time as _time
-from sqlalchemy.util import threading
-
-__all__ = ['Empty', 'Full', 'Queue']
-
-class Empty(Exception):
- "Exception raised by Queue.get(block=0)/get_nowait()."
-
- pass
-
-class Full(Exception):
- "Exception raised by Queue.put(block=0)/put_nowait()."
-
- pass
-
-class Queue:
- def __init__(self, maxsize=0):
- """Initialize a queue object with a given maximum size.
-
- If `maxsize` is <= 0, the queue size is infinite.
- """
-
- self._init(maxsize)
- # mutex must be held whenever the queue is mutating. All methods
- # that acquire mutex must release it before returning. mutex
- # is shared between the two conditions, so acquiring and
- # releasing the conditions also acquires and releases mutex.
- self.mutex = threading.RLock()
- # Notify not_empty whenever an item is added to the queue; a
- # thread waiting to get is notified then.
- self.not_empty = threading.Condition(self.mutex)
- # Notify not_full whenever an item is removed from the queue;
- # a thread waiting to put is notified then.
- self.not_full = threading.Condition(self.mutex)
-
- def qsize(self):
- """Return the approximate size of the queue (not reliable!)."""
-
- self.mutex.acquire()
- n = self._qsize()
- self.mutex.release()
- return n
-
- def empty(self):
- """Return True if the queue is empty, False otherwise (not
- reliable!)."""
-
- self.mutex.acquire()
- n = self._empty()
- self.mutex.release()
- return n
-
- def full(self):
- """Return True if the queue is full, False otherwise (not
- reliable!)."""
-
- self.mutex.acquire()
- n = self._full()
- self.mutex.release()
- return n
-
- def put(self, item, block=True, timeout=None):
- """Put an item into the queue.
-
- If optional arg `block` is True and `timeout` is None (the
- default), block if necessary until a free slot is
- available. If `timeout` is a positive number, it blocks at
- most `timeout` seconds and raises the ``Full`` exception if no
- free slot was available within that time. Otherwise (`block`
- is false), put an item on the queue if a free slot is
- immediately available, else raise the ``Full`` exception
- (`timeout` is ignored in that case).
- """
-
- self.not_full.acquire()
- try:
- if not block:
- if self._full():
- raise Full
- elif timeout is None:
- while self._full():
- self.not_full.wait()
- else:
- if timeout < 0:
- raise ValueError("'timeout' must be a non-negative number")
- endtime = _time() + timeout
- while self._full():
- remaining = endtime - _time()
- if remaining <= 0.0:
- raise Full
- self.not_full.wait(remaining)
- self._put(item)
- self.not_empty.notify()
- finally:
- self.not_full.release()
-
- def put_nowait(self, item):
- """Put an item into the queue without blocking.
-
- Only enqueue the item if a free slot is immediately available.
- Otherwise raise the ``Full`` exception.
- """
- return self.put(item, False)
-
- def get(self, block=True, timeout=None):
- """Remove and return an item from the queue.
-
- If optional arg `block` is True and `timeout` is None (the
- default), block if necessary until an item is available. If
- `timeout` is a positive number, it blocks at most `timeout`
- seconds and raises the ``Empty`` exception if no item was
- available within that time. Otherwise (`block` is false),
- return an item if one is immediately available, else raise the
- ``Empty`` exception (`timeout` is ignored in that case).
- """
-
- self.not_empty.acquire()
- try:
- if not block:
- if self._empty():
- raise Empty
- elif timeout is None:
- while self._empty():
- self.not_empty.wait()
- else:
- if timeout < 0:
- raise ValueError("'timeout' must be a non-negative number")
- endtime = _time() + timeout
- while self._empty():
- remaining = endtime - _time()
- if remaining <= 0.0:
- raise Empty
- self.not_empty.wait(remaining)
- item = self._get()
- self.not_full.notify()
- return item
- finally:
- self.not_empty.release()
-
- def get_nowait(self):
- """Remove and return an item from the queue without blocking.
-
- Only get an item if one is immediately available. Otherwise
- raise the ``Empty`` exception.
- """
-
- return self.get(False)
-
- # Override these methods to implement other queue organizations
- # (e.g. stack or priority queue).
- # These will only be called with appropriate locks held
-
- # Initialize the queue representation
- def _init(self, maxsize):
- self.maxsize = maxsize
- self.queue = deque()
-
- def _qsize(self):
- return len(self.queue)
-
- # Check whether the queue is empty
- def _empty(self):
- return not self.queue
-
- # Check whether the queue is full
- def _full(self):
- return self.maxsize > 0 and len(self.queue) == self.maxsize
-
- # Put a new item in the queue
- def _put(self, item):
- self.queue.append(item)
-
- # Get an item from the queue
- def _get(self):
- return self.queue.popleft()
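# A blocking-semantics sketch; the API mirrors the stdlib Queue, with
# FIFO ordering from the underlying deque:
q = Queue(maxsize=2)
q.put('a')
q.put('b')
try:
    q.put_nowait('c')          # queue is full; non-blocking put raises
except Full:
    pass
q.get()                        # 'a' -- FIFO order
q.put('c', timeout=1.0)        # a slot is free again, returns at once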
diff --git a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/topological.py b/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/topological.py
deleted file mode 100755
index 8f340647..00000000
--- a/lib/python2.7/site-packages/SQLAlchemy-0.7.0-py2.7-linux-x86_64.egg/sqlalchemy/util/topological.py
+++ /dev/null
@@ -1,83 +0,0 @@
-# util/topological.py
-# Copyright (C) 2005-2011 the SQLAlchemy authors and contributors <see AUTHORS file>
-#
-# This module is part of SQLAlchemy and is released under
-# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-"""Topological sorting algorithms."""
-
-from sqlalchemy.exc import CircularDependencyError
-from sqlalchemy import util
-
-
-__all__ = ['sort', 'sort_as_subsets', 'find_cycles']
-
-def sort_as_subsets(tuples, allitems):
- """Sort by dependency, yielding successive sets of mutually-independent
- items whose dependencies have all appeared in prior sets."""
-
- edges = util.defaultdict(set)
- for parent, child in tuples:
- edges[child].add(parent)
-
- todo = set(allitems)
-
- while todo:
- output = set()
- for node in list(todo):
- if not todo.intersection(edges[node]):
- output.add(node)
-
- if not output:
- raise CircularDependencyError(
- "Circular dependency detected",
- find_cycles(tuples, allitems),
- _gen_edges(edges)
- )
-
- todo.difference_update(output)
- yield output
-
-def sort(tuples, allitems):
- """sort the given list of items by dependency.
-
- 'tuples' is a list of tuples representing a partial ordering.
- """
-
- for set_ in sort_as_subsets(tuples, allitems):
- for s in set_:
- yield s
-
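# A sketch: edges are (parent, child), meaning parent must sort first.
tuples = [('a', 'b'), ('a', 'c'), ('b', 'd')]
list(sort_as_subsets(tuples, ['a', 'b', 'c', 'd']))
# [set(['a']), set(['b', 'c']), set(['d'])] -- each set depends only
# on the sets before it

list(sort(tuples, ['a', 'b', 'c', 'd']))
# e.g. ['a', 'c', 'b', 'd'] -- a valid total order; ties are arbitrary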
-def find_cycles(tuples, allitems):
- # adapted from Guido van Rossum's cycle-detection recipe, with some mods
- todo = set(allitems)
-
- edges = util.defaultdict(set)
- for parent, child in tuples:
- edges[parent].add(child)
-
- output = set()
-
- while todo:
- node = todo.pop()
- stack = [node]
- while stack:
- top = stack[-1]
- for node in edges[top]:
- if node in stack:
- cyc = stack[stack.index(node):]
- todo.difference_update(cyc)
- output.update(cyc)
-
- if node in todo:
- stack.append(node)
- todo.remove(node)
- break
- else:
- node = stack.pop()
- return output
-
-def _gen_edges(edges):
- return set([
- (right, left)
- for left in edges
- for right in edges[left]
- ])
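# A find_cycles sketch: only nodes participating in a cycle are returned.
tuples = [('a', 'b'), ('b', 'a'), ('a', 'c')]
find_cycles(tuples, ['a', 'b', 'c'])    # set(['a', 'b']); 'c' is acyclic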