author    Mike Bayer <mike_mp@zzzcomputing.com>    2013-05-26 18:08:03 -0400
committer    Mike Bayer <mike_mp@zzzcomputing.com>    2013-05-26 18:08:03 -0400
commit    ebc03d5e63f41b51a5adf24b9c84978d09b74818 (patch)
tree    f139a322bfdb78498e6c908d3cf2271cb83d1bb3
parent    38f65baf1895a679aacf9981d60af6cd7782df82 (diff)
download    sqlalchemy-ebc03d5e63f41b51a5adf24b9c84978d09b74818.tar.gz
remove this
-rw-r--r--    clean_2to3.diff    13522
1 files changed, 0 insertions, 13522 deletions
diff --git a/clean_2to3.diff b/clean_2to3.diff
deleted file mode 100644
index 7020fb956..000000000
--- a/clean_2to3.diff
+++ /dev/null
@@ -1,13522 +0,0 @@
-diff -r 9d0639b9d3be examples/adjacency_list/adjacency_list.py
---- a/examples/adjacency_list/adjacency_list.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/adjacency_list/adjacency_list.py Sat Apr 27 19:42:17 2013 -0400
-@@ -47,7 +47,7 @@
- "\n" + \
- "".join([
- c.dump(_indent +1)
-- for c in self.children.values()]
-+ for c in list(self.children.values())]
- )
-
- if __name__ == '__main__':
-@@ -55,9 +55,9 @@
-
- def msg(msg, *args):
- msg = msg % args
-- print "\n\n\n" + "-" * len(msg.split("\n")[0])
-- print msg
-- print "-" * len(msg.split("\n")[0])
-+ print("\n\n\n" + "-" * len(msg.split("\n")[0]))
-+ print(msg)
-+ print("-" * len(msg.split("\n")[0]))
-
- msg("Creating Tree Table:")
-
-diff -r 9d0639b9d3be examples/association/basic_association.py
---- a/examples/association/basic_association.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/association/basic_association.py Sat Apr 27 19:42:17 2013 -0400
-@@ -83,12 +83,12 @@
-
- # query the order, print items
- order = session.query(Order).filter_by(customer_name='john smith').one()
-- print [(order_item.item.description, order_item.price)
-- for order_item in order.order_items]
-+ print([(order_item.item.description, order_item.price)
-+ for order_item in order.order_items])
-
- # print customers who bought 'MySQL Crowbar' on sale
- q = session.query(Order).join('order_items', 'item')
- q = q.filter(and_(Item.description == 'MySQL Crowbar',
- Item.price > OrderItem.price))
-
-- print [order.customer_name for order in q]
-+ print([order.customer_name for order in q])
-diff -r 9d0639b9d3be examples/association/dict_of_sets_with_default.py
---- a/examples/association/dict_of_sets_with_default.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/association/dict_of_sets_with_default.py Sat Apr 27 19:42:17 2013 -0400
-@@ -75,13 +75,13 @@
- session.commit()
-
- a1 = session.query(A).first()
-- print a1.collections["1"]
-+ print(a1.collections["1"])
- a1.collections["1"].add(4)
- session.commit()
-
- a1.collections["2"].update([7, 8, 9])
- session.commit()
-
-- print a1.collections["2"]
-+ print(a1.collections["2"])
-
-
-diff -r 9d0639b9d3be examples/association/proxied_association.py
---- a/examples/association/proxied_association.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/association/proxied_association.py Sat Apr 27 19:42:17 2013 -0400
-@@ -86,16 +86,16 @@
- order = session.query(Order).filter_by(customer_name='john smith').one()
-
- # print items based on the OrderItem collection directly
-- print [(assoc.item.description, assoc.price, assoc.item.price)
-- for assoc in order.order_items]
-+ print([(assoc.item.description, assoc.price, assoc.item.price)
-+ for assoc in order.order_items])
-
- # print items based on the "proxied" items collection
-- print [(item.description, item.price)
-- for item in order.items]
-+ print([(item.description, item.price)
-+ for item in order.items])
-
- # print customers who bought 'MySQL Crowbar' on sale
- orders = session.query(Order).\
- join('order_items', 'item').\
- filter(Item.description == 'MySQL Crowbar').\
- filter(Item.price > OrderItem.price)
-- print [o.customer_name for o in orders]
-+ print([o.customer_name for o in orders])
-diff -r 9d0639b9d3be examples/custom_attributes/listen_for_events.py
---- a/examples/custom_attributes/listen_for_events.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/custom_attributes/listen_for_events.py Sat Apr 27 19:42:17 2013 -0400
-@@ -34,7 +34,7 @@
- if oldvalue:
- s += "which replaced the value '%s', " % oldvalue
- s += "on object %s" % self
-- print s
-+ print(s)
-
- Base = declarative_base(cls=Base)
-
-diff -r 9d0639b9d3be examples/dogpile_caching/advanced.py
---- a/examples/dogpile_caching/advanced.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/advanced.py Sat Apr 27 19:42:17 2013 -0400
-@@ -6,9 +6,9 @@
-
- """
-
--from environment import Session
--from model import Person, Address, cache_address_bits
--from caching_query import FromCache, RelationshipCache
-+from .environment import Session
-+from .model import Person, Address, cache_address_bits
-+from .caching_query import FromCache, RelationshipCache
- from sqlalchemy.orm import joinedload
-
- def load_name_range(start, end, invalidate=False):
-@@ -49,31 +49,31 @@
-
- return q.all()
-
--print "two through twelve, possibly from cache:\n"
--print ", ".join([p.name for p in load_name_range(2, 12)])
-+print("two through twelve, possibly from cache:\n")
-+print(", ".join([p.name for p in load_name_range(2, 12)]))
-
--print "\ntwenty five through forty, possibly from cache:\n"
--print ", ".join([p.name for p in load_name_range(25, 40)])
-+print("\ntwenty five through forty, possibly from cache:\n")
-+print(", ".join([p.name for p in load_name_range(25, 40)]))
-
- # loading them again, no SQL is emitted
--print "\ntwo through twelve, from the cache:\n"
--print ", ".join([p.name for p in load_name_range(2, 12)])
-+print("\ntwo through twelve, from the cache:\n")
-+print(", ".join([p.name for p in load_name_range(2, 12)]))
-
- # but with invalidate, they are
--print "\ntwenty five through forty, invalidate first:\n"
--print ", ".join([p.name for p in load_name_range(25, 40, True)])
-+print("\ntwenty five through forty, invalidate first:\n")
-+print(", ".join([p.name for p in load_name_range(25, 40, True)]))
-
- # illustrate the address loading from either cache/already
- # on the Person
--print "\n\nPeople plus addresses, two through twelve, addresses possibly from cache"
-+print("\n\nPeople plus addresses, two through twelve, addresses possibly from cache")
- for p in load_name_range(2, 12):
-- print p.format_full()
-+ print(p.format_full())
-
- # illustrate the address loading from either cache/already
- # on the Person
--print "\n\nPeople plus addresses, two through twelve, addresses from cache"
-+print("\n\nPeople plus addresses, two through twelve, addresses from cache")
- for p in load_name_range(2, 12):
-- print p.format_full()
-+ print(p.format_full())
-
--print "\n\nIf this was the first run of advanced.py, try "\
-- "a second run. Only one SQL statement will be emitted."
-+print("\n\nIf this was the first run of advanced.py, try "\
-+ "a second run. Only one SQL statement will be emitted.")
-diff -r 9d0639b9d3be examples/dogpile_caching/caching_query.py
---- a/examples/dogpile_caching/caching_query.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/caching_query.py Sat Apr 27 19:42:17 2013 -0400
-@@ -143,8 +143,8 @@
- # here we return the key as a long string. our "key mangler"
- # set up with the region will boil it down to an md5.
- return " ".join(
-- [unicode(compiled)] +
-- [unicode(params[k]) for k in sorted(params)])
-+ [str(compiled)] +
-+ [str(params[k]) for k in sorted(params)])
-
- class FromCache(MapperOption):
- """Specifies that a Query should load results from a cache."""
-diff -r 9d0639b9d3be examples/dogpile_caching/environment.py
---- a/examples/dogpile_caching/environment.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/environment.py Sat Apr 27 19:42:17 2013 -0400
-@@ -4,7 +4,7 @@
- bootstrap fixture data if necessary.
-
- """
--import caching_query
-+from . import caching_query
- from sqlalchemy import create_engine
- from sqlalchemy.orm import scoped_session, sessionmaker
- from sqlalchemy.ext.declarative import declarative_base
-@@ -31,7 +31,7 @@
- root = "./dogpile_data/"
-
- if not os.path.exists(root):
-- raw_input("Will create datafiles in %r.\n"
-+ input("Will create datafiles in %r.\n"
- "To reset the cache + database, delete this directory.\n"
- "Press enter to continue.\n" % root
- )
-@@ -77,7 +77,7 @@
-
- def bootstrap():
- global installed
-- import fixture_data
-+ from . import fixture_data
- if not os.path.exists(dbfile):
- fixture_data.install()
- installed = True
-\ No newline at end of file
-diff -r 9d0639b9d3be examples/dogpile_caching/fixture_data.py
---- a/examples/dogpile_caching/fixture_data.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/fixture_data.py Sat Apr 27 19:42:17 2013 -0400
-@@ -5,8 +5,8 @@
- randomly selected postal code.
-
- """
--from environment import Session, Base
--from model import City, Country, PostalCode, Person, Address
-+from .environment import Session, Base
-+from .model import City, Country, PostalCode, Person, Address
- import random
-
- def install():
-@@ -35,7 +35,7 @@
- Session.add_all(pc)
- all_post_codes.extend(pc)
-
-- for i in xrange(1, 51):
-+ for i in range(1, 51):
- person = Person(
- "person %.2d" % i,
- Address(
-diff -r 9d0639b9d3be examples/dogpile_caching/helloworld.py
---- a/examples/dogpile_caching/helloworld.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/helloworld.py Sat Apr 27 19:42:17 2013 -0400
-@@ -4,12 +4,12 @@
-
- """
-
--from environment import Session
--from model import Person
--from caching_query import FromCache
-+from .environment import Session
-+from .model import Person
-+from .caching_query import FromCache
-
- # load Person objects. cache the result under the namespace "all_people".
--print "loading people...."
-+print("loading people....")
- people = Session.query(Person).options(FromCache("default")).all()
-
- # remove the Session. next query starts from scratch.
-@@ -17,12 +17,12 @@
-
- # load again, using the same FromCache option. now they're cached
- # under "all_people", no SQL is emitted.
--print "loading people....again!"
-+print("loading people....again!")
- people = Session.query(Person).options(FromCache("default")).all()
-
- # want to load on some different kind of query ? change the namespace
- # you send to FromCache
--print "loading people two through twelve"
-+print("loading people two through twelve")
- people_two_through_twelve = Session.query(Person).\
- options(FromCache("default")).\
- filter(Person.name.between("person 02", "person 12")).\
-@@ -32,7 +32,7 @@
- # the bind parameters of the query. So this query, having
- # different literal parameters under "Person.name.between()" than the
- # previous one, issues new SQL...
--print "loading people five through fifteen"
-+print("loading people five through fifteen")
- people_five_through_fifteen = Session.query(Person).\
- options(FromCache("default")).\
- filter(Person.name.between("person 05", "person 15")).\
-@@ -40,7 +40,7 @@
-
-
- # ... but using the same params as are already cached, no SQL
--print "loading people two through twelve...again!"
-+print("loading people two through twelve...again!")
- people_two_through_twelve = Session.query(Person).\
- options(FromCache("default")).\
- filter(Person.name.between("person 02", "person 12")).\
-@@ -51,7 +51,7 @@
- # each Query, which includes at the very least the same FromCache,
- # same list of objects to be loaded, and the same parameters in the
- # same order, then call invalidate().
--print "invalidating everything"
-+print("invalidating everything")
- Session.query(Person).options(FromCache("default")).invalidate()
- Session.query(Person).\
- options(FromCache("default")).\
-diff -r 9d0639b9d3be examples/dogpile_caching/local_session_caching.py
---- a/examples/dogpile_caching/local_session_caching.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/local_session_caching.py Sat Apr 27 19:42:17 2013 -0400
-@@ -53,8 +53,8 @@
-
-
- if __name__ == '__main__':
-- from environment import Session, regions
-- from caching_query import FromCache
-+ from .environment import Session, regions
-+ from .caching_query import FromCache
- from dogpile.cache import make_region
-
- # set up a region based on the ScopedSessionBackend,
-@@ -67,7 +67,7 @@
- }
- )
-
-- from model import Person
-+ from .model import Person
-
- # query to load Person by name, with criterion
- # of "person 10"
-diff -r 9d0639b9d3be examples/dogpile_caching/model.py
---- a/examples/dogpile_caching/model.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/model.py Sat Apr 27 19:42:17 2013 -0400
-@@ -10,8 +10,8 @@
- """
- from sqlalchemy import Column, Integer, String, ForeignKey
- from sqlalchemy.orm import relationship
--from caching_query import FromCache, RelationshipCache
--from environment import Base, bootstrap
-+from .caching_query import FromCache, RelationshipCache
-+from .environment import Base, bootstrap
-
- class Country(Base):
- __tablename__ = 'country'
-diff -r 9d0639b9d3be examples/dogpile_caching/relation_caching.py
---- a/examples/dogpile_caching/relation_caching.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dogpile_caching/relation_caching.py Sat Apr 27 19:42:17 2013 -0400
-@@ -5,16 +5,16 @@
- term cache.
-
- """
--from environment import Session, root
--from model import Person, cache_address_bits
-+from .environment import Session, root
-+from .model import Person, cache_address_bits
- from sqlalchemy.orm import joinedload
- import os
-
- for p in Session.query(Person).options(joinedload(Person.addresses), cache_address_bits):
-- print p.format_full()
-+ print(p.format_full())
-
-
--print "\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
-+print("\n\nIf this was the first run of relationship_caching.py, SQL was likely emitted to "\
- "load postal codes, cities, countries.\n"\
- "If run a second time, assuming the cache is still valid, "\
- "only a single SQL statement will run - all "\
-@@ -22,4 +22,4 @@
- "To clear the cache, delete the file %r. \n"\
- "This will cause a re-load of cities, postal codes and countries on "\
- "the next run.\n"\
-- % os.path.join(root, 'cache.dbm')
-+ % os.path.join(root, 'cache.dbm'))
-diff -r 9d0639b9d3be examples/dynamic_dict/dynamic_dict.py
---- a/examples/dynamic_dict/dynamic_dict.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/dynamic_dict/dynamic_dict.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,88 +1,88 @@
--class ProxyDict(object):
-- def __init__(self, parent, collection_name, childclass, keyname):
-- self.parent = parent
-- self.collection_name = collection_name
-- self.childclass = childclass
-- self.keyname = keyname
--
-- @property
-- def collection(self):
-- return getattr(self.parent, self.collection_name)
--
-- def keys(self):
-- descriptor = getattr(self.childclass, self.keyname)
-- return [x[0] for x in self.collection.values(descriptor)]
--
-- def __getitem__(self, key):
-- x = self.collection.filter_by(**{self.keyname:key}).first()
-- if x:
-- return x
-- else:
-- raise KeyError(key)
--
-- def __setitem__(self, key, value):
-- try:
-- existing = self[key]
-- self.collection.remove(existing)
-- except KeyError:
-- pass
-- self.collection.append(value)
--
--from sqlalchemy.ext.declarative import declarative_base
--from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
--from sqlalchemy.orm import sessionmaker, relationship
--
--engine = create_engine('sqlite://', echo=True)
--Base = declarative_base(engine)
--
--class Parent(Base):
-- __tablename__ = 'parent'
-- id = Column(Integer, primary_key=True)
-- name = Column(String(50))
-- _collection = relationship("Child", lazy="dynamic",
-- cascade="all, delete-orphan")
--
-- @property
-- def child_map(self):
-- return ProxyDict(self, '_collection', Child, 'key')
--
--class Child(Base):
-- __tablename__ = 'child'
-- id = Column(Integer, primary_key=True)
-- key = Column(String(50))
-- parent_id = Column(Integer, ForeignKey('parent.id'))
--
-- def __repr__(self):
-- return "Child(key=%r)" % self.key
--
--Base.metadata.create_all()
--
--sess = sessionmaker()()
--
--p1 = Parent(name='p1')
--sess.add(p1)
--
--print "\n---------begin setting nodes, autoflush occurs\n"
--p1.child_map['k1'] = Child(key='k1')
--p1.child_map['k2'] = Child(key='k2')
--
--# this will autoflush the current map.
--# ['k1', 'k2']
--print "\n---------print keys - flushes first\n"
--print p1.child_map.keys()
--
--# k1
--print "\n---------print 'k1' node\n"
--print p1.child_map['k1']
--
--print "\n---------update 'k2' node - must find existing, and replace\n"
--p1.child_map['k2'] = Child(key='k2')
--
--print "\n---------print 'k2' key - flushes first\n"
--# k2
--print p1.child_map['k2']
--
--print "\n---------print all child nodes\n"
--# [k1, k2b]
--print sess.query(Child).all()
--
-+class ProxyDict(object):
-+ def __init__(self, parent, collection_name, childclass, keyname):
-+ self.parent = parent
-+ self.collection_name = collection_name
-+ self.childclass = childclass
-+ self.keyname = keyname
-+
-+ @property
-+ def collection(self):
-+ return getattr(self.parent, self.collection_name)
-+
-+ def keys(self):
-+ descriptor = getattr(self.childclass, self.keyname)
-+ return [x[0] for x in self.collection.values(descriptor)]
-+
-+ def __getitem__(self, key):
-+ x = self.collection.filter_by(**{self.keyname:key}).first()
-+ if x:
-+ return x
-+ else:
-+ raise KeyError(key)
-+
-+ def __setitem__(self, key, value):
-+ try:
-+ existing = self[key]
-+ self.collection.remove(existing)
-+ except KeyError:
-+ pass
-+ self.collection.append(value)
-+
-+from sqlalchemy.ext.declarative import declarative_base
-+from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
-+from sqlalchemy.orm import sessionmaker, relationship
-+
-+engine = create_engine('sqlite://', echo=True)
-+Base = declarative_base(engine)
-+
-+class Parent(Base):
-+ __tablename__ = 'parent'
-+ id = Column(Integer, primary_key=True)
-+ name = Column(String(50))
-+ _collection = relationship("Child", lazy="dynamic",
-+ cascade="all, delete-orphan")
-+
-+ @property
-+ def child_map(self):
-+ return ProxyDict(self, '_collection', Child, 'key')
-+
-+class Child(Base):
-+ __tablename__ = 'child'
-+ id = Column(Integer, primary_key=True)
-+ key = Column(String(50))
-+ parent_id = Column(Integer, ForeignKey('parent.id'))
-+
-+ def __repr__(self):
-+ return "Child(key=%r)" % self.key
-+
-+Base.metadata.create_all()
-+
-+sess = sessionmaker()()
-+
-+p1 = Parent(name='p1')
-+sess.add(p1)
-+
-+print("\n---------begin setting nodes, autoflush occurs\n")
-+p1.child_map['k1'] = Child(key='k1')
-+p1.child_map['k2'] = Child(key='k2')
-+
-+# this will autoflush the current map.
-+# ['k1', 'k2']
-+print("\n---------print keys - flushes first\n")
-+print(list(p1.child_map.keys()))
-+
-+# k1
-+print("\n---------print 'k1' node\n")
-+print(p1.child_map['k1'])
-+
-+print("\n---------update 'k2' node - must find existing, and replace\n")
-+p1.child_map['k2'] = Child(key='k2')
-+
-+print("\n---------print 'k2' key - flushes first\n")
-+# k2
-+print(p1.child_map['k2'])
-+
-+print("\n---------print all child nodes\n")
-+# [k1, k2b]
-+print(sess.query(Child).all())
-+
-diff -r 9d0639b9d3be examples/elementtree/adjacency_list.py
---- a/examples/elementtree/adjacency_list.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/elementtree/adjacency_list.py Sat Apr 27 19:42:17 2013 -0400
-@@ -11,7 +11,7 @@
- Unicode, and_, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session, lazyload
-
--import sys, os, StringIO, re
-+import sys, os, io, re
-
- from xml.etree import ElementTree
-
-@@ -56,7 +56,7 @@
- self.element = element
-
- def __str__(self):
-- buf = StringIO.StringIO()
-+ buf = io.StringIO()
- self.element.write(buf)
- return buf.getvalue()
-
-@@ -120,11 +120,11 @@
- def __set__(self, document, element):
- def traverse(node):
- n = _Node()
-- n.tag = unicode(node.tag)
-- n.text = unicode(node.text)
-- n.tail = unicode(node.tail)
-+ n.tag = str(node.tag)
-+ n.text = str(node.text)
-+ n.tail = str(node.tail)
- n.children = [traverse(n2) for n2 in node]
-- n.attributes = [_Attribute(unicode(k), unicode(v)) for k, v in node.attrib.iteritems()]
-+ n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
- return n
-
- document._root = traverse(element.getroot())
-@@ -150,23 +150,23 @@
- doc = ElementTree.parse(filename)
- session.add(Document(file, doc))
-
--print "\nSaving three documents...", line
-+print("\nSaving three documents...", line)
- session.commit()
--print "Done."
-+print("Done.")
-
--print "\nFull text of document 'text.xml':", line
-+print("\nFull text of document 'text.xml':", line)
- document = session.query(Document).filter_by(filename="test.xml").first()
-
--print document
-+print(document)
-
- ############################################ PART VI - Searching for Paths #########################
-
- # manually search for a document which contains "/somefile/header/field1:hi"
--d = session.query(Document).join('_root', aliased=True).filter(_Node.tag==u'somefile').\
-- join('children', aliased=True, from_joinpoint=True).filter(_Node.tag==u'header').\
-+d = session.query(Document).join('_root', aliased=True).filter(_Node.tag=='somefile').\
-+ join('children', aliased=True, from_joinpoint=True).filter(_Node.tag=='header').\
- join('children', aliased=True, from_joinpoint=True).filter(
-- and_(_Node.tag==u'field1', _Node.text==u'hi')).one()
--print d
-+ and_(_Node.tag=='field1', _Node.text=='hi')).one()
-+print(d)
-
- # generalize the above approach into an extremely impoverished xpath function:
- def find_document(path, compareto):
-@@ -188,11 +188,11 @@
- return query.options(lazyload('_root')).filter(_Node.text==compareto).all()
-
- for path, compareto in (
-- (u'/somefile/header/field1', u'hi'),
-- (u'/somefile/field1', u'hi'),
-- (u'/somefile/header/field2', u'there'),
-- (u'/somefile/header/field2[@attr=foo]', u'there')
-+ ('/somefile/header/field1', 'hi'),
-+ ('/somefile/field1', 'hi'),
-+ ('/somefile/header/field2', 'there'),
-+ ('/somefile/header/field2[@attr=foo]', 'there')
- ):
-- print "\nDocuments containing '%s=%s':" % (path, compareto), line
-- print [d.filename for d in find_document(path, compareto)]
-+ print("\nDocuments containing '%s=%s':" % (path, compareto), line)
-+ print([d.filename for d in find_document(path, compareto)])
-
-diff -r 9d0639b9d3be examples/elementtree/optimized_al.py
---- a/examples/elementtree/optimized_al.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/elementtree/optimized_al.py Sat Apr 27 19:42:17 2013 -0400
-@@ -10,7 +10,7 @@
- Unicode, and_, create_engine)
- from sqlalchemy.orm import mapper, relationship, Session, lazyload
-
--import sys, os, StringIO, re
-+import sys, os, io, re
-
- from xml.etree import ElementTree
-
-@@ -55,7 +55,7 @@
- self.element = element
-
- def __str__(self):
-- buf = StringIO.StringIO()
-+ buf = io.StringIO()
- self.element.write(buf)
- return buf.getvalue()
-
-@@ -127,12 +127,12 @@
- def __set__(self, document, element):
- def traverse(node):
- n = _Node()
-- n.tag = unicode(node.tag)
-- n.text = unicode(node.text)
-- n.tail = unicode(node.tail)
-+ n.tag = str(node.tag)
-+ n.text = str(node.text)
-+ n.tail = str(node.tail)
- document._nodes.append(n)
- n.children = [traverse(n2) for n2 in node]
-- n.attributes = [_Attribute(unicode(k), unicode(v)) for k, v in node.attrib.iteritems()]
-+ n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
- return n
-
- traverse(element.getroot())
-@@ -158,27 +158,27 @@
- doc = ElementTree.parse(filename)
- session.add(Document(file, doc))
-
--print "\nSaving three documents...", line
-+print("\nSaving three documents...", line)
- session.commit()
--print "Done."
-+print("Done.")
-
--print "\nFull text of document 'text.xml':", line
-+print("\nFull text of document 'text.xml':", line)
- document = session.query(Document).filter_by(filename="test.xml").first()
-
--print document
-+print(document)
-
- ######################## PART VI - Searching for Paths #######################
-
- # manually search for a document which contains "/somefile/header/field1:hi"
--print "\nManual search for /somefile/header/field1=='hi':", line
-+print("\nManual search for /somefile/header/field1=='hi':", line)
- d = session.query(Document).join('_nodes', aliased=True).\
-- filter(and_(_Node.parent_id==None, _Node.tag==u'somefile')).\
-+ filter(and_(_Node.parent_id==None, _Node.tag=='somefile')).\
- join('children', aliased=True, from_joinpoint=True).\
-- filter(_Node.tag==u'header').\
-+ filter(_Node.tag=='header').\
- join('children', aliased=True, from_joinpoint=True).\
-- filter(and_(_Node.tag==u'field1', _Node.text==u'hi')).\
-+ filter(and_(_Node.tag=='field1', _Node.text=='hi')).\
- one()
--print d
-+print(d)
-
- # generalize the above approach into an extremely impoverished xpath function:
- def find_document(path, compareto):
-@@ -203,11 +203,11 @@
- return query.options(lazyload('_nodes')).filter(_Node.text==compareto).all()
-
- for path, compareto in (
-- (u'/somefile/header/field1', u'hi'),
-- (u'/somefile/field1', u'hi'),
-- (u'/somefile/header/field2', u'there'),
-- (u'/somefile/header/field2[@attr=foo]', u'there')
-+ ('/somefile/header/field1', 'hi'),
-+ ('/somefile/field1', 'hi'),
-+ ('/somefile/header/field2', 'there'),
-+ ('/somefile/header/field2[@attr=foo]', 'there')
- ):
-- print "\nDocuments containing '%s=%s':" % (path, compareto), line
-- print [d.filename for d in find_document(path, compareto)]
-+ print("\nDocuments containing '%s=%s':" % (path, compareto), line)
-+ print([d.filename for d in find_document(path, compareto)])
-
-diff -r 9d0639b9d3be examples/generic_associations/discriminator_on_association.py
---- a/examples/generic_associations/discriminator_on_association.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/generic_associations/discriminator_on_association.py Sat Apr 27 19:42:17 2013 -0400
-@@ -144,5 +144,5 @@
-
- for customer in session.query(Customer):
- for address in customer.addresses:
-- print address
-- print address.parent
-\ No newline at end of file
-+ print(address)
-+ print(address.parent)
-\ No newline at end of file
-diff -r 9d0639b9d3be examples/generic_associations/table_per_association.py
---- a/examples/generic_associations/table_per_association.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/generic_associations/table_per_association.py Sat Apr 27 19:42:17 2013 -0400
-@@ -102,5 +102,5 @@
-
- for customer in session.query(Customer):
- for address in customer.addresses:
-- print address
-+ print(address)
- # no parent here
-\ No newline at end of file
-diff -r 9d0639b9d3be examples/generic_associations/table_per_related.py
---- a/examples/generic_associations/table_per_related.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/generic_associations/table_per_related.py Sat Apr 27 19:42:17 2013 -0400
-@@ -103,5 +103,5 @@
-
- for customer in session.query(Customer):
- for address in customer.addresses:
-- print address
-- print address.parent
-\ No newline at end of file
-+ print(address)
-+ print(address.parent)
-\ No newline at end of file
-diff -r 9d0639b9d3be examples/inheritance/concrete.py
---- a/examples/inheritance/concrete.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/inheritance/concrete.py Sat Apr 27 19:42:17 2013 -0400
-@@ -68,5 +68,5 @@
- session.add(e2)
- session.commit()
-
--print session.query(Employee).all()
-+print(session.query(Employee).all())
-
-diff -r 9d0639b9d3be examples/inheritance/joined.py
---- a/examples/inheritance/joined.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/inheritance/joined.py Sat Apr 27 19:42:17 2013 -0400
-@@ -92,10 +92,10 @@
-
- c = session.query(Company).get(1)
- for e in c.employees:
-- print e, inspect(e).key, e.company
-+ print(e, inspect(e).key, e.company)
- assert set([e.name for e in c.employees]) == set(['pointy haired boss',
- 'dilbert', 'joesmith', 'wally', 'jsmith'])
--print "\n"
-+print("\n")
-
- dilbert = session.query(Person).filter_by(name='dilbert').one()
- dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
-@@ -107,29 +107,29 @@
-
- c = session.query(Company).get(1)
- for e in c.employees:
-- print e
-+ print(e)
-
- # query using with_polymorphic.
- eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
--print session.query(eng_manager).\
-+print(session.query(eng_manager).\
- filter(
- or_(eng_manager.Engineer.engineer_name=='engineer1',
- eng_manager.Manager.manager_name=='manager2'
- )
-- ).all()
-+ ).all())
-
- # illustrate join from Company,
- # We use aliased=True
- # to help when the selectable is used as the target of a join.
- eng_manager = with_polymorphic(Person, [Engineer, Manager], aliased=True)
--print session.query(Company).\
-+print(session.query(Company).\
- join(
- eng_manager,
- Company.employees
- ).filter(
- or_(eng_manager.Engineer.engineer_name=='engineer1',
- eng_manager.Manager.manager_name=='manager2')
-- ).all()
-+ ).all())
-
- session.commit()
-
-diff -r 9d0639b9d3be examples/inheritance/single.py
---- a/examples/inheritance/single.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/inheritance/single.py Sat Apr 27 19:42:17 2013 -0400
-@@ -23,7 +23,7 @@
-
- class Person(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Ordinary person %s" % self.name
-@@ -39,7 +39,7 @@
- (self.name, self.status, self.manager_name)
- class Company(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Company %s" % self.name
-@@ -79,9 +79,9 @@
-
- c = session.query(Company).get(1)
- for e in c.employees:
-- print e, e.company
-+ print(e, e.company)
-
--print "\n"
-+print("\n")
-
- dilbert = session.query(Person).filter_by(name='dilbert').one()
- dilbert2 = session.query(Engineer).filter_by(name='dilbert').one()
-@@ -94,7 +94,7 @@
-
- c = session.query(Company).get(1)
- for e in c.employees:
-- print e
-+ print(e)
-
- session.delete(c)
- session.commit()
-diff -r 9d0639b9d3be examples/large_collection/large_collection.py
---- a/examples/large_collection/large_collection.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/large_collection/large_collection.py Sat Apr 27 19:42:17 2013 -0400
-@@ -63,14 +63,14 @@
-
- sess.add(org)
-
-- print "-------------------------\nflush one - save org + 3 members\n"
-+ print("-------------------------\nflush one - save org + 3 members\n")
- sess.commit()
-
- # the 'members' collection is a Query. it issues
- # SQL as needed to load subsets of the collection.
-- print "-------------------------\nload subset of members\n"
-+ print("-------------------------\nload subset of members\n")
- members = org.members.filter(member_table.c.name.like('%member t%')).all()
-- print members
-+ print(members)
-
- # new Members can be appended without any
- # SQL being emitted to load the full collection
-@@ -78,19 +78,19 @@
- org.members.append(Member('member five'))
- org.members.append(Member('member six'))
-
-- print "-------------------------\nflush two - save 3 more members\n"
-+ print("-------------------------\nflush two - save 3 more members\n")
- sess.commit()
-
- # delete the object. Using ON DELETE CASCADE
- # SQL is only emitted for the head row - the Member rows
- # disappear automatically without the need for additional SQL.
- sess.delete(org)
-- print "-------------------------\nflush three - delete org, delete members in one statement\n"
-+ print("-------------------------\nflush three - delete org, delete members in one statement\n")
- sess.commit()
-
-- print "-------------------------\nno Member rows should remain:\n"
-- print sess.query(Member).count()
-+ print("-------------------------\nno Member rows should remain:\n")
-+ print(sess.query(Member).count())
- sess.close()
-
-- print "------------------------\ndone. dropping tables."
-+ print("------------------------\ndone. dropping tables.")
- meta.drop_all(engine)
-\ No newline at end of file
-diff -r 9d0639b9d3be examples/nested_sets/nested_sets.py
---- a/examples/nested_sets/nested_sets.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/nested_sets/nested_sets.py Sat Apr 27 19:42:17 2013 -0400
-@@ -86,19 +86,19 @@
- session.add_all([albert, bert, chuck, donna, eddie, fred])
- session.commit()
-
--print(session.query(Employee).all())
-+print((session.query(Employee).all()))
-
- # 1. Find an employee and all his/her supervisors, no matter how deep the tree.
- ealias = aliased(Employee)
--print(session.query(Employee).\
-+print((session.query(Employee).\
- filter(ealias.left.between(Employee.left, Employee.right)).\
-- filter(ealias.emp == 'Eddie').all())
-+ filter(ealias.emp == 'Eddie').all()))
-
- #2. Find the employee and all his/her subordinates.
- # (This query has a nice symmetry with the first query.)
--print(session.query(Employee).\
-+print((session.query(Employee).\
- filter(Employee.left.between(ealias.left, ealias.right)).\
-- filter(ealias.emp == 'Chuck').all())
-+ filter(ealias.emp == 'Chuck').all()))
-
- #3. Find the level of each node, so you can print the tree
- # as an indented listing.
-@@ -107,5 +107,5 @@
- filter(ealias.left.between(Employee.left, Employee.right)).\
- group_by(ealias.emp).\
- order_by(ealias.left):
-- print(" " * indentation + str(employee))
-+ print((" " * indentation + str(employee)))
-
-diff -r 9d0639b9d3be examples/postgis/postgis.py
---- a/examples/postgis/postgis.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/postgis/postgis.py Sat Apr 27 19:42:17 2013 -0400
-@@ -251,7 +251,7 @@
-
- road_table = Road.__table__
- stmt = select([road_table]).where(road_table.c.road_geom.intersects(r1.road_geom))
-- print session.execute(stmt).fetchall()
-+ print(session.execute(stmt).fetchall())
-
- # TODO: for some reason the auto-generated labels have the internal replacement
- # strings exposed, even though PG doesn't complain
-diff -r 9d0639b9d3be examples/versioning/_lib.py
---- a/examples/versioning/_lib.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/versioning/_lib.py Sat Apr 27 19:42:17 2013 -0400
-@@ -17,7 +17,7 @@
- _repr_stack = set()
- class BasicEntity(object):
- def __init__(self, **kw):
-- for key, value in kw.iteritems():
-+ for key, value in kw.items():
- setattr(self, key, value)
-
- def __repr__(self):
-@@ -74,7 +74,7 @@
- a = self
- b = other
-
-- for attr in a.__dict__.keys():
-+ for attr in list(a.__dict__.keys()):
- if attr.startswith('_'):
- continue
- value = getattr(a, attr)
-diff -r 9d0639b9d3be examples/versioning/history_meta.py
---- a/examples/versioning/history_meta.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/versioning/history_meta.py Sat Apr 27 19:42:17 2013 -0400
-@@ -49,7 +49,7 @@
- cols.append(Column('version', Integer, primary_key=True))
-
- if super_fks:
-- cols.append(ForeignKeyConstraint(*zip(*super_fks)))
-+ cols.append(ForeignKeyConstraint(*list(zip(*super_fks))))
-
- table = Table(local_mapper.local_table.name + '_history', local_mapper.local_table.metadata,
- *cols
-@@ -166,7 +166,7 @@
-
- attr['version'] = obj.version
- hist = history_cls()
-- for key, value in attr.iteritems():
-+ for key, value in attr.items():
- setattr(hist, key, value)
- session.add(hist)
- obj.version += 1
-diff -r 9d0639b9d3be examples/versioning/test_versioning.py
---- a/examples/versioning/test_versioning.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/versioning/test_versioning.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,9 +1,9 @@
- from unittest import TestCase
- from sqlalchemy.ext.declarative import declarative_base
--from history_meta import Versioned, versioned_session
-+from .history_meta import Versioned, versioned_session
- from sqlalchemy import create_engine, Column, Integer, String, ForeignKey
- from sqlalchemy.orm import clear_mappers, sessionmaker, deferred, relationship
--from _lib import ComparableEntity, eq_
-+from ._lib import ComparableEntity, eq_
-
- engine = Session = None
-
-@@ -188,9 +188,9 @@
- eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id).all(),
- [
-- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
-- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
-- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1)
-+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
-+ BaseClassHistory(id=2, name='base1', type='base', version=1),
-+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1)
- ]
- )
-
-@@ -199,10 +199,10 @@
- eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
- [
-- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
-- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
-- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
-- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
-+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
-+ BaseClassHistory(id=2, name='base1', type='base', version=1),
-+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
-+ SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
- ]
- )
-
-@@ -210,11 +210,11 @@
- eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
- [
-- SubClassSeparatePkHistory(id=1, name=u'sep1', type=u'sep', version=1),
-- BaseClassHistory(id=2, name=u'base1', type=u'base', version=1),
-- BaseClassHistory(id=2, name=u'base1mod', type=u'base', version=2),
-- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=1),
-- SubClassSamePkHistory(id=3, name=u'same1', type=u'same', version=2)
-+ SubClassSeparatePkHistory(id=1, name='sep1', type='sep', version=1),
-+ BaseClassHistory(id=2, name='base1', type='base', version=1),
-+ BaseClassHistory(id=2, name='base1mod', type='base', version=2),
-+ SubClassSamePkHistory(id=3, name='same1', type='same', version=1),
-+ SubClassSamePkHistory(id=3, name='same1', type='same', version=2)
- ]
- )
-
-@@ -249,7 +249,7 @@
-
- eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
-- [BaseClassHistory(id=1, name=u'b1', type=u'base', version=1)]
-+ [BaseClassHistory(id=1, name='b1', type='base', version=1)]
- )
-
- sc.name ='s1modified'
-@@ -258,9 +258,9 @@
- eq_(
- sess.query(BaseClassHistory).order_by(BaseClassHistory.id, BaseClassHistory.version).all(),
- [
-- BaseClassHistory(id=1, name=u'b1', type=u'base', version=1),
-- BaseClassHistory(id=1, name=u'b1modified', type=u'base', version=2),
-- SubClassHistory(id=2, name=u's1', type=u'sub', version=1)
-+ BaseClassHistory(id=1, name='b1', type='base', version=1),
-+ BaseClassHistory(id=1, name='b1modified', type='base', version=2),
-+ SubClassHistory(id=2, name='s1', type='sub', version=1)
- ]
- )
-
-diff -r 9d0639b9d3be examples/vertical/dictlike-polymorphic.py
---- a/examples/vertical/dictlike-polymorphic.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/vertical/dictlike-polymorphic.py Sat Apr 27 19:42:17 2013 -0400
-@@ -33,7 +33,7 @@
- from sqlalchemy.ext.hybrid import hybrid_property
-
- # Using the VerticalPropertyDictMixin from the base example
--from dictlike import VerticalPropertyDictMixin
-+from .dictlike import VerticalPropertyDictMixin
-
- class PolymorphicVerticalProperty(object):
- """A key/value pair with polymorphic value storage.
-@@ -80,7 +80,7 @@
-
- @hybrid_property
- def value(self):
-- for discriminator, field in self.type_map.values():
-+ for discriminator, field in list(self.type_map.values()):
- if self.type == discriminator:
- return getattr(self, field)
- return None
-@@ -114,7 +114,7 @@
-
- def _case(self):
- whens = [(text("'%s'" % p[0]), cast(getattr(self.cls, p[1]), String))
-- for p in self.cls.type_map.values()
-+ for p in list(self.cls.type_map.values())
- if p[1] is not None]
- return case(whens, self.cls.type, null())
- def __eq__(self, other):
-@@ -150,9 +150,9 @@
-
- class AnimalFact(PolymorphicVerticalProperty):
- type_map = {
-- int: (u'integer', 'int_value'),
-- unicode: (u'char', 'char_value'),
-- bool: (u'boolean', 'boolean_value'),
-+ int: ('integer', 'int_value'),
-+ str: ('char', 'char_value'),
-+ bool: ('boolean', 'boolean_value'),
- type(None): (None, None),
- }
-
-@@ -190,42 +190,42 @@
- metadata.create_all(engine)
- session = Session(engine)
-
-- stoat = Animal(u'stoat')
-- stoat[u'color'] = u'red'
-- stoat[u'cuteness'] = 7
-- stoat[u'weasel-like'] = True
-+ stoat = Animal('stoat')
-+ stoat['color'] = 'red'
-+ stoat['cuteness'] = 7
-+ stoat['weasel-like'] = True
-
- session.add(stoat)
- session.commit()
-
-- critter = session.query(Animal).filter(Animal.name == u'stoat').one()
-- print critter[u'color']
-- print critter[u'cuteness']
-+ critter = session.query(Animal).filter(Animal.name == 'stoat').one()
-+ print(critter['color'])
-+ print(critter['cuteness'])
-
-- print "changing cuteness value and type:"
-- critter[u'cuteness'] = u'very cute'
-+ print("changing cuteness value and type:")
-+ critter['cuteness'] = 'very cute'
-
- session.commit()
-
-- marten = Animal(u'marten')
-- marten[u'cuteness'] = 5
-- marten[u'weasel-like'] = True
-- marten[u'poisonous'] = False
-+ marten = Animal('marten')
-+ marten['cuteness'] = 5
-+ marten['weasel-like'] = True
-+ marten['poisonous'] = False
- session.add(marten)
-
-- shrew = Animal(u'shrew')
-- shrew[u'cuteness'] = 5
-- shrew[u'weasel-like'] = False
-- shrew[u'poisonous'] = True
-+ shrew = Animal('shrew')
-+ shrew['cuteness'] = 5
-+ shrew['weasel-like'] = False
-+ shrew['poisonous'] = True
-
- session.add(shrew)
- session.commit()
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
-- and_(AnimalFact.key == u'weasel-like',
-+ and_(AnimalFact.key == 'weasel-like',
- AnimalFact.value == True))))
-- print 'weasel-like animals', q.all()
-+ print('weasel-like animals', q.all())
-
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
-@@ -233,24 +233,24 @@
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
-- with_characteristic(u'weasel-like', True))))
-- print 'weasel-like animals again', q.all()
-+ with_characteristic('weasel-like', True))))
-+ print('weasel-like animals again', q.all())
-
- q = (session.query(Animal).
-- filter(Animal.facts.any(with_characteristic(u'poisonous', False))))
-- print 'animals with poisonous=False', q.all()
-+ filter(Animal.facts.any(with_characteristic('poisonous', False))))
-+ print('animals with poisonous=False', q.all())
-
- q = (session.query(Animal).
- filter(or_(Animal.facts.any(
-- with_characteristic(u'poisonous', False)),
-- not_(Animal.facts.any(AnimalFact.key == u'poisonous')))))
-- print 'non-poisonous animals', q.all()
-+ with_characteristic('poisonous', False)),
-+ not_(Animal.facts.any(AnimalFact.key == 'poisonous')))))
-+ print('non-poisonous animals', q.all())
-
- q = (session.query(Animal).
- filter(Animal.facts.any(AnimalFact.value == 5)))
-- print 'any animal with a .value of 5', q.all()
-+ print('any animal with a .value of 5', q.all())
-
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
-- filter(with_characteristic(u'cuteness', u'very cute')))
-- print q.all()
-+ filter(with_characteristic('cuteness', 'very cute')))
-+ print(q.all())
-diff -r 9d0639b9d3be examples/vertical/dictlike.py
---- a/examples/vertical/dictlike.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/examples/vertical/dictlike.py Sat Apr 27 19:42:17 2013 -0400
-@@ -110,16 +110,16 @@
-
- # Implement other dict methods to taste. Here are some examples:
- def keys(self):
-- return self.__map.keys()
-+ return list(self.__map.keys())
-
- def values(self):
-- return [prop.value for prop in self.__map.values()]
-+ return [prop.value for prop in list(self.__map.values())]
-
- def items(self):
-- return [(key, prop.value) for key, prop in self.__map.items()]
-+ return [(key, prop.value) for key, prop in list(self.__map.items())]
-
- def __iter__(self):
-- return iter(self.keys())
-+ return iter(list(self.keys()))
-
-
- if __name__ == '__main__':
-@@ -176,49 +176,49 @@
- metadata.create_all(engine)
- session = Session(bind=engine)
-
-- stoat = Animal(u'stoat')
-- stoat[u'color'] = u'reddish'
-- stoat[u'cuteness'] = u'somewhat'
-+ stoat = Animal('stoat')
-+ stoat['color'] = 'reddish'
-+ stoat['cuteness'] = 'somewhat'
-
- # dict-like assignment transparently creates entries in the
- # stoat.facts collection:
-- print stoat.facts[u'color']
-+ print(stoat.facts['color'])
-
- session.add(stoat)
- session.commit()
-
-- critter = session.query(Animal).filter(Animal.name == u'stoat').one()
-- print critter[u'color']
-- print critter[u'cuteness']
-+ critter = session.query(Animal).filter(Animal.name == 'stoat').one()
-+ print(critter['color'])
-+ print(critter['cuteness'])
-
-- critter[u'cuteness'] = u'very'
-+ critter['cuteness'] = 'very'
-
-- print 'changing cuteness:'
-+ print('changing cuteness:')
- engine.echo = True
- session.commit()
- engine.echo = False
-
-- marten = Animal(u'marten')
-- marten[u'color'] = u'brown'
-- marten[u'cuteness'] = u'somewhat'
-+ marten = Animal('marten')
-+ marten['color'] = 'brown'
-+ marten['cuteness'] = 'somewhat'
- session.add(marten)
-
-- shrew = Animal(u'shrew')
-- shrew[u'cuteness'] = u'somewhat'
-- shrew[u'poisonous-part'] = u'saliva'
-+ shrew = Animal('shrew')
-+ shrew['cuteness'] = 'somewhat'
-+ shrew['poisonous-part'] = 'saliva'
- session.add(shrew)
-
-- loris = Animal(u'slow loris')
-- loris[u'cuteness'] = u'fairly'
-- loris[u'poisonous-part'] = u'elbows'
-+ loris = Animal('slow loris')
-+ loris['cuteness'] = 'fairly'
-+ loris['poisonous-part'] = 'elbows'
- session.add(loris)
- session.commit()
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
-- and_(AnimalFact.key == u'color',
-- AnimalFact.value == u'reddish'))))
-- print 'reddish animals', q.all()
-+ and_(AnimalFact.key == 'color',
-+ AnimalFact.value == 'reddish'))))
-+ print('reddish animals', q.all())
-
- # Save some typing by wrapping that up in a function:
- with_characteristic = lambda key, value: and_(AnimalFact.key == key,
-@@ -226,21 +226,21 @@
-
- q = (session.query(Animal).
- filter(Animal.facts.any(
-- with_characteristic(u'color', u'brown'))))
-- print 'brown animals', q.all()
-+ with_characteristic('color', 'brown'))))
-+ print('brown animals', q.all())
-
- q = (session.query(Animal).
- filter(not_(Animal.facts.any(
-- with_characteristic(u'poisonous-part', u'elbows')))))
-- print 'animals without poisonous-part == elbows', q.all()
-+ with_characteristic('poisonous-part', 'elbows')))))
-+ print('animals without poisonous-part == elbows', q.all())
-
- q = (session.query(Animal).
-- filter(Animal.facts.any(AnimalFact.value == u'somewhat')))
-- print 'any animal with any .value of "somewhat"', q.all()
-+ filter(Animal.facts.any(AnimalFact.value == 'somewhat')))
-+ print('any animal with any .value of "somewhat"', q.all())
-
- # Facts can be queried as well.
- q = (session.query(AnimalFact).
-- filter(with_characteristic(u'cuteness', u'very')))
-- print 'just the facts', q.all()
-+ filter(with_characteristic('cuteness', 'very')))
-+ print('just the facts', q.all())
-
-
-diff -r 9d0639b9d3be lib/sqlalchemy/__init__.py
---- a/lib/sqlalchemy/__init__.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/__init__.py Sat Apr 27 19:42:17 2013 -0400
-@@ -117,7 +117,7 @@
- from .engine import create_engine, engine_from_config
-
-
--__all__ = sorted(name for name, obj in locals().items()
-+__all__ = sorted(name for name, obj in locals().items())
- if not (name.startswith('_') or _inspect.ismodule(obj)))
-
- __version__ = '0.8.1'
-diff -r 9d0639b9d3be lib/sqlalchemy/connectors/mxodbc.py
---- a/lib/sqlalchemy/connectors/mxodbc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/connectors/mxodbc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -82,7 +82,7 @@
- category=errorclass,
- stacklevel=2)
- else:
-- raise errorclass, errorvalue
-+ raise errorclass(errorvalue)
- return error_handler
-
- def create_connect_args(self, url):
-diff -r 9d0639b9d3be lib/sqlalchemy/connectors/pyodbc.py
---- a/lib/sqlalchemy/connectors/pyodbc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/connectors/pyodbc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -9,7 +9,7 @@
-
- import sys
- import re
--import urllib
-+import urllib.request, urllib.parse, urllib.error
-
-
- class PyODBCConnector(Connector):
-@@ -59,7 +59,7 @@
- connect_args[param] = asbool(keys.pop(param))
-
- if 'odbc_connect' in keys:
-- connectors = [urllib.unquote_plus(keys.pop('odbc_connect'))]
-+ connectors = [urllib.parse.unquote_plus(keys.pop('odbc_connect'))]
- else:
- dsn_connection = 'dsn' in keys or \
- ('host' in keys and 'database' not in keys)
-@@ -91,7 +91,7 @@
- connectors.append("AutoTranslate=%s" %
- keys.pop("odbc_autotranslate"))
-
-- connectors.extend(['%s=%s' % (k, v) for k, v in keys.iteritems()])
-+ connectors.extend(['%s=%s' % (k, v) for k, v in keys.items()])
- return [[";".join(connectors)], connect_args]
-
- def is_disconnect(self, e, connection, cursor):
-@@ -123,16 +123,16 @@
-
- # the "Py2K only" part here is theoretical.
- # have not tried pyodbc + python3.1 yet.
-- # Py2K
-- self.supports_unicode_statements = (
-- not self.freetds and not self.easysoft)
-- if self._user_supports_unicode_binds is not None:
-- self.supports_unicode_binds = self._user_supports_unicode_binds
-- else:
-- self.supports_unicode_binds = (
-- not self.freetds or self.freetds_driver_version >= '0.91'
-- ) and not self.easysoft
-- # end Py2K
-+# start Py2K
-+# self.supports_unicode_statements = (
-+# not self.freetds and not self.easysoft)
-+# if self._user_supports_unicode_binds is not None:
-+# self.supports_unicode_binds = self._user_supports_unicode_binds
-+# else:
-+# self.supports_unicode_binds = (
-+# not self.freetds or self.freetds_driver_version >= '0.91'
-+# ) and not self.easysoft
-+# end Py2K
-
- # run other initialization which asks for user name, etc.
- super(PyODBCConnector, self).initialize(connection)
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/firebird/base.py
---- a/lib/sqlalchemy/dialects/firebird/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/firebird/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -685,7 +685,7 @@
- self.normalize_name(row['fname']))
- fk['referred_columns'].append(
- self.normalize_name(row['targetfname']))
-- return fks.values()
-+ return list(fks.values())
-
- @reflection.cache
- def get_indexes(self, connection, table_name, schema=None, **kw):
-@@ -716,7 +716,7 @@
- indexrec['column_names'].append(
- self.normalize_name(row['field_name']))
-
-- return indexes.values()
-+ return list(indexes.values())
-
- def do_execute(self, cursor, statement, parameters, context=None):
- # kinterbase does not accept a None, but wants an empty list
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/informix/base.py
---- a/lib/sqlalchemy/dialects/informix/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/informix/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -24,6 +24,7 @@
- from sqlalchemy.sql import compiler, text
- from sqlalchemy.engine import default, reflection
- from sqlalchemy import types as sqltypes
-+from functools import reduce
-
- RESERVED_WORDS = set(
- ["abs", "absolute", "access", "access_method", "acos", "active", "add",
-@@ -298,7 +299,7 @@
-
- def get_column_default_string(self, column):
- if (isinstance(column.server_default, schema.DefaultClause) and
-- isinstance(column.server_default.arg, basestring)):
-+ isinstance(column.server_default.arg, str)):
- if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
- return self.sql_compiler.process(text(column.server_default.arg))
-
-@@ -322,10 +323,10 @@
- remote_table = list(constraint._elements.values())[0].column.table
- text = "FOREIGN KEY (%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
-- for f in constraint._elements.values()),
-+ for f in list(constraint._elements.values())),
- preparer.format_table(remote_table),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
-- for f in constraint._elements.values())
-+ for f in list(constraint._elements.values()))
- )
- text += self.define_constraint_cascades(constraint)
- text += self.define_constraint_deferrability(constraint)
-@@ -506,7 +507,7 @@
- if remote_column not in remote_cols:
- remote_cols.append(remote_column)
-
-- return fkeys.values()
-+ return list(fkeys.values())
-
- @reflection.cache
- def get_pk_constraint(self, connection, table_name, schema=None, **kw):
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mssql/adodbapi.py
---- a/lib/sqlalchemy/dialects/mssql/adodbapi.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mssql/adodbapi.py Sat Apr 27 19:42:17 2013 -0400
-@@ -44,7 +44,7 @@
-
- @classmethod
- def import_dbapi(cls):
-- import adodbapi as module
-+ from . import adodbapi as module
- return module
-
- colspecs = util.update_copy(
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mssql/base.py
---- a/lib/sqlalchemy/dialects/mssql/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mssql/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -295,7 +295,7 @@
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.date()
-- elif isinstance(value, basestring):
-+ elif isinstance(value, str):
- return datetime.date(*[
- int(x or 0)
- for x in self._reg.match(value).groups()
-@@ -328,7 +328,7 @@
- def process(value):
- if isinstance(value, datetime.datetime):
- return value.time()
-- elif isinstance(value, basestring):
-+ elif isinstance(value, str):
- return datetime.time(*[
- int(x or 0)
- for x in self._reg.match(value).groups()])
-@@ -1002,7 +1002,7 @@
- # handle other included columns
- if index.kwargs.get("mssql_include"):
- inclusions = [index.table.c[col]
-- if isinstance(col, basestring) else col
-+ if isinstance(col, str) else col
- for col in index.kwargs["mssql_include"]]
-
- text += " INCLUDE (%s)" \
-@@ -1103,7 +1103,7 @@
- query_timeout=None,
- use_scope_identity=True,
- max_identifier_length=None,
-- schema_name=u"dbo", **opts):
-+ schema_name="dbo", **opts):
- self.query_timeout = int(query_timeout or 0)
- self.schema_name = schema_name
-
-@@ -1123,7 +1123,7 @@
-
- def initialize(self, connection):
- super(MSDialect, self).initialize(connection)
-- if self.server_version_info[0] not in range(8, 17):
-+ if self.server_version_info[0] not in list(range(8, 17)):
- # FreeTDS with version 4.2 seems to report here
- # a number like "95.10.255". Don't know what
- # that is. So emit warning.
-@@ -1150,7 +1150,7 @@
- try:
- default_schema_name = connection.scalar(query, name=user_name)
- if default_schema_name is not None:
-- return unicode(default_schema_name)
-+ return str(default_schema_name)
- except:
- pass
- return self.schema_name
-@@ -1188,7 +1188,7 @@
- s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == owner,
-- tables.c.table_type == u'BASE TABLE'
-+ tables.c.table_type == 'BASE TABLE'
- ),
- order_by=[tables.c.table_name]
- )
-@@ -1202,7 +1202,7 @@
- s = sql.select([tables.c.table_name],
- sql.and_(
- tables.c.table_schema == owner,
-- tables.c.table_type == u'VIEW'
-+ tables.c.table_type == 'VIEW'
- ),
- order_by=[tables.c.table_name]
- )
-@@ -1267,7 +1267,7 @@
- if row['index_id'] in indexes:
- indexes[row['index_id']]['column_names'].append(row['name'])
-
-- return indexes.values()
-+ return list(indexes.values())
-
- @reflection.cache
- @_db_plus_owner
-@@ -1474,4 +1474,4 @@
- local_cols.append(scol)
- remote_cols.append(rcol)
-
-- return fkeys.values()
-+ return list(fkeys.values())
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mssql/information_schema.py
---- a/lib/sqlalchemy/dialects/mssql/information_schema.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mssql/information_schema.py Sat Apr 27 19:42:17 2013 -0400
-@@ -17,10 +17,10 @@
- impl = Unicode
-
- def process_bind_param(self, value, dialect):
-- # Py2K
-- if isinstance(value, str):
-- value = value.decode(dialect.encoding)
-- # end Py2K
-+# start Py2K
-+# if isinstance(value, str):
-+# value = value.decode(dialect.encoding)
-+# end Py2K
- return value
-
- def bind_expression(self, bindvalue):
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mssql/pyodbc.py
---- a/lib/sqlalchemy/dialects/mssql/pyodbc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mssql/pyodbc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -219,7 +219,7 @@
- # without closing it (FreeTDS particularly)
- row = self.cursor.fetchall()[0]
- break
-- except self.dialect.dbapi.Error, e:
-+ except self.dialect.dbapi.Error as e:
- # no way around this - nextset() consumes the previous set
- # so we need to just keep flipping
- self.cursor.nextset()
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mysql/base.py
---- a/lib/sqlalchemy/dialects/mysql/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mysql/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -652,7 +652,7 @@
-
- def process(value):
- if value is not None:
-- v = 0L
-+ v = 0
- for i in map(ord, value):
- v = v << 8 | i
- return v
-@@ -1171,7 +1171,7 @@
- super_convert = super(SET, self).bind_processor(dialect)
-
- def process(value):
-- if value is None or isinstance(value, (int, long, basestring)):
-+ if value is None or isinstance(value, (int, str)):
- pass
- else:
- if None in value:
-@@ -1352,7 +1352,7 @@
- of a SELECT.
-
- """
-- if isinstance(select._distinct, basestring):
-+ if isinstance(select._distinct, str):
- return select._distinct.upper() + " "
- elif select._distinct:
- return "DISTINCT "
-@@ -1441,7 +1441,7 @@
- MySQLDDLCompiler, self).create_table_constraints(table)
-
- engine_key = '%s_engine' % self.dialect.name
-- is_innodb = table.kwargs.has_key(engine_key) and \
-+ is_innodb = engine_key in table.kwargs and \
- table.kwargs[engine_key].lower() == 'innodb'
-
- auto_inc_column = table._autoincrement_column
-@@ -1494,7 +1494,7 @@
- k[len(self.dialect.name) + 1:].upper(),
- v
- )
-- for k, v in table.kwargs.items()
-+ for k, v in list(table.kwargs.items())
- if k.startswith('%s_' % self.dialect.name)
- )
-
-@@ -2045,7 +2045,7 @@
- have = rs.fetchone() is not None
- rs.close()
- return have
-- except exc.DBAPIError, e:
-+ except exc.DBAPIError as e:
- if self._extract_error_code(e.orig) == 1146:
- return False
- raise
-@@ -2328,7 +2328,7 @@
- rp = None
- try:
- rp = connection.execute(st)
-- except exc.DBAPIError, e:
-+ except exc.DBAPIError as e:
- if self._extract_error_code(e.orig) == 1146:
- raise exc.NoSuchTableError(full_name)
- else:
-@@ -2352,7 +2352,7 @@
- try:
- try:
- rp = connection.execute(st)
-- except exc.DBAPIError, e:
-+ except exc.DBAPIError as e:
- if self._extract_error_code(e.orig) == 1146:
- raise exc.NoSuchTableError(full_name)
- else:
-@@ -2485,7 +2485,7 @@
- for nope in ('auto_increment', 'data directory', 'index directory'):
- options.pop(nope, None)
-
-- for opt, val in options.items():
-+ for opt, val in list(options.items()):
- state.table_options['%s_%s' % (self.dialect.name, opt)] = val
-
- def _parse_column(self, line, state):
-@@ -2626,11 +2626,11 @@
-
- _final = self.preparer.final_quote
-
-- quotes = dict(zip(('iq', 'fq', 'esc_fq'),
-+ quotes = dict(list(zip(('iq', 'fq', 'esc_fq'),
- [re.escape(s) for s in
- (self.preparer.initial_quote,
- _final,
-- self.preparer._escape_identifier(_final))]))
-+ self.preparer._escape_identifier(_final))])))
-
- self._pr_name = _pr_compile(
- r'^CREATE (?:\w+ +)?TABLE +'
-@@ -2802,11 +2802,12 @@
- item = self.rowproxy[index]
- if isinstance(item, _array):
- item = item.tostring()
-- # Py2K
-- if self.charset and isinstance(item, str):
-- # end Py2K
-- # Py3K
-- #if self.charset and isinstance(item, bytes):
-+# start Py2K
-+# if self.charset and isinstance(item, str):
-+# end Py2K
-+# start Py3K
-+ if self.charset and isinstance(item, bytes):
-+# end Py3K
- return item.decode(self.charset)
- else:
- return item
-@@ -2815,11 +2816,12 @@
- item = getattr(self.rowproxy, attr)
- if isinstance(item, _array):
- item = item.tostring()
-- # Py2K
-- if self.charset and isinstance(item, str):
-- # end Py2K
-- # Py3K
-- #if self.charset and isinstance(item, bytes):
-+# start Py2K
-+# if self.charset and isinstance(item, str):
-+# end Py2K
-+# start Py3K
-+ if self.charset and isinstance(item, bytes):
-+# end Py3K
- return item.decode(self.charset)
- else:
- return item
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mysql/cymysql.py
---- a/lib/sqlalchemy/dialects/mysql/cymysql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mysql/cymysql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -25,15 +25,16 @@
-
- def process(value):
- if value is not None:
-- # Py2K
-- v = 0L
-- for i in map(ord, value):
-+# start Py2K
-+# v = 0L
-+# for i in map(ord, value):
-+# v = v << 8 | i
-+# end Py2K
-+# start Py3K
-+ v = 0
-+ for i in value:
- v = v << 8 | i
-- # end Py2K
-- # Py3K
-- #v = 0
-- #for i in value:
-- # v = v << 8 | i
-+# end Py3K
- return v
- return value
- return process
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mysql/oursql.py
---- a/lib/sqlalchemy/dialects/mysql/oursql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mysql/oursql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -55,9 +55,9 @@
-
- class MySQLDialect_oursql(MySQLDialect):
- driver = 'oursql'
--# Py2K
-- supports_unicode_binds = True
-- supports_unicode_statements = True
-+# start Py2K
-+# supports_unicode_binds = True
-+# supports_unicode_statements = True
- # end Py2K
-
- supports_native_decimal = True
-@@ -90,12 +90,13 @@
- connection.cursor().execute('BEGIN', plain_query=True)
-
- def _xa_query(self, connection, query, xid):
--# Py2K
-- arg = connection.connection._escape_string(xid)
-+# start Py2K
-+# arg = connection.connection._escape_string(xid)
- # end Py2K
--# Py3K
--# charset = self._connection_charset
--# arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
-+# start Py3K
-+ charset = self._connection_charset
-+ arg = connection.connection._escape_string(xid.encode(charset)).decode(charset)
-+# end Py3K
- arg = "'%s'" % arg
- connection.execution_options(_oursql_plain_query=True).execute(query % arg)
-
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/mysql/zxjdbc.py
---- a/lib/sqlalchemy/dialects/mysql/zxjdbc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/mysql/zxjdbc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -37,7 +37,7 @@
- return value
- if isinstance(value, bool):
- return int(value)
-- v = 0L
-+ v = 0
- for i in value:
- v = v << 8 | (i & 0xff)
- value = v
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/oracle/base.py
---- a/lib/sqlalchemy/dialects/oracle/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/oracle/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -437,7 +437,7 @@
-
- def get_select_hint_text(self, byfroms):
- return " ".join(
-- "/*+ %s */" % text for table, text in byfroms.items()
-+ "/*+ %s */" % text for table, text in list(byfroms.items())
- )
-
- def function_argspec(self, fn, **kw):
-@@ -654,14 +654,14 @@
- class OracleIdentifierPreparer(compiler.IdentifierPreparer):
-
- reserved_words = set([x.lower() for x in RESERVED_WORDS])
-- illegal_initial_characters = set(xrange(0, 10)).union(["_", "$"])
-+ illegal_initial_characters = set(range(0, 10)).union(["_", "$"])
-
- def _bindparam_requires_quotes(self, value):
- """Return True if the given identifier requires quoting."""
- lc_value = value.lower()
- return (lc_value in self.reserved_words
- or value[0] in self.illegal_initial_characters
-- or not self.legal_characters.match(unicode(value))
-+ or not self.legal_characters.match(str(value))
- )
-
- def format_savepoint(self, savepoint):
-@@ -765,10 +765,10 @@
- def normalize_name(self, name):
- if name is None:
- return None
-- # Py2K
-- if isinstance(name, str):
-- name = name.decode(self.encoding)
-- # end Py2K
-+# start Py2K
-+# if isinstance(name, str):
-+# name = name.decode(self.encoding)
-+# end Py2K
- if name.upper() == name and \
- not self.identifier_preparer._requires_quotes(name.lower()):
- return name.lower()
-@@ -780,16 +780,16 @@
- return None
- elif name.lower() == name and not self.identifier_preparer._requires_quotes(name.lower()):
- name = name.upper()
-- # Py2K
-- if not self.supports_unicode_binds:
-- name = name.encode(self.encoding)
-- else:
-- name = unicode(name)
-- # end Py2K
-+# start Py2K
-+# if not self.supports_unicode_binds:
-+# name = name.encode(self.encoding)
-+# else:
-+# name = unicode(name)
-+# end Py2K
- return name
-
- def _get_default_schema_name(self, connection):
-- return self.normalize_name(connection.execute(u'SELECT USER FROM DUAL').scalar())
-+ return self.normalize_name(connection.execute('SELECT USER FROM DUAL').scalar())
-
- def _resolve_synonym(self, connection, desired_owner=None, desired_synonym=None, desired_table=None):
- """search for a local synonym matching the given desired owner/name.
-@@ -1167,7 +1167,7 @@
- local_cols.append(local_column)
- remote_cols.append(remote_column)
-
-- return fkeys.values()
-+ return list(fkeys.values())
-
- @reflection.cache
- def get_view_definition(self, connection, view_name, schema=None,
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/oracle/cx_oracle.py
---- a/lib/sqlalchemy/dialects/oracle/cx_oracle.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/oracle/cx_oracle.py Sat Apr 27 19:42:17 2013 -0400
-@@ -182,7 +182,7 @@
-
- """
-
--from __future__ import absolute_import
-+
-
- from .base import OracleCompiler, OracleDialect, OracleExecutionContext
- from . import base as oracle
-@@ -268,20 +268,21 @@
-
-
- class _NativeUnicodeMixin(object):
-- # Py3K
-- #pass
-- # Py2K
-- def bind_processor(self, dialect):
-- if dialect._cx_oracle_with_unicode:
-- def process(value):
-- if value is None:
-- return value
-- else:
-- return unicode(value)
-- return process
-- else:
-- return super(_NativeUnicodeMixin, self).bind_processor(dialect)
-- # end Py2K
-+# start Py3K
-+ pass
-+# end Py3K
-+# start Py2K
-+# def bind_processor(self, dialect):
-+# if dialect._cx_oracle_with_unicode:
-+# def process(value):
-+# if value is None:
-+# return value
-+# else:
-+# return unicode(value)
-+# return process
-+# else:
-+# return super(_NativeUnicodeMixin, self).bind_processor(dialect)
-+# end Py2K
-
- # we apply a connection output handler that returns
- # unicode in all cases, so the "native_unicode" flag
-@@ -391,10 +392,10 @@
- (fromname.encode(self.dialect.encoding),
- toname.encode(self.dialect.encoding))
- for fromname, toname in
-- quoted_bind_names.items()
-+ list(quoted_bind_names.items())
- )
- for param in self.parameters:
-- for fromname, toname in quoted_bind_names.items():
-+ for fromname, toname in list(quoted_bind_names.items()):
- param[toname] = param[fromname]
- del param[fromname]
-
-@@ -408,7 +409,7 @@
-
- # if a single execute, check for outparams
- if len(self.compiled_parameters) == 1:
-- for bindparam in self.compiled.binds.values():
-+ for bindparam in list(self.compiled.binds.values()):
- if bindparam.isoutparam:
- dbtype = bindparam.type.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
-@@ -437,7 +438,7 @@
- if hasattr(self, 'out_parameters') and self.compiled.returning:
- returning_params = dict(
- (k, v.getvalue())
-- for k, v in self.out_parameters.items()
-+ for k, v in list(self.out_parameters.items())
- )
- return ReturningResultProxy(self, returning_params)
-
-@@ -456,7 +457,7 @@
- len(self.compiled_parameters) == 1:
- result.out_parameters = out_parameters = {}
-
-- for bind, name in self.compiled.bind_names.items():
-+ for bind, name in list(self.compiled.bind_names.items()):
- if name in self.out_parameters:
- type = bind.type
- impl_type = type.dialect_impl(self.dialect)
-@@ -472,7 +473,7 @@
- else:
- result.out_parameters = dict(
- (k, v.getvalue())
-- for k, v in self.out_parameters.items()
-+ for k, v in list(self.out_parameters.items())
- )
-
- return result
-@@ -493,11 +494,11 @@
- """
- def __init__(self, *arg, **kw):
- OracleExecutionContext_cx_oracle.__init__(self, *arg, **kw)
-- self.statement = unicode(self.statement)
-+ self.statement = str(self.statement)
-
- def _execute_scalar(self, stmt):
- return super(OracleExecutionContext_cx_oracle_with_unicode, self).\
-- _execute_scalar(unicode(stmt))
-+ _execute_scalar(str(stmt))
-
-
- class ReturningResultProxy(_result.FullyBufferedResultProxy):
-@@ -607,19 +608,19 @@
- self.supports_unicode_statements = True
- self.supports_unicode_binds = True
- self._cx_oracle_with_unicode = True
-- # Py2K
-- # There's really no reason to run with WITH_UNICODE under Python 2.x.
-- # Give the user a hint.
-- util.warn("cx_Oracle is compiled under Python 2.xx using the "
-- "WITH_UNICODE flag. Consider recompiling cx_Oracle without "
-- "this flag, which is in no way necessary for full support of Unicode. "
-- "Otherwise, all string-holding bind parameters must "
-- "be explicitly typed using SQLAlchemy's String type or one of its subtypes,"
-- "or otherwise be passed as Python unicode. Plain Python strings "
-- "passed as bind parameters will be silently corrupted by cx_Oracle."
-- )
-- self.execution_ctx_cls = OracleExecutionContext_cx_oracle_with_unicode
-- # end Py2K
-+# start Py2K
-+# # There's really no reason to run with WITH_UNICODE under Python 2.x.
-+# # Give the user a hint.
-+# util.warn("cx_Oracle is compiled under Python 2.xx using the "
-+# "WITH_UNICODE flag. Consider recompiling cx_Oracle without "
-+# "this flag, which is in no way necessary for full support of Unicode. "
-+# "Otherwise, all string-holding bind parameters must "
-+# "be explicitly typed using SQLAlchemy's String type or one of its subtypes,"
-+# "or otherwise be passed as Python unicode. Plain Python strings "
-+# "passed as bind parameters will be silently corrupted by cx_Oracle."
-+# )
-+# self.execution_ctx_cls = OracleExecutionContext_cx_oracle_with_unicode
-+# end Py2K
- else:
- self._cx_oracle_with_unicode = False
-
-@@ -731,7 +732,7 @@
- arraysize=cursor.arraysize)
- # allow all strings to come back natively as Unicode
- elif defaultType in (cx_Oracle.STRING, cx_Oracle.FIXED_CHAR):
-- return cursor.var(unicode, size, cursor.arraysize)
-+ return cursor.var(str, size, cursor.arraysize)
-
- def on_connect(conn):
- conn.outputtypehandler = output_type_handler
-@@ -766,20 +767,20 @@
- twophase=self.allow_twophase,
- )
-
-- # Py2K
-- if self._cx_oracle_with_unicode:
-- for k, v in opts.items():
-- if isinstance(v, str):
-- opts[k] = unicode(v)
-- else:
-- for k, v in opts.items():
-- if isinstance(v, unicode):
-- opts[k] = str(v)
-- # end Py2K
-+# start Py2K
-+# if self._cx_oracle_with_unicode:
-+# for k, v in opts.items():
-+# if isinstance(v, str):
-+# opts[k] = unicode(v)
-+# else:
-+# for k, v in opts.items():
-+# if isinstance(v, unicode):
-+# opts[k] = str(v)
-+# end Py2K
-
- if 'mode' in url.query:
- opts['mode'] = url.query['mode']
-- if isinstance(opts['mode'], basestring):
-+ if isinstance(opts['mode'], str):
- mode = opts['mode'].upper()
- if mode == 'SYSDBA':
- opts['mode'] = self.dbapi.SYSDBA
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/oracle/zxjdbc.py
---- a/lib/sqlalchemy/dialects/oracle/zxjdbc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/oracle/zxjdbc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -95,8 +95,8 @@
- try:
- try:
- rrs = self.statement.__statement__.getReturnResultSet()
-- rrs.next()
-- except SQLException, sqle:
-+ next(rrs)
-+ except SQLException as sqle:
- msg = '%s [SQLCode: %d]' % (sqle.getMessage(), sqle.getErrorCode())
- if sqle.getSQLState() is not None:
- msg += ' [SQLState: %s]' % sqle.getSQLState()
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/postgresql/base.py
---- a/lib/sqlalchemy/dialects/postgresql/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/postgresql/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1419,7 +1419,7 @@
- query,
- bindparams=[
- sql.bindparam(
-- 'schema', unicode(schema.lower()),
-+ 'schema', str(schema.lower()),
- type_=sqltypes.Unicode)]
- )
- )
-@@ -1435,7 +1435,7 @@
- "n.oid=c.relnamespace where n.nspname=current_schema() and "
- "relname=:name",
- bindparams=[
-- sql.bindparam('name', unicode(table_name),
-+ sql.bindparam('name', str(table_name),
- type_=sqltypes.Unicode)]
- )
- )
-@@ -1447,9 +1447,9 @@
- "relname=:name",
- bindparams=[
- sql.bindparam('name',
-- unicode(table_name), type_=sqltypes.Unicode),
-+ str(table_name), type_=sqltypes.Unicode),
- sql.bindparam('schema',
-- unicode(schema), type_=sqltypes.Unicode)]
-+ str(schema), type_=sqltypes.Unicode)]
- )
- )
- return bool(cursor.first())
-@@ -1463,7 +1463,7 @@
- "n.nspname=current_schema() "
- "and relname=:name",
- bindparams=[
-- sql.bindparam('name', unicode(sequence_name),
-+ sql.bindparam('name', str(sequence_name),
- type_=sqltypes.Unicode)
- ]
- )
-@@ -1475,10 +1475,10 @@
- "n.oid=c.relnamespace where relkind='S' and "
- "n.nspname=:schema and relname=:name",
- bindparams=[
-- sql.bindparam('name', unicode(sequence_name),
-+ sql.bindparam('name', str(sequence_name),
- type_=sqltypes.Unicode),
- sql.bindparam('schema',
-- unicode(schema), type_=sqltypes.Unicode)
-+ str(schema), type_=sqltypes.Unicode)
- ]
- )
- )
-@@ -1488,9 +1488,9 @@
- def has_type(self, connection, type_name, schema=None):
- bindparams = [
- sql.bindparam('typname',
-- unicode(type_name), type_=sqltypes.Unicode),
-+ str(type_name), type_=sqltypes.Unicode),
- sql.bindparam('nspname',
-- unicode(schema), type_=sqltypes.Unicode),
-+ str(schema), type_=sqltypes.Unicode),
- ]
- if schema is not None:
- query = """
-@@ -1546,9 +1546,9 @@
- """ % schema_where_clause
- # Since we're binding to unicode, table_name and schema_name must be
- # unicode.
-- table_name = unicode(table_name)
-+ table_name = str(table_name)
- if schema is not None:
-- schema = unicode(schema)
-+ schema = str(schema)
- s = sql.text(query, bindparams=[
- sql.bindparam('table_name', type_=sqltypes.Unicode),
- sql.bindparam('schema', type_=sqltypes.Unicode)
-@@ -1570,13 +1570,14 @@
- """
- rp = connection.execute(s)
- # what about system tables?
-- # Py3K
-- #schema_names = [row[0] for row in rp \
-- # if not row[0].startswith('pg_')]
-- # Py2K
-- schema_names = [row[0].decode(self.encoding) for row in rp \
-+# start Py3K
-+ schema_names = [row[0] for row in rp \
- if not row[0].startswith('pg_')]
-- # end Py2K
-+# end Py3K
-+# start Py2K
-+# schema_names = [row[0].decode(self.encoding) for row in rp \
-+# if not row[0].startswith('pg_')]
-+# end Py2K
- return schema_names
-
- @reflection.cache
-@@ -1587,7 +1588,7 @@
- current_schema = self.default_schema_name
-
- result = connection.execute(
-- sql.text(u"SELECT relname FROM pg_class c "
-+ sql.text("SELECT relname FROM pg_class c "
- "WHERE relkind = 'r' "
- "AND '%s' = (select nspname from pg_namespace n "
- "where n.oid = c.relnamespace) " %
-@@ -1610,12 +1611,13 @@
- AND '%(schema)s' = (select nspname from pg_namespace n
- where n.oid = c.relnamespace)
- """ % dict(schema=current_schema)
-- # Py3K
-- #view_names = [row[0] for row in connection.execute(s)]
-- # Py2K
-- view_names = [row[0].decode(self.encoding)
-- for row in connection.execute(s)]
-- # end Py2K
-+# start Py3K
-+ view_names = [row[0] for row in connection.execute(s)]
-+# end Py3K
-+# start Py2K
-+# view_names = [row[0].decode(self.encoding)
-+# for row in connection.execute(s)]
-+# end Py2K
- return view_names
-
- @reflection.cache
-@@ -1632,11 +1634,12 @@
- rp = connection.execute(sql.text(s),
- view_name=view_name, schema=current_schema)
- if rp:
-- # Py3K
-- #view_def = rp.scalar()
-- # Py2K
-- view_def = rp.scalar().decode(self.encoding)
-- # end Py2K
-+# start Py3K
-+ view_def = rp.scalar()
-+# end Py3K
-+# start Py2K
-+# view_def = rp.scalar().decode(self.encoding)
-+# end Py2K
- return view_def
-
- @reflection.cache
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/postgresql/hstore.py
---- a/lib/sqlalchemy/dialects/postgresql/hstore.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/postgresql/hstore.py Sat Apr 27 19:42:17 2013 -0400
-@@ -96,14 +96,14 @@
- def esc(s, position):
- if position == 'value' and s is None:
- return 'NULL'
-- elif isinstance(s, basestring):
-+ elif isinstance(s, str):
- return '"%s"' % s.replace('"', r'\"')
- else:
- raise ValueError("%r in %s position is not a string." %
- (s, position))
-
- return ', '.join('%s=>%s' % (esc(k, 'key'), esc(v, 'value'))
-- for k, v in val.iteritems())
-+ for k, v in val.items())
-
-
- class HSTORE(sqltypes.Concatenable, sqltypes.TypeEngine):
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/postgresql/psycopg2.py
---- a/lib/sqlalchemy/dialects/postgresql/psycopg2.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/postgresql/psycopg2.py Sat Apr 27 19:42:17 2013 -0400
-@@ -142,7 +142,7 @@
- effect for other DBAPIs.
-
- """
--from __future__ import absolute_import
-+
- import re
- import logging
-
-@@ -190,22 +190,22 @@
- class _PGEnum(ENUM):
- def __init__(self, *arg, **kw):
- super(_PGEnum, self).__init__(*arg, **kw)
-- # Py2K
-- if self.convert_unicode:
-- self.convert_unicode = "force"
-- # end Py2K
-+# start Py2K
-+# if self.convert_unicode:
-+# self.convert_unicode = "force"
-+# end Py2K
-
-
- class _PGArray(ARRAY):
- def __init__(self, *arg, **kw):
- super(_PGArray, self).__init__(*arg, **kw)
-- # Py2K
-- # FIXME: this check won't work for setups that
-- # have convert_unicode only on their create_engine().
-- if isinstance(self.item_type, sqltypes.String) and \
-- self.item_type.convert_unicode:
-- self.item_type.convert_unicode = "force"
-- # end Py2K
-+# start Py2K
-+# # FIXME: this check won't work for setups that
-+# # have convert_unicode only on their create_engine().
-+# if isinstance(self.item_type, sqltypes.String) and \
-+# self.item_type.convert_unicode:
-+# self.item_type.convert_unicode = "force"
-+# end Py2K
-
-
- class _PGHStore(HSTORE):
-@@ -294,9 +294,9 @@
-
- class PGDialect_psycopg2(PGDialect):
- driver = 'psycopg2'
-- # Py2K
-- supports_unicode_statements = False
-- # end Py2K
-+# start Py2K
-+# supports_unicode_statements = False
-+# end Py2K
- default_paramstyle = 'pyformat'
- supports_sane_multi_rowcount = False
- execution_ctx_cls = PGExecutionContext_psycopg2
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/sqlite/base.py
---- a/lib/sqlalchemy/dialects/sqlite/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/sqlite/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -508,7 +508,7 @@
-
- def visit_foreign_key_constraint(self, constraint):
-
-- local_table = constraint._elements.values()[0].parent.table
-+ local_table = list(constraint._elements.values())[0].parent.table
- remote_table = list(constraint._elements.values())[0].column.table
-
- if local_table.schema != remote_table.schema:
-@@ -812,7 +812,7 @@
- coltype = sqltypes.NullType()
-
- if default is not None:
-- default = unicode(default)
-+ default = str(default)
-
- return {
- 'name': name,
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/sqlite/pysqlite.py
---- a/lib/sqlalchemy/dialects/sqlite/pysqlite.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/sqlite/pysqlite.py Sat Apr 27 19:42:17 2013 -0400
-@@ -267,8 +267,9 @@
- }
- )
-
-- # Py3K
-- #description_encoding = None
-+# start Py3K
-+ description_encoding = None
-+# end Py3K
-
- driver = 'pysqlite'
-
-@@ -288,7 +289,7 @@
- def dbapi(cls):
- try:
- from pysqlite2 import dbapi2 as sqlite
-- except ImportError, e:
-+ except ImportError as e:
- try:
- from sqlite3 import dbapi2 as sqlite # try 2.5+ stdlib name.
- except ImportError:
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/sybase/__init__.py
---- a/lib/sqlalchemy/dialects/sybase/__init__.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/sybase/__init__.py Sat Apr 27 19:42:17 2013 -0400
-@@ -9,7 +9,7 @@
- # default dialect
- base.dialect = pyodbc.dialect
-
--from base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
-+from .base import CHAR, VARCHAR, TIME, NCHAR, NVARCHAR,\
- TEXT, DATE, DATETIME, FLOAT, NUMERIC,\
- BIGINT, INT, INTEGER, SMALLINT, BINARY,\
- VARBINARY, UNITEXT, UNICHAR, UNIVARCHAR,\
-diff -r 9d0639b9d3be lib/sqlalchemy/dialects/sybase/base.py
---- a/lib/sqlalchemy/dialects/sybase/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/dialects/sybase/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -475,12 +475,12 @@
- AND o.type in ('U', 'V')
- """)
-
-- # Py2K
-- if isinstance(schema, unicode):
-- schema = schema.encode("ascii")
-- if isinstance(table_name, unicode):
-- table_name = table_name.encode("ascii")
-- # end Py2K
-+# start Py2K
-+# if isinstance(schema, unicode):
-+# schema = schema.encode("ascii")
-+# if isinstance(table_name, unicode):
-+# table_name = table_name.encode("ascii")
-+# end Py2K
- result = connection.execute(TABLEID_SQL,
- schema_name=schema,
- table_name=table_name)
-@@ -759,10 +759,10 @@
- AND o.type = 'U'
- """)
-
-- # Py2K
-- if isinstance(schema, unicode):
-- schema = schema.encode("ascii")
-- # end Py2K
-+# start Py2K
-+# if isinstance(schema, unicode):
-+# schema = schema.encode("ascii")
-+# end Py2K
- tables = connection.execute(TABLE_SQL, schema_name=schema)
-
- return [t["name"] for t in tables]
-@@ -779,10 +779,10 @@
- AND o.type = 'V'
- """)
-
-- # Py2K
-- if isinstance(view_name, unicode):
-- view_name = view_name.encode("ascii")
-- # end Py2K
-+# start Py2K
-+# if isinstance(view_name, unicode):
-+# view_name = view_name.encode("ascii")
-+# end Py2K
- view = connection.execute(VIEW_DEF_SQL, view_name=view_name)
-
- return view.scalar()
-@@ -799,10 +799,10 @@
- AND o.type = 'V'
- """)
-
-- # Py2K
-- if isinstance(schema, unicode):
-- schema = schema.encode("ascii")
-- # end Py2K
-+# start Py2K
-+# if isinstance(schema, unicode):
-+# schema = schema.encode("ascii")
-+# end Py2K
- views = connection.execute(VIEW_SQL, schema_name=schema)
-
- return [v["name"] for v in views]
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/base.py
---- a/lib/sqlalchemy/engine/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -9,7 +9,7 @@
-
- """
-
--from __future__ import with_statement
-+
- import sys
- from .. import exc, schema, util, log, interfaces
- from ..sql import expression, util as sql_util
-@@ -460,7 +460,7 @@
-
- try:
- self.engine.dialect.do_begin(self.connection)
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(e, None, None, None, None)
-
- def _rollback_impl(self):
-@@ -473,7 +473,7 @@
- try:
- self.engine.dialect.do_rollback(self.connection)
- self.__transaction = None
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(e, None, None, None, None)
- else:
- self.__transaction = None
-@@ -487,7 +487,7 @@
- try:
- self.engine.dialect.do_commit(self.connection)
- self.__transaction = None
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(e, None, None, None, None)
-
- def _savepoint_impl(self, name=None):
-@@ -688,7 +688,7 @@
- dialect = self.dialect
- ctx = dialect.execution_ctx_cls._init_default(
- dialect, self, conn)
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(e, None, None, None, None)
-
- ret = ctx._exec_default(default, None)
-@@ -734,7 +734,7 @@
-
- distilled_params = _distill_params(multiparams, params)
- if distilled_params:
-- keys = distilled_params[0].keys()
-+ keys = list(distilled_params[0].keys())
- else:
- keys = []
-
-@@ -822,7 +822,7 @@
- conn = self._revalidate_connection()
-
- context = constructor(dialect, self, conn, *args)
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(e,
- str(statement), parameters,
- None, None)
-@@ -865,7 +865,7 @@
- statement,
- parameters,
- context)
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(
- e,
- statement,
-@@ -939,7 +939,7 @@
- cursor,
- statement,
- parameters)
-- except Exception, e:
-+ except Exception as e:
- self._handle_dbapi_exception(
- e,
- statement,
-@@ -954,7 +954,7 @@
- """
- try:
- cursor.close()
-- except Exception, e:
-+ except Exception as e:
- try:
- ex_text = str(e)
- except TypeError:
-@@ -1045,7 +1045,7 @@
- Compiled: _execute_compiled,
- schema.SchemaItem: _execute_default,
- schema.DDLElement: _execute_ddl,
-- basestring: _execute_text
-+ str: _execute_text
- }
-
- def default_schema_name(self):
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/ddl.py
---- a/lib/sqlalchemy/engine/ddl.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/ddl.py Sat Apr 27 19:42:17 2013 -0400
-@@ -52,10 +52,10 @@
- if self.tables is not None:
- tables = self.tables
- else:
-- tables = metadata.tables.values()
-+ tables = list(metadata.tables.values())
- collection = [t for t in sql_util.sort_tables(tables)
- if self._can_create_table(t)]
-- seq_coll = [s for s in metadata._sequences.values()
-+ seq_coll = [s for s in list(metadata._sequences.values())
- if s.column is None and self._can_create_sequence(s)]
-
- metadata.dispatch.before_create(metadata, self.connection,
-@@ -120,7 +120,7 @@
- if self.tables is not None:
- tables = self.tables
- else:
-- tables = metadata.tables.values()
-+ tables = list(metadata.tables.values())
-
- collection = [
- t
-@@ -130,7 +130,7 @@
-
- seq_coll = [
- s
-- for s in metadata._sequences.values()
-+ for s in list(metadata._sequences.values())
- if s.column is None and self._can_drop_sequence(s)
- ]
-
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/default.py
---- a/lib/sqlalchemy/engine/default.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/default.py Sat Apr 27 19:42:17 2013 -0400
-@@ -57,17 +57,18 @@
- # *not* the FLOAT type however.
- supports_native_decimal = False
-
-- # Py3K
-- #supports_unicode_statements = True
-- #supports_unicode_binds = True
-- #returns_unicode_strings = True
-- #description_encoding = None
-- # Py2K
-- supports_unicode_statements = False
-- supports_unicode_binds = False
-- returns_unicode_strings = False
-- description_encoding = 'use_encoding'
-- # end Py2K
-+# start Py3K
-+ supports_unicode_statements = True
-+ supports_unicode_binds = True
-+ returns_unicode_strings = True
-+ description_encoding = None
-+# end Py3K
-+# start Py2K
-+# supports_unicode_statements = False
-+# supports_unicode_binds = False
-+# returns_unicode_strings = False
-+# description_encoding = 'use_encoding'
-+# end Py2K
-
- name = 'default'
-
-@@ -201,14 +202,15 @@
- return None
-
- def _check_unicode_returns(self, connection):
-- # Py2K
-- if self.supports_unicode_statements:
-- cast_to = unicode
-- else:
-- cast_to = str
-- # end Py2K
-- # Py3K
-- #cast_to = str
-+# start Py2K
-+# if self.supports_unicode_statements:
-+# cast_to = unicode
-+# else:
-+# cast_to = str
-+# end Py2K
-+# start Py3K
-+ cast_to = str
-+# end Py3K
-
- def check_unicode(formatstr, type_):
- cursor = connection.connection.cursor()
-@@ -226,8 +228,8 @@
- )
- row = cursor.fetchone()
-
-- return isinstance(row[0], unicode)
-- except self.dbapi.Error, de:
-+ return isinstance(row[0], str)
-+ except self.dbapi.Error as de:
- util.warn("Exception attempting to "
- "detect unicode returns: %r" % de)
- return False
-@@ -373,10 +375,10 @@
- self.execution_options.update(connection._execution_options)
-
- if not dialect.supports_unicode_statements:
-- self.unicode_statement = unicode(compiled)
-+ self.unicode_statement = str(compiled)
- self.statement = dialect._encoder(self.unicode_statement)[0]
- else:
-- self.statement = self.unicode_statement = unicode(compiled)
-+ self.statement = self.unicode_statement = str(compiled)
-
- self.cursor = self.create_cursor()
- self.compiled_parameters = []
-@@ -414,7 +416,7 @@
-
- self.result_map = compiled.result_map
-
-- self.unicode_statement = unicode(compiled)
-+ self.unicode_statement = str(compiled)
- if not dialect.supports_unicode_statements:
- self.statement = self.unicode_statement.encode(
- self.dialect.encoding)
-@@ -519,7 +521,7 @@
- self.executemany = len(parameters) > 1
-
- if not dialect.supports_unicode_statements and \
-- isinstance(statement, unicode):
-+ isinstance(statement, str):
- self.unicode_statement = statement
- self.statement = dialect._encoder(statement)[0]
- else:
-@@ -573,7 +575,7 @@
- """
-
- conn = self.root_connection
-- if isinstance(stmt, unicode) and \
-+ if isinstance(stmt, str) and \
- not self.dialect.supports_unicode_statements:
- stmt = self.dialect._encoder(stmt)[0]
-
-@@ -734,12 +736,12 @@
- inputsizes.append(dbtype)
- try:
- self.cursor.setinputsizes(*inputsizes)
-- except Exception, e:
-+ except Exception as e:
- self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
- else:
- inputsizes = {}
-- for key in self.compiled.bind_names.values():
-+ for key in list(self.compiled.bind_names.values()):
- typeengine = types[key]
- dbtype = typeengine.dialect_impl(self.dialect).\
- get_dbapi_type(self.dialect.dbapi)
-@@ -752,7 +754,7 @@
- inputsizes[key] = dbtype
- try:
- self.cursor.setinputsizes(**inputsizes)
-- except Exception, e:
-+ except Exception as e:
- self.root_connection._handle_dbapi_exception(
- e, None, None, None, self)
-
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/reflection.py
---- a/lib/sqlalchemy/engine/reflection.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/reflection.py Sat Apr 27 19:42:17 2013 -0400
-@@ -41,8 +41,8 @@
- return fn(self, con, *args, **kw)
- key = (
- fn.__name__,
-- tuple(a for a in args if isinstance(a, basestring)),
-- tuple((k, v) for k, v in kw.iteritems() if isinstance(v, (basestring, int, float)))
-+ tuple(a for a in args if isinstance(a, str)),
-+ tuple((k, v) for k, v in kw.items() if isinstance(v, (str, int, float)))
- )
- ret = info_cache.get(key)
- if ret is None:
-@@ -381,16 +381,16 @@
- # table.kwargs will need to be passed to each reflection method. Make
- # sure keywords are strings.
- tblkw = table.kwargs.copy()
-- for (k, v) in tblkw.items():
-+ for (k, v) in list(tblkw.items()):
- del tblkw[k]
- tblkw[str(k)] = v
-
-- # Py2K
-- if isinstance(schema, str):
-- schema = schema.decode(dialect.encoding)
-- if isinstance(table_name, str):
-- table_name = table_name.decode(dialect.encoding)
-- # end Py2K
-+# start Py2K
-+# if isinstance(schema, str):
-+# schema = schema.decode(dialect.encoding)
-+# if isinstance(table_name, str):
-+# table_name = table_name.decode(dialect.encoding)
-+# end Py2K
-
- # columns
- found_table = False
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/result.py
---- a/lib/sqlalchemy/engine/result.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/result.py Sat Apr 27 19:42:17 2013 -0400
-@@ -8,7 +8,7 @@
- and :class:`.RowProxy."""
-
-
--from itertools import izip
-+
- from .. import exc, types, util
- from ..sql import expression
- import collections
-@@ -55,7 +55,7 @@
- return list(self)
-
- def __iter__(self):
-- for processor, value in izip(self._processors, self._row):
-+ for processor, value in zip(self._processors, self._row):
- if processor is None:
- yield value
- else:
-@@ -72,7 +72,7 @@
- except TypeError:
- if isinstance(key, slice):
- l = []
-- for processor, value in izip(self._processors[key],
-+ for processor, value in zip(self._processors[key],
- self._row[key]):
- if processor is None:
- l.append(value)
-@@ -93,7 +93,7 @@
- def __getattr__(self, name):
- try:
- return self[name]
-- except KeyError, e:
-+ except KeyError as e:
- raise AttributeError(e.args[0])
-
-
-@@ -142,7 +142,7 @@
- def items(self):
- """Return a list of tuples, each tuple containing a key/value pair."""
- # TODO: no coverage here
-- return [(key, self[key]) for key in self.iterkeys()]
-+ return [(key, self[key]) for key in self.keys()]
-
- def keys(self):
- """Return the list of keys as strings represented by this RowProxy."""
-@@ -274,7 +274,7 @@
- def _key_fallback(self, key, raiseerr=True):
- map = self._keymap
- result = None
-- if isinstance(key, basestring):
-+ if isinstance(key, str):
- result = map.get(key if self.case_sensitive else key.lower())
- # fallback for targeting a ColumnElement to a textual expression
- # this is a rare use case which only occurs when matching text()
-@@ -328,8 +328,8 @@
- return {
- '_pickled_keymap': dict(
- (key, index)
-- for key, (processor, obj, index) in self._keymap.iteritems()
-- if isinstance(key, (basestring, int))
-+ for key, (processor, obj, index) in self._keymap.items()
-+ if isinstance(key, (str, int))
- ),
- 'keys': self.keys,
- "case_sensitive": self.case_sensitive,
-@@ -338,9 +338,9 @@
- def __setstate__(self, state):
- # the row has been processed at pickling time so we don't need any
- # processor anymore
-- self._processors = [None for _ in xrange(len(state['keys']))]
-+ self._processors = [None for _ in range(len(state['keys']))]
- self._keymap = keymap = {}
-- for key, index in state['_pickled_keymap'].iteritems():
-+ for key, index in state['_pickled_keymap'].items():
- # not preserving "obj" here, unfortunately our
- # proxy comparison fails with the unpickle
- keymap[key] = (None, None, index)
-@@ -440,7 +440,7 @@
- """
- try:
- return self.context.rowcount
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None, self.cursor, self.context)
-
-@@ -462,7 +462,7 @@
- """
- try:
- return self._saved_cursor.lastrowid
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self._saved_cursor, self.context)
-@@ -746,7 +746,7 @@
- l = self.process_rows(self._fetchall_impl())
- self.close()
- return l
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
-@@ -765,7 +765,7 @@
- if len(l) == 0:
- self.close()
- return l
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
-@@ -784,7 +784,7 @@
- else:
- self.close()
- return None
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
-@@ -800,7 +800,7 @@
-
- try:
- row = self._fetchone_impl()
-- except Exception, e:
-+ except Exception as e:
- self.connection._handle_dbapi_exception(
- e, None, None,
- self.cursor, self.context)
-@@ -966,9 +966,9 @@
- # constructed.
- metadata._orig_processors = metadata._processors
- # replace the all type processors by None processors.
-- metadata._processors = [None for _ in xrange(len(metadata.keys))]
-+ metadata._processors = [None for _ in range(len(metadata.keys))]
- keymap = {}
-- for k, (func, obj, index) in metadata._keymap.iteritems():
-+ for k, (func, obj, index) in metadata._keymap.items():
- keymap[k] = (None, obj, index)
- self._metadata._keymap = keymap
-
-@@ -989,7 +989,7 @@
- if size is None:
- return self.fetchall()
- l = []
-- for i in xrange(size):
-+ for i in range(size):
- row = self.fetchone()
- if row is None:
- break
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/strategies.py
---- a/lib/sqlalchemy/engine/strategies.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/strategies.py Sat Apr 27 19:42:17 2013 -0400
-@@ -78,20 +78,21 @@
- def connect():
- try:
- return dialect.connect(*cargs, **cparams)
-- except Exception, e:
-+ except Exception as e:
- invalidated = dialect.is_disconnect(e, None, None)
-- # Py3K
-- #raise exc.DBAPIError.instance(None, None,
-- # e, dialect.dbapi.Error,
-- # connection_invalidated=invalidated
-- #) from e
-- # Py2K
-- import sys
-- raise exc.DBAPIError.instance(
-- None, None, e, dialect.dbapi.Error,
-+# start Py3K
-+ raise exc.DBAPIError.instance(None, None,
-+ e, dialect.dbapi.Error,
- connection_invalidated=invalidated
-- ), None, sys.exc_info()[2]
-- # end Py2K
-+ ) from e
-+# end Py3K
-+# start Py2K
-+# import sys
-+# raise exc.DBAPIError.instance(
-+# None, None, e, dialect.dbapi.Error,
-+# connection_invalidated=invalidated
-+# ), None, sys.exc_info()[2]
-+# end Py2K
-
- creator = kwargs.pop('creator', connect)
-
-diff -r 9d0639b9d3be lib/sqlalchemy/engine/url.py
---- a/lib/sqlalchemy/engine/url.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/engine/url.py Sat Apr 27 19:42:17 2013 -0400
-@@ -14,7 +14,7 @@
- """
-
- import re
--import urllib
-+import urllib.request, urllib.parse, urllib.error
- from .. import exc, util
- from . import Dialect
-
-@@ -67,7 +67,7 @@
- if self.username is not None:
- s += self.username
- if self.password is not None:
-- s += ':' + urllib.quote_plus(self.password)
-+ s += ':' + urllib.parse.quote_plus(self.password)
- s += "@"
- if self.host is not None:
- s += self.host
-@@ -76,7 +76,7 @@
- if self.database is not None:
- s += '/' + self.database
- if self.query:
-- keys = self.query.keys()
-+ keys = list(self.query.keys())
- keys.sort()
- s += '?' + "&".join("%s=%s" % (k, self.query[k]) for k in keys)
- return s
-@@ -150,7 +150,7 @@
- existing URL object is passed, just returns the object.
- """
-
-- if isinstance(name_or_url, basestring):
-+ if isinstance(name_or_url, str):
- return _parse_rfc1738_args(name_or_url)
- else:
- return name_or_url
-@@ -177,17 +177,17 @@
- tokens = components['database'].split('?', 2)
- components['database'] = tokens[0]
- query = (len(tokens) > 1 and dict(util.parse_qsl(tokens[1]))) or None
-- # Py2K
-- if query is not None:
-- query = dict((k.encode('ascii'), query[k]) for k in query)
-- # end Py2K
-+# start Py2K
-+# if query is not None:
-+# query = dict((k.encode('ascii'), query[k]) for k in query)
-+# end Py2K
- else:
- query = None
- components['query'] = query
-
- if components['password'] is not None:
- components['password'] = \
-- urllib.unquote_plus(components['password'])
-+ urllib.parse.unquote_plus(components['password'])
-
- name = components.pop('name')
- return URL(name, **components)
-diff -r 9d0639b9d3be lib/sqlalchemy/event.py
---- a/lib/sqlalchemy/event.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/event.py Sat Apr 27 19:42:17 2013 -0400
-@@ -201,11 +201,9 @@
- del _registrars[k]
-
-
--class Events(object):
-+class Events(object, metaclass=_EventMeta):
- """Define event listening functions for a particular target type."""
-
-- __metaclass__ = _EventMeta
--
- @classmethod
- def _accept_with(cls, target):
- # Mapper, ClassManager, Session override this to
-@@ -302,7 +300,7 @@
- def clear(self):
- """Clear all class level listeners"""
-
-- for dispatcher in self._clslevel.values():
-+ for dispatcher in list(self._clslevel.values()):
- dispatcher[:] = []
-
- def for_modify(self, obj):
-@@ -377,7 +375,7 @@
- def __iter__(self):
- return iter(self.parent_listeners)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return bool(self.parent_listeners)
-
-
-@@ -414,7 +412,7 @@
- def __iter__(self):
- return chain(self.parent_listeners, self.listeners)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return bool(self.listeners or self.parent_listeners)
-
-
-diff -r 9d0639b9d3be lib/sqlalchemy/exc.py
---- a/lib/sqlalchemy/exc.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/exc.py Sat Apr 27 19:42:17 2013 -0400
-@@ -285,7 +285,7 @@
- text = str(orig)
- except (KeyboardInterrupt, SystemExit):
- raise
-- except Exception, e:
-+ except Exception as e:
- text = 'Error in str() of DB-API-generated exception: ' + str(e)
- StatementError.__init__(
- self,
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/associationproxy.py
---- a/lib/sqlalchemy/ext/associationproxy.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/associationproxy.py Sat Apr 27 19:42:17 2013 -0400
-@@ -475,7 +475,7 @@
- def __len__(self):
- return len(self.col)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return bool(self.col)
-
- def __getstate__(self):
-@@ -514,7 +514,7 @@
- stop = index.stop
- step = index.step or 1
-
-- rng = range(index.start or 0, stop, step)
-+ rng = list(range(index.start or 0, stop, step))
- if step == 1:
- for i in rng:
- del self[index.start]
-@@ -569,7 +569,7 @@
-
- def count(self, value):
- return sum([1 for _ in
-- itertools.ifilter(lambda v: v == value, iter(self))])
-+ filter(lambda v: v == value, iter(self))])
-
- def extend(self, values):
- for v in values:
-@@ -668,8 +668,8 @@
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
-- for func_name, func in locals().items():
-- if (util.callable(func) and func.func_name == func_name and
-+ for func_name, func in list(locals().items()):
-+ if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
- func.__doc__ = getattr(list, func_name).__doc__
- del func_name, func
-@@ -711,7 +711,7 @@
- return key in self.col
-
- def __iter__(self):
-- return self.col.iterkeys()
-+ return iter(self.col.keys())
-
- def clear(self):
- self.col.clear()
-@@ -738,7 +738,7 @@
- return cmp(dict(self), other)
-
- def __repr__(self):
-- return repr(dict(self.items()))
-+ return repr(dict(list(self.items())))
-
- def get(self, key, default=None):
- try:
-@@ -754,13 +754,13 @@
- return self[key]
-
- def keys(self):
-- return self.col.keys()
-+ return list(self.col.keys())
-
- def iterkeys(self):
-- return self.col.iterkeys()
-+ return iter(self.col.keys())
-
- def values(self):
-- return [self._get(member) for member in self.col.values()]
-+ return [self._get(member) for member in list(self.col.values())]
-
- def itervalues(self):
- for key in self.col:
-@@ -811,13 +811,13 @@
- self[key] = value
-
- def copy(self):
-- return dict(self.items())
-+ return dict(list(self.items()))
-
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
-- for func_name, func in locals().items():
-- if (util.callable(func) and func.func_name == func_name and
-+ for func_name, func in list(locals().items()):
-+ if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(dict, func_name)):
- func.__doc__ = getattr(dict, func_name).__doc__
- del func_name, func
-@@ -838,7 +838,7 @@
- def __len__(self):
- return len(self.col)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- if self.col:
- return True
- else:
-@@ -1014,8 +1014,8 @@
- def __hash__(self):
- raise TypeError("%s objects are unhashable" % type(self).__name__)
-
-- for func_name, func in locals().items():
-- if (util.callable(func) and func.func_name == func_name and
-+ for func_name, func in list(locals().items()):
-+ if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(set, func_name)):
- func.__doc__ = getattr(set, func_name).__doc__
- del func_name, func
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/declarative/api.py
---- a/lib/sqlalchemy/ext/declarative/api.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/declarative/api.py Sat Apr 27 19:42:17 2013 -0400
-@@ -424,7 +424,7 @@
- def prepare(cls, engine):
- """Reflect all :class:`.Table` objects for all current
- :class:`.DeferredReflection` subclasses"""
-- to_map = [m for m in _MapperConfig.configs.values()
-+ to_map = [m for m in list(_MapperConfig.configs.values())
- if issubclass(m.cls, cls)]
- for thingy in to_map:
- cls._sa_decl_prepare(thingy.local_table, engine)
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/declarative/base.py
---- a/lib/sqlalchemy/ext/declarative/base.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/declarative/base.py Sat Apr 27 19:42:17 2013 -0400
-@@ -57,7 +57,7 @@
-
- class_mapped = _declared_mapping_info(base) is not None
-
-- for name, obj in vars(base).items():
-+ for name, obj in list(vars(base).items()):
- if name == '__mapper_args__':
- if not mapper_args_fn and (
- not class_mapped or
-@@ -129,7 +129,7 @@
- ret.doc = obj.__doc__
-
- # apply inherited columns as we should
-- for k, v in potential_columns.items():
-+ for k, v in list(potential_columns.items()):
- dict_[k] = v
-
- if inherited_table_args and not tablename:
-@@ -173,7 +173,7 @@
-
- # extract columns from the class dict
- declared_columns = set()
-- for key, c in our_stuff.iteritems():
-+ for key, c in our_stuff.items():
- if isinstance(c, (ColumnProperty, CompositeProperty)):
- for col in c.columns:
- if isinstance(col, Column) and \
-@@ -354,7 +354,7 @@
- # in which case the mapper makes this combination).
- # See if the superclass has a similar column property.
- # If so, join them together.
-- for k, col in properties.items():
-+ for k, col in list(properties.items()):
- if not isinstance(col, expression.ColumnElement):
- continue
- if k in inherited_mapper._props:
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/declarative/clsregistry.py
---- a/lib/sqlalchemy/ext/declarative/clsregistry.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/declarative/clsregistry.py Sat Apr 27 19:42:17 2013 -0400
-@@ -255,7 +255,7 @@
- return x.cls
- else:
- return x
-- except NameError, n:
-+ except NameError as n:
- raise exc.InvalidRequestError(
- "When initializing mapper %s, expression %r failed to "
- "locate a name (%r). If this is a class name, consider "
-@@ -275,14 +275,14 @@
- for attr in ('argument', 'order_by', 'primaryjoin', 'secondaryjoin',
- 'secondary', '_user_defined_foreign_keys', 'remote_side'):
- v = getattr(prop, attr)
-- if isinstance(v, basestring):
-+ if isinstance(v, str):
- setattr(prop, attr, resolve_arg(v))
-
- if prop.backref and isinstance(prop.backref, tuple):
- key, kwargs = prop.backref
- for attr in ('primaryjoin', 'secondaryjoin', 'secondary',
- 'foreign_keys', 'remote_side', 'order_by'):
-- if attr in kwargs and isinstance(kwargs[attr], basestring):
-+ if attr in kwargs and isinstance(kwargs[attr], str):
- kwargs[attr] = resolve_arg(kwargs[attr])
-
- return prop
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/mutable.py
---- a/lib/sqlalchemy/ext/mutable.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/mutable.py Sat Apr 27 19:42:17 2013 -0400
-@@ -485,7 +485,7 @@
- def changed(self):
- """Subclasses should call this method whenever change events occur."""
-
-- for parent, key in self._parents.items():
-+ for parent, key in list(self._parents.items()):
- flag_modified(parent, key)
-
- @classmethod
-@@ -579,7 +579,7 @@
- def changed(self):
- """Subclasses should call this method whenever change events occur."""
-
-- for parent, key in self._parents.items():
-+ for parent, key in list(self._parents.items()):
-
- prop = object_mapper(parent).get_property(key)
- for value, attr_name in zip(
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/orderinglist.py
---- a/lib/sqlalchemy/ext/orderinglist.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/orderinglist.py Sat Apr 27 19:42:17 2013 -0400
-@@ -324,7 +324,7 @@
- if stop < 0:
- stop += len(self)
-
-- for i in xrange(start, stop, step):
-+ for i in range(start, stop, step):
- self.__setitem__(i, entity[i])
- else:
- self._order_entity(index, entity, True)
-@@ -334,21 +334,21 @@
- super(OrderingList, self).__delitem__(index)
- self._reorder()
-
-- # Py2K
-- def __setslice__(self, start, end, values):
-- super(OrderingList, self).__setslice__(start, end, values)
-- self._reorder()
--
-- def __delslice__(self, start, end):
-- super(OrderingList, self).__delslice__(start, end)
-- self._reorder()
-- # end Py2K
-+# start Py2K
-+# def __setslice__(self, start, end, values):
-+# super(OrderingList, self).__setslice__(start, end, values)
-+# self._reorder()
-+#
-+# def __delslice__(self, start, end):
-+# super(OrderingList, self).__delslice__(start, end)
-+# self._reorder()
-+# end Py2K
-
- def __reduce__(self):
- return _reconstitute, (self.__class__, self.__dict__, list(self))
-
-- for func_name, func in locals().items():
-- if (util.callable(func) and func.func_name == func_name and
-+ for func_name, func in list(locals().items()):
-+ if (util.callable(func) and func.__name__ == func_name and
- not func.__doc__ and hasattr(list, func_name)):
- func.__doc__ = getattr(list, func_name).__doc__
- del func_name, func
-diff -r 9d0639b9d3be lib/sqlalchemy/ext/serializer.py
---- a/lib/sqlalchemy/ext/serializer.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/ext/serializer.py Sat Apr 27 19:42:17 2013 -0400
-@@ -61,20 +61,22 @@
- from ..util import pickle
- import re
- import base64
--# Py3K
--#from io import BytesIO as byte_buffer
--# Py2K
--from cStringIO import StringIO as byte_buffer
-+# start Py3K
-+from io import BytesIO as byte_buffer
-+# end Py3K
-+# start Py2K
-+#from cStringIO import StringIO as byte_buffer
- # end Py2K
-
--# Py3K
--#def b64encode(x):
--# return base64.b64encode(x).decode('ascii')
--#def b64decode(x):
--# return base64.b64decode(x.encode('ascii'))
--# Py2K
--b64encode = base64.b64encode
--b64decode = base64.b64decode
-+# start Py3K
-+def b64encode(x):
-+ return base64.b64encode(x).decode('ascii')
-+def b64decode(x):
-+ return base64.b64decode(x.encode('ascii'))
-+# end Py3K
-+# start Py2K
-+#b64encode = base64.b64encode
-+#b64decode = base64.b64decode
- # end Py2K
-
- __all__ = ['Serializer', 'Deserializer', 'dumps', 'loads']
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/__init__.py
---- a/lib/sqlalchemy/orm/__init__.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/__init__.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1628,7 +1628,7 @@
- alias = kwargs.pop('alias', None)
- if kwargs:
- raise exc.ArgumentError(
-- 'Invalid kwargs for contains_eager: %r' % kwargs.keys())
-+ 'Invalid kwargs for contains_eager: %r' % list(kwargs.keys()))
- return strategies.EagerLazyOption(keys, lazy='joined',
- propagate_to_loaders=False, chained=True), \
- strategies.LoadEagerFromAliasOption(keys, alias=alias, chained=True)
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/attributes.py
---- a/lib/sqlalchemy/orm/attributes.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/attributes.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1214,7 +1214,7 @@
-
- """
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return self != HISTORY_BLANK
-
- def empty(self):
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/collections.py
---- a/lib/sqlalchemy/orm/collections.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/collections.py Sat Apr 27 19:42:17 2013 -0400
-@@ -657,11 +657,12 @@
- if getattr(obj, '_sa_adapter', None) is not None:
- return getattr(obj, '_sa_adapter')
- elif setting_type == dict:
-- # Py3K
-- #return obj.values()
-- # Py2K
-- return getattr(obj, 'itervalues', getattr(obj, 'values'))()
-- # end Py2K
-+# start Py3K
-+ return list(obj.values())
-+# end Py3K
-+# start Py2K
-+# return getattr(obj, 'itervalues', getattr(obj, 'values'))()
-+# end Py2K
- else:
- return iter(obj)
-
-@@ -705,14 +706,15 @@
- def __iter__(self):
- """Iterate over entities in the collection."""
-
-- # Py3K requires iter() here
-+# start Py3K
-+# end Py3K
- return iter(getattr(self._data(), '_sa_iterator')())
-
- def __len__(self):
- """Count entities in the collection."""
- return len(list(getattr(self._data(), '_sa_iterator')()))
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return True
-
- def fire_append_event(self, item, initiator=None):
-@@ -883,7 +885,7 @@
- # search for _sa_instrument_role-decorated methods in
- # method resolution order, assign to roles
- for supercls in cls.__mro__:
-- for name, method in vars(supercls).items():
-+ for name, method in list(vars(supercls).items()):
- if not util.callable(method):
- continue
-
-@@ -917,11 +919,11 @@
- collection_type = util.duck_type_collection(cls)
- if collection_type in __interfaces:
- canned_roles, decorators = __interfaces[collection_type]
-- for role, name in canned_roles.items():
-+ for role, name in list(canned_roles.items()):
- roles.setdefault(role, name)
-
- # apply ABC auto-decoration to methods that need it
-- for method, decorator in decorators.items():
-+ for method, decorator in list(decorators.items()):
- fn = getattr(cls, method, None)
- if (fn and method not in methods and
- not hasattr(fn, '_sa_instrumented')):
-@@ -952,12 +954,12 @@
-
- # apply ad-hoc instrumentation from decorators, class-level defaults
- # and implicit role declarations
-- for method_name, (before, argument, after) in methods.items():
-+ for method_name, (before, argument, after) in list(methods.items()):
- setattr(cls, method_name,
- _instrument_membership_mutator(getattr(cls, method_name),
- before, argument, after))
- # intern the role map
-- for role, method_name in roles.items():
-+ for role, method_name in list(roles.items()):
- setattr(cls, '_sa_%s' % role, getattr(cls, method_name))
-
- setattr(cls, '_sa_instrumented', id(cls))
-@@ -1094,14 +1096,14 @@
- stop += len(self)
-
- if step == 1:
-- for i in xrange(start, stop, step):
-+ for i in range(start, stop, step):
- if len(self) > start:
- del self[start]
-
- for i, item in enumerate(value):
- self.insert(i + start, item)
- else:
-- rng = range(start, stop, step)
-+ rng = list(range(start, stop, step))
- if len(value) != len(rng):
- raise ValueError(
- "attempt to assign sequence of size %s to "
-@@ -1128,24 +1130,24 @@
- _tidy(__delitem__)
- return __delitem__
-
-- # Py2K
-- def __setslice__(fn):
-- def __setslice__(self, start, end, values):
-- for value in self[start:end]:
-- __del(self, value)
-- values = [__set(self, value) for value in values]
-- fn(self, start, end, values)
-- _tidy(__setslice__)
-- return __setslice__
--
-- def __delslice__(fn):
-- def __delslice__(self, start, end):
-- for value in self[start:end]:
-- __del(self, value)
-- fn(self, start, end)
-- _tidy(__delslice__)
-- return __delslice__
-- # end Py2K
-+# start Py2K
-+# def __setslice__(fn):
-+# def __setslice__(self, start, end, values):
-+# for value in self[start:end]:
-+# __del(self, value)
-+# values = [__set(self, value) for value in values]
-+# fn(self, start, end, values)
-+# _tidy(__setslice__)
-+# return __setslice__
-+#
-+# def __delslice__(fn):
-+# def __delslice__(self, start, end):
-+# for value in self[start:end]:
-+# __del(self, value)
-+# fn(self, start, end)
-+# _tidy(__delslice__)
-+# return __delslice__
-+# end Py2K
-
- def extend(fn):
- def extend(self, iterable):
-@@ -1251,7 +1253,7 @@
- def update(self, __other=Unspecified, **kw):
- if __other is not Unspecified:
- if hasattr(__other, 'keys'):
-- for key in __other.keys():
-+ for key in list(__other.keys()):
- if (key not in self or
- self[key] is not __other[key]):
- self[key] = __other[key]
-@@ -1467,11 +1469,12 @@
- ),
-
- # decorators are required for dicts and object collections.
-- # Py3K
-- #dict: ({'iterator': 'values'}, _dict_decorators()),
-- # Py2K
-- dict: ({'iterator': 'itervalues'}, _dict_decorators()),
-- # end Py2K
-+# start Py3K
-+ dict: ({'iterator': 'values'}, _dict_decorators()),
-+# end Py3K
-+# start Py2K
-+# dict: ({'iterator': 'itervalues'}, _dict_decorators()),
-+# end Py2K
- }
-
-
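Most of the collection hunks above wrap dict iteration in list(...). A minimal sketch of why the snapshot matters on Python 3, where .items() is a live view (plain illustrative Python, not code from the patch):

    # Hypothetical example: mutating a dict while iterating its live view
    # raises RuntimeError on Python 3, so the converted code snapshots first.
    roles = {'appender': 'append', 'remover': 'remove'}

    for role, name in list(roles.items()):      # snapshot, safe to mutate below
        roles.setdefault('_sa_%s' % role, name)

    # for role, name in roles.items():          # live view: RuntimeError once
    #     roles['_sa_%s' % role] = name         # the dict changes size
    print(sorted(roles))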
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/descriptor_props.py
---- a/lib/sqlalchemy/orm/descriptor_props.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/descriptor_props.py Sat Apr 27 19:42:17 2013 -0400
-@@ -184,7 +184,7 @@
- def _init_props(self):
- self.props = props = []
- for attr in self.attrs:
-- if isinstance(attr, basestring):
-+ if isinstance(attr, str):
- prop = self.parent.get_property(attr)
- elif isinstance(attr, schema.Column):
- prop = self.parent._columntoproperty[attr]
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/evaluator.py
---- a/lib/sqlalchemy/orm/evaluator.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/evaluator.py Sat Apr 27 19:42:17 2013 -0400
-@@ -13,9 +13,9 @@
-
- _straight_ops = set(getattr(operators, op)
- for op in ('add', 'mul', 'sub',
-- # Py2K
-- 'div',
-- # end Py2K
-+# start Py2K
-+# 'div',
-+# end Py2K
- 'mod', 'truediv',
- 'lt', 'le', 'ne', 'gt', 'ge', 'eq'))
-
-@@ -50,7 +50,7 @@
- return lambda obj: get_corresponding_attr(obj)
-
- def visit_clauselist(self, clause):
-- evaluators = map(self.process, clause.clauses)
-+ evaluators = list(map(self.process, clause.clauses))
- if clause.operator is operators.or_:
- def evaluate(obj):
- has_null = False
-@@ -79,8 +79,8 @@
- return evaluate
-
- def visit_binary(self, clause):
-- eval_left, eval_right = map(self.process,
-- [clause.left, clause.right])
-+ eval_left, eval_right = list(map(self.process,
-+ [clause.left, clause.right]))
- operator = clause.operator
- if operator is operators.is_:
- def evaluate(obj):
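The evaluator hunks wrap map() in list() because Python 3's map() is a lazy, single-pass iterator; a quick illustration with assumed values (not SQLAlchemy code):

    evaluators = map(lambda clause: clause * 2, [1, 2, 3])
    print(sum(evaluators))                      # 12
    print(sum(evaluators))                      # 0 -- already exhausted on Python 3

    evaluators = list(map(lambda clause: clause * 2, [1, 2, 3]))
    print(sum(evaluators), sum(evaluators))     # 12 12 -- reusable, as the ORM needs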
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/identity.py
---- a/lib/sqlalchemy/orm/identity.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/identity.py Sat Apr 27 19:42:17 2013 -0400
-@@ -75,7 +75,7 @@
- state = dict.__getitem__(self, key)
- o = state.obj()
- if o is None:
-- raise KeyError, key
-+ raise KeyError(key)
- return o
-
- def __contains__(self, key):
-@@ -152,30 +152,32 @@
-
- return result
-
-- # Py3K
-- #def items(self):
-- # return iter(self._items())
-- #
-- #def values(self):
-- # return iter(self._values())
-- # Py2K
-- items = _items
--
-- def iteritems(self):
-- return iter(self.items())
--
-- values = _values
--
-- def itervalues(self):
-- return iter(self.values())
-- # end Py2K
-+# start Py3K
-+ def items(self):
-+ return iter(self._items())
-+
-+ def values(self):
-+ return iter(self._values())
-+# end Py3K
-+# start Py2K
-+# items = _items
-+#
-+# def iteritems(self):
-+# return iter(self.items())
-+#
-+# values = _values
-+#
-+# def itervalues(self):
-+# return iter(self.values())
-+# end Py2K
-
- def all_states(self):
-- # Py3K
-- # return list(dict.values(self))
-- # Py2K
-- return dict.values(self)
-- # end Py2K
-+# start Py3K
-+ return list(dict.values(self))
-+# end Py3K
-+# start Py2K
-+# return dict.values(self)
-+# end Py2K
-
- def discard(self, state):
- st = dict.get(self, state.key, None)
-@@ -189,7 +191,7 @@
-
- class StrongInstanceDict(IdentityMap):
- def all_states(self):
-- return [attributes.instance_state(o) for o in self.itervalues()]
-+ return [attributes.instance_state(o) for o in self.values()]
-
- def contains_state(self, state):
- return (
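Two separate Python 3 changes drive the identity-map hunks: the tuple form of raise is gone, and dict.values() no longer returns a list. A small sketch with made-up key values:

    # raise KeyError, key                 # Python 2 only -- SyntaxError on Python 3
    key = ('User', (5,))
    try:
        raise KeyError(key)               # the form the converted code uses
    except KeyError as err:
        print(err.args[0])

    states = {'a': 1, 'b': 2}
    print(isinstance(states.values(), list))         # False: a view, not a list
    print(isinstance(list(states.values()), list))   # True: what all_states() returns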
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/instrumentation.py
---- a/lib/sqlalchemy/orm/instrumentation.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/instrumentation.py Sat Apr 27 19:42:17 2013 -0400
-@@ -279,7 +279,7 @@
-
- @property
- def attributes(self):
-- return self.itervalues()
-+ return iter(self.values())
-
- ## InstanceState management
-
-@@ -325,7 +325,7 @@
- """TODO"""
- return self.get_impl(key).hasparent(state, optimistic=optimistic)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- """All ClassManagers are non-zero regardless of attribute state."""
- return True
-
-@@ -444,21 +444,23 @@
- func_vars = util.format_argspec_init(original__init__, grouped=False)
- func_text = func_body % func_vars
-
-- # Py3K
-- #func_defaults = getattr(original__init__, '__defaults__', None)
-- #func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
-- # Py2K
-- func = getattr(original__init__, 'im_func', original__init__)
-- func_defaults = getattr(func, 'func_defaults', None)
-- # end Py2K
-+# start Py3K
-+ func_defaults = getattr(original__init__, '__defaults__', None)
-+ func_kw_defaults = getattr(original__init__, '__kwdefaults__', None)
-+# end Py3K
-+# start Py2K
-+# func = getattr(original__init__, 'im_func', original__init__)
-+# func_defaults = getattr(func, 'func_defaults', None)
-+# end Py2K
-
- env = locals().copy()
-- exec func_text in env
-+ exec(func_text, env)
- __init__ = env['__init__']
- __init__.__doc__ = original__init__.__doc__
- if func_defaults:
-- __init__.func_defaults = func_defaults
-- # Py3K
-- #if func_kw_defaults:
-- # __init__.__kwdefaults__ = func_kw_defaults
-+ __init__.__defaults__ = func_defaults
-+# start Py3K
-+ if func_kw_defaults:
-+ __init__.__kwdefaults__ = func_kw_defaults
-+# end Py3K
- return __init__
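The __init__-decoration hunk switches to the exec() function and to the dunder names for function defaults. A standalone sketch of the same mechanics (the generated signature here is invented for illustration):

    func_text = (
        "def __init__(self, name='anon', *, flag=True):\n"
        "    self.name = name\n"
        "    self.flag = flag\n"
    )
    env = {}
    exec(func_text, env)                  # 'exec func_text in env' is Python 2 only
    generated = env['__init__']
    print(generated.__defaults__)         # ('anon',)      -- func_defaults on Python 2
    print(generated.__kwdefaults__)       # {'flag': True} -- keyword-only, Python 3 only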
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/interfaces.py
---- a/lib/sqlalchemy/orm/interfaces.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/interfaces.py Sat Apr 27 19:42:17 2013 -0400
-@@ -15,7 +15,7 @@
- classes within should be considered mostly private.
-
- """
--from __future__ import absolute_import
-+
-
- from .. import exc as sa_exc, util, inspect
- from ..sql import operators
-@@ -659,7 +659,7 @@
- tokens = deque(self.key)
- while tokens:
- token = tokens.popleft()
-- if isinstance(token, basestring):
-+ if isinstance(token, str):
- # wildcard token
- if token.endswith(':*'):
- return [path.token(token)]
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/loading.py
---- a/lib/sqlalchemy/orm/loading.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/loading.py Sat Apr 27 19:42:17 2013 -0400
-@@ -11,7 +11,7 @@
- as well as some of the attribute loading strategies.
-
- """
--from __future__ import absolute_import
-+
-
- from .. import util
- from . import attributes, exc as orm_exc, state as statelib
-@@ -47,11 +47,11 @@
- query._entities[0].mapper.dispatch.append_result
-
- (process, labels) = \
-- zip(*[
-+ list(zip(*[
- query_entity.row_processor(query,
- context, custom_rows)
- for query_entity in query._entities
-- ])
-+ ]))
-
- while True:
- context.progress = {}
-@@ -84,11 +84,11 @@
- context.progress.pop(context.refresh_state)
-
- statelib.InstanceState._commit_all_states(
-- context.progress.items(),
-+ list(context.progress.items()),
- session.identity_map
- )
-
-- for state, (dict_, attrs) in context.partials.iteritems():
-+ for state, (dict_, attrs) in context.partials.items():
- state._commit(dict_, attrs)
-
- for row in rows:
-@@ -507,7 +507,7 @@
- pops = (new_populators, existing_populators, delayed_populators,
- eager_populators)
-
-- for prop in mapper._props.itervalues():
-+ for prop in mapper._props.values():
-
- for i, pop in enumerate(prop.create_row_processor(
- context,
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/mapper.py
---- a/lib/sqlalchemy/orm/mapper.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/mapper.py Sat Apr 27 19:42:17 2013 -0400
-@@ -13,7 +13,7 @@
- available in :class:`~sqlalchemy.orm.`.
-
- """
--from __future__ import absolute_import
-+
- import types
- import weakref
- from itertools import chain
-@@ -581,7 +581,7 @@
- if with_polymorphic == '*':
- self.with_polymorphic = ('*', None)
- elif isinstance(with_polymorphic, (tuple, list)):
-- if isinstance(with_polymorphic[0], (basestring, tuple, list)):
-+ if isinstance(with_polymorphic[0], (str, tuple, list)):
- self.with_polymorphic = with_polymorphic
- else:
- self.with_polymorphic = (with_polymorphic, None)
-@@ -626,7 +626,7 @@
- self.inherits._inheriting_mappers.add(self)
- self.passive_updates = self.inherits.passive_updates
- self._all_tables = self.inherits._all_tables
-- for key, prop in mapper._props.iteritems():
-+ for key, prop in mapper._props.items():
- if key not in self._props and \
- not self._should_exclude(key, key, local=False,
- column=None):
-@@ -866,12 +866,12 @@
-
- # load custom properties
- if self._init_properties:
-- for key, prop in self._init_properties.iteritems():
-+ for key, prop in self._init_properties.items():
- self._configure_property(key, prop, False)
-
- # pull properties from the inherited mapper if any.
- if self.inherits:
-- for key, prop in self.inherits._props.iteritems():
-+ for key, prop in self.inherits._props.items():
- if key not in self._props and \
- not self._should_exclude(key, key, local=False,
- column=None):
-@@ -919,7 +919,7 @@
- if self.polymorphic_on is not None:
- setter = True
-
-- if isinstance(self.polymorphic_on, basestring):
-+ if isinstance(self.polymorphic_on, str):
-                 # polymorphic_on specified as a string - link
- # it to mapped ColumnProperty
- try:
-@@ -1235,7 +1235,7 @@
- """
-
- self._log("_post_configure_properties() started")
-- l = [(key, prop) for key, prop in self._props.iteritems()]
-+ l = [(key, prop) for key, prop in self._props.items()]
- for key, prop in l:
- self._log("initialize prop %s", key)
-
-@@ -1253,7 +1253,7 @@
- using `add_property`.
-
- """
-- for key, value in dict_of_properties.iteritems():
-+ for key, value in dict_of_properties.items():
- self.add_property(key, value)
-
- def add_property(self, key, prop):
-@@ -1350,7 +1350,7 @@
- """return an iterator of all MapperProperty objects."""
- if _new_mappers:
- configure_mappers()
-- return self._props.itervalues()
-+ return iter(self._props.values())
-
- def _mappers_from_spec(self, spec, selectable):
- """given a with_polymorphic() argument, return the set of mappers it
-@@ -1623,7 +1623,7 @@
- if _new_mappers:
- configure_mappers()
- return util.ImmutableProperties(util.OrderedDict(
-- (k, v) for k, v in self._props.iteritems()
-+ (k, v) for k, v in self._props.items()
- if isinstance(v, type_)
- ))
-
-@@ -1972,7 +1972,7 @@
- visited_states = set()
- prp, mpp = object(), object()
-
-- visitables = deque([(deque(self._props.values()), prp,
-+ visitables = deque([(deque(list(self._props.values())), prp,
- state, state.dict)])
-
- while visitables:
-@@ -1994,7 +1994,7 @@
- corresponding_dict = iterator.popleft()
- yield instance, instance_mapper, \
- corresponding_state, corresponding_dict
-- visitables.append((deque(instance_mapper._props.values()),
-+ visitables.append((deque(list(instance_mapper._props.values())),
- prp, corresponding_state,
- corresponding_dict))
-
-@@ -2011,7 +2011,7 @@
- table_to_mapper.setdefault(t, mapper)
-
- extra_dependencies = []
-- for table, mapper in table_to_mapper.items():
-+ for table, mapper in list(table_to_mapper.items()):
- super_ = mapper.inherits
- if super_:
- extra_dependencies.extend([
-@@ -2040,7 +2040,7 @@
- return fk.parent not in cols
- return False
-
-- sorted_ = sql_util.sort_tables(table_to_mapper.iterkeys(),
-+ sorted_ = sql_util.sort_tables(iter(table_to_mapper.keys()),
- skip_fn=skip,
- extra_dependencies=extra_dependencies)
-
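mapper.py replaces every basestring check with str. Code that still has to straddle both interpreters usually goes through a small shim instead; this is an assumption for illustration, not what the patch does, since the converted tree targets Python 3 only:

    import sys

    if sys.version_info[0] >= 3:
        string_types = (str,)             # bytes deliberately excluded
    else:
        string_types = (basestring,)      # noqa: F821 -- Python 2 name

    def is_identifier(value):
        return isinstance(value, string_types)

    print(is_identifier("polymorphic_on"), is_identifier(42))   # True False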
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/persistence.py
---- a/lib/sqlalchemy/orm/persistence.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/persistence.py Sat Apr 27 19:42:17 2013 -0400
-@@ -45,7 +45,7 @@
-
- cached_connections = _cached_connection_dict(base_mapper)
-
-- for table, mapper in base_mapper._sorted_tables.iteritems():
-+ for table, mapper in base_mapper._sorted_tables.items():
- insert = _collect_insert_commands(base_mapper, uowtransaction,
- table, states_to_insert)
-
-@@ -77,7 +77,7 @@
- base_mapper,
- states, uowtransaction)
-
-- for table, mapper in base_mapper._sorted_tables.iteritems():
-+ for table, mapper in base_mapper._sorted_tables.items():
- update = _collect_post_update_commands(base_mapper, uowtransaction,
- table, states_to_update,
- post_update_cols)
-@@ -105,7 +105,7 @@
-
- table_to_mapper = base_mapper._sorted_tables
-
-- for table in reversed(table_to_mapper.keys()):
-+ for table in reversed(list(table_to_mapper.keys())):
- delete = _collect_delete_commands(base_mapper, uowtransaction,
- table, states_to_delete)
-
-@@ -318,7 +318,7 @@
- # history is only
- # in a different table than the one
- # where the version_id_col is.
-- for prop in mapper._columntoproperty.itervalues():
-+ for prop in mapper._columntoproperty.values():
- history = attributes.get_state_history(
- state, prop.key,
- attributes.PASSIVE_NO_INITIALIZE)
-@@ -526,7 +526,7 @@
- for (connection, pkeys, hasvalue, has_all_pks), \
- records in groupby(insert,
- lambda rec: (rec[4],
-- rec[2].keys(),
-+ list(rec[2].keys()),
- bool(rec[5]),
- rec[6])
- ):
-@@ -612,7 +612,7 @@
- # also group them into common (connection, cols) sets
- # to support executemany().
- for key, grouper in groupby(
-- update, lambda rec: (rec[4], rec[2].keys())
-+ update, lambda rec: (rec[4], list(rec[2].keys()))
- ):
- connection = key[0]
- multiparams = [params for state, state_dict,
-@@ -646,7 +646,7 @@
-
- return table.delete(clause)
-
-- for connection, del_objects in delete.iteritems():
-+ for connection, del_objects in delete.items():
- statement = base_mapper._memo(('delete', table), delete_stmt)
-
- connection = cached_connections[connection]
-@@ -803,7 +803,7 @@
- raise sa_exc.ArgumentError(
- "Valid strategies for session synchronization "
- "are %s" % (", ".join(sorted(repr(x)
-- for x in lookup.keys()))))
-+ for x in list(lookup.keys())))))
- else:
- return klass(*arg)
-
-@@ -868,7 +868,7 @@
- #TODO: detect when the where clause is a trivial primary key match
- self.matched_objects = [
- obj for (cls, pk), obj in
-- query.session.identity_map.iteritems()
-+ query.session.identity_map.items()
- if issubclass(cls, target_cls) and
- eval_condition(obj)]
-
-@@ -951,7 +951,7 @@
-
- def _additional_evaluators(self, evaluator_compiler):
- self.value_evaluators = {}
-- for key, value in self.values.iteritems():
-+ for key, value in self.values.items():
- key = _attr_as_key(key)
- self.value_evaluators[key] = evaluator_compiler.process(
- expression._literal_as_binds(value))
-@@ -959,7 +959,7 @@
- def _do_post_synchronize(self):
- session = self.query.session
- states = set()
-- evaluated_keys = self.value_evaluators.keys()
-+ evaluated_keys = list(self.value_evaluators.keys())
- for obj in self.matched_objects:
- state, dict_ = attributes.instance_state(obj),\
- attributes.instance_dict(obj)
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/properties.py
---- a/lib/sqlalchemy/orm/properties.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/properties.py Sat Apr 27 19:42:17 2013 -0400
-@@ -27,7 +27,7 @@
- mapperlib = util.importlater("sqlalchemy.orm", "mapperlib")
- NoneType = type(None)
-
--from descriptor_props import CompositeProperty, SynonymProperty, \
-+from .descriptor_props import CompositeProperty, SynonymProperty, \
- ComparableProperty, ConcreteInheritedProperty
-
- __all__ = ['ColumnProperty', 'CompositeProperty', 'SynonymProperty',
-@@ -1204,7 +1204,7 @@
- if not self.is_primary():
- return
- if self.backref is not None and not self.back_populates:
-- if isinstance(self.backref, basestring):
-+ if isinstance(self.backref, str):
- backref_key, kwargs = self.backref, {}
- else:
- backref_key, kwargs = self.backref
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/query.py
---- a/lib/sqlalchemy/orm/query.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/query.py Sat Apr 27 19:42:17 2013 -0400
-@@ -47,7 +47,7 @@
- def generate(fn, *args, **kw):
- self = args[0]._clone()
- for assertion in assertions:
-- assertion(self, fn.func_name)
-+ assertion(self, fn.__name__)
- fn(self, *args[1:], **kw)
- return self
- return generate
-@@ -981,11 +981,12 @@
- """Return a scalar result corresponding to the given
- column expression."""
- try:
-- # Py3K
-- #return self.values(column).__next__()[0]
-- # Py2K
-- return self.values(column).next()[0]
-- # end Py2K
-+# start Py3K
-+ return self.values(column).__next__()[0]
-+# end Py3K
-+# start Py2K
-+# return self.values(column).next()[0]
-+# end Py2K
- except StopIteration:
- return None
-
-@@ -1231,7 +1232,7 @@
- """
-
- clauses = [_entity_descriptor(self._joinpoint_zero(), key) == value
-- for key, value in kwargs.iteritems()]
-+ for key, value in kwargs.items()]
- return self.filter(sql.and_(*clauses))
-
- @_generative(_no_statement_condition, _no_limit_offset)
-@@ -1296,7 +1297,7 @@
-
- """
-
-- if isinstance(criterion, basestring):
-+ if isinstance(criterion, str):
- criterion = sql.text(criterion)
-
- if criterion is not None and \
-@@ -1655,7 +1656,7 @@
- kwargs.pop('from_joinpoint', False)
- if kwargs:
- raise TypeError("unknown arguments: %s" %
-- ','.join(kwargs.iterkeys()))
-+ ','.join(iter(kwargs.keys())))
- return self._join(props,
- outerjoin=False, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
-@@ -1671,7 +1672,7 @@
- kwargs.pop('from_joinpoint', False)
- if kwargs:
- raise TypeError("unknown arguments: %s" %
-- ','.join(kwargs.iterkeys()))
-+ ','.join(iter(kwargs.keys())))
- return self._join(props,
- outerjoin=True, create_aliases=aliased,
- from_joinpoint=from_joinpoint)
-@@ -1701,7 +1702,7 @@
- if len(keys) == 2 and \
- isinstance(keys[0], (expression.FromClause,
- type, AliasedClass)) and \
-- isinstance(keys[1], (basestring, expression.ClauseElement,
-+ isinstance(keys[1], (str, expression.ClauseElement,
- interfaces.PropComparator)):
- # detect 2-arg form of join and
- # convert to a tuple.
-@@ -1721,14 +1722,14 @@
- # is a little bit of legacy behavior still at work here
- # which means they might be in either order. may possibly
- # lock this down to (right_entity, onclause) in 0.6.
-- if isinstance(arg1, (interfaces.PropComparator, basestring)):
-+ if isinstance(arg1, (interfaces.PropComparator, str)):
- right_entity, onclause = arg2, arg1
- else:
- right_entity, onclause = arg1, arg2
-
- left_entity = prop = None
-
-- if isinstance(onclause, basestring):
-+ if isinstance(onclause, str):
- left_entity = self._joinpoint_zero()
-
- descriptor = _entity_descriptor(left_entity, onclause)
-@@ -1922,7 +1923,7 @@
- clause = orm_join(clause,
- right,
- onclause, isouter=outerjoin)
-- except sa_exc.ArgumentError, ae:
-+ except sa_exc.ArgumentError as ae:
- raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
-@@ -1947,7 +1948,7 @@
-
- try:
- clause = orm_join(clause, right, onclause, isouter=outerjoin)
-- except sa_exc.ArgumentError, ae:
-+ except sa_exc.ArgumentError as ae:
- raise sa_exc.InvalidRequestError(
- "Could not find a FROM clause to join from. "
- "Tried joining to %s, but got: %s" % (right, ae))
-@@ -2115,7 +2116,7 @@
- appropriate to the entity class represented by this ``Query``.
-
- """
-- if isinstance(statement, basestring):
-+ if isinstance(statement, str):
- statement = sql.text(statement)
-
- if not isinstance(statement,
-@@ -2609,7 +2610,7 @@
- use_labels=context.labels)
-
- from_clause = inner
-- for eager_join in context.eager_joins.values():
-+ for eager_join in list(context.eager_joins.values()):
- # EagerLoader places a 'stop_on' attribute on the join,
- # giving us a marker as to where the "splice point" of
- # the join should be
-@@ -2674,7 +2675,7 @@
- subtypes are selected from the total results.
-
- """
-- for (ext_info, adapter) in self._mapper_adapter_map.values():
-+ for (ext_info, adapter) in list(self._mapper_adapter_map.values()):
- if ext_info in self._join_entities:
- continue
- single_crit = ext_info.mapper._single_table_criterion
-@@ -2697,7 +2698,7 @@
- def __new__(cls, *args, **kwargs):
- if cls is _QueryEntity:
- entity = args[1]
-- if not isinstance(entity, basestring) and \
-+ if not isinstance(entity, str) and \
- _is_mapped_class(entity):
- cls = _MapperEntity
- else:
-@@ -2905,7 +2906,7 @@
- self.expr = column
- self.namespace = namespace
-
-- if isinstance(column, basestring):
-+ if isinstance(column, str):
- column = sql.literal_column(column)
- self._label_name = column.name
- elif isinstance(column, (
-@@ -3080,7 +3081,7 @@
- self.alias = alias
-
- def process_query(self, query):
-- if isinstance(self.alias, basestring):
-+ if isinstance(self.alias, str):
- alias = query._mapper_zero().mapped_table.alias(self.alias)
- else:
- alias = self.alias
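query.py touches two more syntax-level changes: 'except E, e' becomes 'except E as e', and generator .next() becomes __next__(), normally reached through the next() builtin. A small sketch with made-up values:

    def values(column):
        yield (column,)

    try:
        print(next(values("id")))         # the builtin works on both interpreters
        # print(values("id").next())      # Python 2 only; __next__() on Python 3
        raise ValueError("Could not find a FROM clause to join from")
    except ValueError as err:             # 'except ValueError, err:' is now a SyntaxError
        print(err)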
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/session.py
---- a/lib/sqlalchemy/orm/session.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/session.py Sat Apr 27 19:42:17 2013 -0400
-@@ -5,7 +5,7 @@
- # the MIT License: http://www.opensource.org/licenses/mit-license.php
- """Provides the Session class and related utilities."""
-
--from __future__ import with_statement
-+
-
- import weakref
- from .. import util, sql, engine, exc as sa_exc, event
-@@ -35,7 +35,7 @@
- def close_all(cls):
- """Close *all* sessions in memory."""
-
-- for sess in _sessions.values():
-+ for sess in list(_sessions.values()):
- sess.close()
-
- @classmethod
-@@ -249,7 +249,7 @@
- if s.key:
- del s.key
-
-- for s, (oldkey, newkey) in self._key_switches.items():
-+ for s, (oldkey, newkey) in list(self._key_switches.items()):
- self.session.identity_map.discard(s)
- s.key = oldkey
- self.session.identity_map.replace(s)
-@@ -327,7 +327,7 @@
- subtransaction.commit()
-
- if not self.session._flushing:
-- for _flush_guard in xrange(100):
-+ for _flush_guard in range(100):
- if self.session._is_clean():
- break
- self.session.flush()
-@@ -604,7 +604,7 @@
- SessionExtension._adapt_listener(self, ext)
-
- if binds is not None:
-- for mapperortable, bind in binds.iteritems():
-+ for mapperortable, bind in binds.items():
- if isinstance(mapperortable, (type, Mapper)):
- self.bind_mapper(mapperortable, bind)
- else:
-@@ -1775,7 +1775,7 @@
- Session.
-
- """
-- return iter(list(self._new.values()) + self.identity_map.values())
-+ return iter(list(self._new.values()) + list(self.identity_map.values()))
-
- def _contains_state(self, state):
- return state in self._new or self.identity_map.contains_state(state)
-@@ -2138,13 +2138,13 @@
- def deleted(self):
- "The set of all instances marked as 'deleted' within this ``Session``"
-
-- return util.IdentitySet(self._deleted.values())
-+ return util.IdentitySet(list(self._deleted.values()))
-
- @property
- def new(self):
- "The set of all instances marked as 'new' within this ``Session``."
-
-- return util.IdentitySet(self._new.values())
-+ return util.IdentitySet(list(self._new.values()))
-
-
- class sessionmaker(_SessionClassMethods):
-@@ -2236,7 +2236,7 @@
- session = Session() # invokes sessionmaker.__call__()
-
- """
-- for k, v in self.kw.items():
-+ for k, v in list(self.kw.items()):
- local_kw.setdefault(k, v)
- return self.class_(**local_kw)
-
-@@ -2255,7 +2255,7 @@
- return "%s(class_=%r%s)" % (
- self.__class__.__name__,
- self.class_.__name__,
-- ", ".join("%s=%r" % (k, v) for k, v in self.kw.items())
-+ ", ".join("%s=%r" % (k, v) for k, v in list(self.kw.items()))
- )
-
- _sessions = weakref.WeakValueDictionary()
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/state.py
---- a/lib/sqlalchemy/orm/state.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/state.py Sat Apr 27 19:42:17 2013 -0400
-@@ -417,7 +417,7 @@
- against this set when a refresh operation occurs.
-
- """
-- return set([k for k, v in self.callables.items() if v is self])
-+ return set([k for k, v in list(self.callables.items()) if v is self])
-
- def _instance_dict(self):
- return None
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/strategies.py
---- a/lib/sqlalchemy/orm/strategies.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/strategies.py Sat Apr 27 19:42:17 2013 -0400
-@@ -359,7 +359,7 @@
- )
-
- if self.use_get:
-- for col in self._equated_columns.keys():
-+ for col in list(self._equated_columns.keys()):
- if col in self.mapper._equivalent_columns:
- for c in self.mapper._equivalent_columns[col]:
- self._equated_columns[c] = self._equated_columns[col]
-@@ -1332,7 +1332,7 @@
- def __init__(self, key, lazy=True, chained=False,
- propagate_to_loaders=True
- ):
-- if isinstance(key[0], basestring) and key[0] == '*':
-+ if isinstance(key[0], str) and key[0] == '*':
- if len(key) != 1:
- raise sa_exc.ArgumentError(
- "Wildcard identifier '*' must "
-@@ -1384,7 +1384,7 @@
- def __init__(self, key, alias=None, chained=False):
- super(LoadEagerFromAliasOption, self).__init__(key)
- if alias is not None:
-- if not isinstance(alias, basestring):
-+ if not isinstance(alias, str):
- info = inspect(alias)
- alias = info.selectable
- self.alias = alias
-@@ -1401,7 +1401,7 @@
-
- root_mapper, prop = paths[-1].path[-2:]
- if self.alias is not None:
-- if isinstance(self.alias, basestring):
-+ if isinstance(self.alias, str):
- self.alias = prop.target.alias(self.alias)
- paths[-1].set(query, "user_defined_eager_row_processor",
- sql_util.ColumnAdapter(self.alias,
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/unitofwork.py
---- a/lib/sqlalchemy/orm/unitofwork.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/unitofwork.py Sat Apr 27 19:42:17 2013 -0400
-@@ -315,7 +315,7 @@
- # see if the graph of mapper dependencies has cycles.
- self.cycles = cycles = topological.find_cycles(
- self.dependencies,
-- self.postsort_actions.values())
-+ list(self.postsort_actions.values()))
-
- if cycles:
- # if yes, break the per-mapper actions into
-@@ -342,7 +342,7 @@
- for dep in convert[edge[1]]:
- self.dependencies.add((edge[0], dep))
-
-- return set([a for a in self.postsort_actions.values()
-+ return set([a for a in list(self.postsort_actions.values())
- if not a.disabled
- ]
- ).difference(cycles)
-@@ -381,7 +381,7 @@
- """
- states = set(self.states)
- isdel = set(
-- s for (s, (isdelete, listonly)) in self.states.iteritems()
-+ s for (s, (isdelete, listonly)) in self.states.items()
- if isdelete
- )
- other = states.difference(isdel)
-@@ -461,7 +461,7 @@
- def __repr__(self):
- return "%s(%s)" % (
- self.__class__.__name__,
-- ",".join(str(x) for x in self.__dict__.values())
-+ ",".join(str(x) for x in list(self.__dict__.values()))
- )
-
-
-diff -r 9d0639b9d3be lib/sqlalchemy/orm/util.py
---- a/lib/sqlalchemy/orm/util.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/orm/util.py Sat Apr 27 19:42:17 2013 -0400
-@@ -120,7 +120,7 @@
- colnames = util.OrderedSet()
- colnamemaps = {}
- types = {}
-- for key in table_map.keys():
-+ for key in list(table_map.keys()):
- table = table_map[key]
-
-        # mysql doesn't like selecting from a select;
-@@ -146,7 +146,7 @@
- return sql.type_coerce(sql.null(), types[name]).label(name)
-
- result = []
-- for type, table in table_map.iteritems():
-+ for type, table in table_map.items():
- if typecolname is not None:
- result.append(
- sql.select([col(name, table) for name in colnames] +
-@@ -203,7 +203,7 @@
- "positional arguments, got %s" % len(args))
- if kwargs:
- raise sa_exc.ArgumentError("unknown keyword arguments: %s"
-- % ", ".join(kwargs.keys()))
-+ % ", ".join(list(kwargs.keys())))
- mapper = class_mapper(class_)
- if "ident" in locals():
- return mapper.identity_key_from_primary_key(util.to_list(ident))
-@@ -211,7 +211,7 @@
- instance = kwargs.pop("instance")
- if kwargs:
- raise sa_exc.ArgumentError("unknown keyword arguments: %s"
-- % ", ".join(kwargs.keys()))
-+ % ", ".join(list(kwargs.keys())))
- mapper = object_mapper(instance)
- return mapper.identity_key_from_instance(instance)
-
-@@ -300,7 +300,7 @@
-
- def pairs(self):
- path = self.path
-- for i in xrange(0, len(path), 2):
-+ for i in range(0, len(path), 2):
- yield path[i], path[i + 1]
-
- def contains_mapper(self, mapper):
-@@ -314,10 +314,10 @@
-
- def serialize(self):
- path = self.path
-- return zip(
-+ return list(zip(
- [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
- [path[i].key for i in range(1, len(path), 2)] + [None]
-- )
-+ ))
-
- @classmethod
- def deserialize(cls, path):
-@@ -411,7 +411,7 @@
-
- self.path = parent.path + (entity,)
-
-- def __nonzero__(self):
-+ def __bool__(self):
- return True
-
- def __getitem__(self, entity):
-@@ -589,8 +589,8 @@
- return self.__adapt_prop(attr, key)
- elif hasattr(attr, 'func_code'):
- is_method = getattr(self.__target, key, None)
-- if is_method and is_method.im_self is not None:
-- return util.types.MethodType(attr.im_func, self, self)
-+ if is_method and is_method.__self__ is not None:
-+ return util.types.MethodType(attr.__func__, self, self)
- else:
- return None
- elif hasattr(attr, '__get__'):
-@@ -880,7 +880,7 @@
-
- self._joined_from_info = right_info
-
-- if isinstance(onclause, basestring):
-+ if isinstance(onclause, str):
- onclause = getattr(left_orm_info.entity, onclause)
-
- if isinstance(onclause, attributes.QueryableAttribute):
-@@ -1001,7 +1001,7 @@
- parent/child relationship.
-
- """
-- if isinstance(prop, basestring):
-+ if isinstance(prop, str):
- mapper = object_mapper(instance)
- prop = getattr(mapper.class_, prop).property
- elif isinstance(prop, attributes.QueryableAttribute):
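The AliasedClass hunk swaps im_self/im_func for __self__/__func__. Illustration on a throwaway class that is not part of the patch:

    class Widget:
        def ping(self):
            return "pong"

    bound = Widget().ping
    print(bound.__self__.__class__.__name__)    # 'Widget'  (im_self on Python 2)
    print(bound.__func__ is Widget.ping)        # True      (im_func on Python 2)
    print(hasattr(bound.__func__, '__code__'))  # True      (func_code is gone)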
-diff -r 9d0639b9d3be lib/sqlalchemy/pool.py
---- a/lib/sqlalchemy/pool.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/pool.py Sat Apr 27 19:42:17 2013 -0400
-@@ -57,7 +57,7 @@
- All pools and connections are disposed.
- """
-
-- for manager in proxies.itervalues():
-+ for manager in proxies.values():
- manager.close()
- proxies.clear()
-
-@@ -368,7 +368,7 @@
- connection = self.__pool._creator()
- self.__pool.logger.debug("Created new connection %r", connection)
- return connection
-- except Exception, e:
-+ except Exception as e:
- self.__pool.logger.debug("Error on connect(): %s", e)
- raise
-
-@@ -391,7 +391,7 @@
- # Immediately close detached instances
- if connection_record is None:
- pool._close_connection(connection)
-- except Exception, e:
-+ except Exception as e:
- if connection_record is not None:
- connection_record.invalidate(e=e)
- if isinstance(e, (SystemExit, KeyboardInterrupt)):
-@@ -499,7 +499,7 @@
- self._connection_record,
- self)
- return self
-- except exc.DisconnectionError, e:
-+ except exc.DisconnectionError as e:
- self._pool.logger.info(
- "Disconnection detected on checkout: %s", e)
- self._connection_record.invalidate(e)
-@@ -755,7 +755,7 @@
- wait = self._max_overflow > -1 and \
- self._overflow >= self._max_overflow
- return self._pool.get(wait, self._timeout)
-- except sqla_queue.SAAbort, aborted:
-+ except sqla_queue.SAAbort as aborted:
- return aborted.context._do_get()
- except sqla_queue.Empty:
- if self._max_overflow > -1 and \
-@@ -1004,7 +1004,7 @@
- self._create_pool_mutex = threading.Lock()
-
- def close(self):
-- for key in self.pools.keys():
-+ for key in list(self.pools.keys()):
- del self.pools[key]
-
- def __del__(self):
-diff -r 9d0639b9d3be lib/sqlalchemy/processors.py
---- a/lib/sqlalchemy/processors.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/processors.py Sat Apr 27 19:42:17 2013 -0400
-@@ -38,10 +38,10 @@
- "'%s'" % (type_.__name__, value))
- if has_named_groups:
- groups = m.groupdict(0)
-- return type_(**dict(zip(groups.iterkeys(),
-- map(int, groups.itervalues()))))
-+ return type_(**dict(list(zip(iter(groups.keys()),
-+ list(map(int, iter(groups.values())))))))
- else:
-- return type_(*map(int, m.groups(0)))
-+ return type_(*list(map(int, m.groups(0))))
- return process
-
-
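The named-group hunk above is a mechanical 2to3 translation of the zip/map chain; an equivalent but more readable spelling of the same expression (a suggestion only, not what the patch applies) is a dict comprehension:

    import re
    from datetime import date

    m = re.match(r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})", "2013-05-26")
    groups = m.groupdict(0)

    # equivalent to type_(**dict(list(zip(iter(groups.keys()), ...)))) above
    value = date(**{key: int(num) for key, num in groups.items()})
    print(value)   # 2013-05-26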
-diff -r 9d0639b9d3be lib/sqlalchemy/schema.py
---- a/lib/sqlalchemy/schema.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/schema.py Sat Apr 27 19:42:17 2013 -0400
-@@ -27,7 +27,7 @@
- as components in SQL expressions.
-
- """
--from __future__ import with_statement
-+
- import re
- import inspect
- from . import exc, util, dialects, event, events, inspection
-@@ -683,7 +683,7 @@
- continue
- Index(index.name,
- unique=index.unique,
-- *[table.c[col] for col in index.columns.keys()],
-+ *[table.c[col] for col in list(index.columns.keys())],
- **index.kwargs)
- table.dispatch._update(self.dispatch)
- return table
-@@ -898,7 +898,7 @@
- type_ = kwargs.pop('type_', None)
- args = list(args)
- if args:
-- if isinstance(args[0], basestring):
-+ if isinstance(args[0], str):
- if name is not None:
- raise exc.ArgumentError(
- "May not pass name positionally and as a keyword.")
-@@ -944,11 +944,12 @@
- args.append(self.default)
- else:
- if getattr(self.type, '_warn_on_bytestring', False):
-- # Py3K
-- #if isinstance(self.default, bytes):
-- # Py2K
-- if isinstance(self.default, str):
-- # end Py2K
-+# start Py3K
-+ if isinstance(self.default, bytes):
-+# end Py3K
-+# start Py2K
-+# if isinstance(self.default, str):
-+# end Py2K
- util.warn("Unicode column received non-unicode "
- "default value.")
- args.append(ColumnDefault(self.default))
-@@ -983,7 +984,7 @@
-
- if kwargs:
- raise exc.ArgumentError(
-- "Unknown arguments passed to Column: " + repr(kwargs.keys()))
-+ "Unknown arguments passed to Column: " + repr(list(kwargs.keys())))
-
- def __str__(self):
- if self.name is None:
-@@ -1069,7 +1070,7 @@
- self.table = table
-
- if self.index:
-- if isinstance(self.index, basestring):
-+ if isinstance(self.index, str):
- raise exc.ArgumentError(
- "The 'index' keyword argument on Column is boolean only. "
- "To create indexes with a specific name, create an "
-@@ -1077,7 +1078,7 @@
- Index(expression._truncated_label('ix_%s' % self._label),
- self, unique=self.unique)
- elif self.unique:
-- if isinstance(self.unique, basestring):
-+ if isinstance(self.unique, str):
- raise exc.ArgumentError(
- "The 'unique' keyword argument on Column is boolean "
- "only. To create unique constraints or indexes with a "
-@@ -1153,23 +1154,24 @@
- nullable=self.nullable,
- quote=self.quote,
- _proxies=[self], *fk)
-- except TypeError, e:
-- # Py3K
-- #raise TypeError(
-- # "Could not create a copy of this %r object. "
-- # "Ensure the class includes a _constructor() "
-- # "attribute or method which accepts the "
-- # "standard Column constructor arguments, or "
-- # "references the Column class itself." % self.__class__) from e
-- # Py2K
-+ except TypeError as e:
-+# start Py3K
- raise TypeError(
- "Could not create a copy of this %r object. "
- "Ensure the class includes a _constructor() "
- "attribute or method which accepts the "
- "standard Column constructor arguments, or "
-- "references the Column class itself. "
-- "Original error: %s" % (self.__class__, e))
-- # end Py2K
-+ "references the Column class itself." % self.__class__) from e
-+# end Py3K
-+# start Py2K
-+# raise TypeError(
-+# "Could not create a copy of this %r object. "
-+# "Ensure the class includes a _constructor() "
-+# "attribute or method which accepts the "
-+# "standard Column constructor arguments, or "
-+# "references the Column class itself. "
-+# "Original error: %s" % (self.__class__, e))
-+# end Py2K
-
- c.table = selectable
- selectable._columns.add(c)
-@@ -1345,7 +1347,7 @@
- if schema:
- return schema + "." + self.column.table.name + \
- "." + self.column.key
-- elif isinstance(self._colspec, basestring):
-+ elif isinstance(self._colspec, str):
- return self._colspec
- elif hasattr(self._colspec, '__clause_element__'):
- _column = self._colspec.__clause_element__()
-@@ -1390,7 +1392,7 @@
- """
- # ForeignKey inits its remote column as late as possible, so tables
- # can be defined without dependencies
-- if isinstance(self._colspec, basestring):
-+ if isinstance(self._colspec, str):
- # locate the parent table this foreign key is attached to. we
- # use the "original" column which our parent column represents
- # (its a list of columns/other ColumnElements if the parent
-@@ -1657,7 +1659,8 @@
- defaulted = argspec[3] is not None and len(argspec[3]) or 0
- positionals = len(argspec[0]) - defaulted
-
-- # Py3K compat - no unbound methods
-+# start Py3K
-+# end Py3K
- if inspect.ismethod(inspectable) or inspect.isclass(fn):
- positionals -= 1
-
-@@ -1919,7 +1922,7 @@
- has_argument = True
-
- def __init__(self, arg, for_update=False, _reflected=False):
-- util.assert_arg_type(arg, (basestring,
-+ util.assert_arg_type(arg, (str,
- expression.ClauseElement,
- expression.TextClause), 'arg')
- super(DefaultClause, self).__init__(for_update)
-@@ -2029,7 +2032,7 @@
-
- def _set_parent(self, table):
- for col in self._pending_colargs:
-- if isinstance(col, basestring):
-+ if isinstance(col, str):
- col = table.c[col]
- self.columns.add(col)
-
-@@ -2066,7 +2069,7 @@
-
- def copy(self, **kw):
- c = self.__class__(name=self.name, deferrable=self.deferrable,
-- initially=self.initially, *self.columns.keys())
-+ initially=self.initially, *list(self.columns.keys()))
- c.dispatch._update(self.dispatch)
- return c
-
-@@ -2256,19 +2259,19 @@
-
- @property
- def columns(self):
-- return self._elements.keys()
-+ return list(self._elements.keys())
-
- @property
- def elements(self):
-- return self._elements.values()
-+ return list(self._elements.values())
-
- def _set_parent(self, table):
- super(ForeignKeyConstraint, self)._set_parent(table)
-
-- for col, fk in self._elements.iteritems():
-+ for col, fk in self._elements.items():
- # string-specified column names now get
- # resolved to Column objects
-- if isinstance(col, basestring):
-+ if isinstance(col, str):
- try:
- col = table.c[col]
- except KeyError:
-@@ -2293,8 +2296,8 @@
-
- def copy(self, schema=None, **kw):
- fkc = ForeignKeyConstraint(
-- [x.parent.key for x in self._elements.values()],
-- [x._get_colspec(schema=schema) for x in self._elements.values()],
-+ [x.parent.key for x in list(self._elements.values())],
-+ [x._get_colspec(schema=schema) for x in list(self._elements.values())],
- name=self.name,
- onupdate=self.onupdate,
- ondelete=self.ondelete,
-@@ -2569,7 +2572,7 @@
- return 'MetaData(bind=%r)' % self.bind
-
- def __contains__(self, table_or_key):
-- if not isinstance(table_or_key, basestring):
-+ if not isinstance(table_or_key, str):
- table_or_key = table_or_key.key
- return table_or_key in self.tables
-
-@@ -2584,7 +2587,7 @@
- dict.pop(self.tables, key, None)
- if self._schemas:
- self._schemas = set([t.schema
-- for t in self.tables.values()
-+ for t in list(self.tables.values())
- if t.schema is not None])
-
- def __getstate__(self):
-@@ -2629,7 +2632,7 @@
- def _bind_to(self, bind):
- """Bind this MetaData to an Engine, Connection, string or URL."""
-
-- if isinstance(bind, (basestring, url.URL)):
-+ if isinstance(bind, (str, url.URL)):
- from sqlalchemy import create_engine
- self._bind = create_engine(bind)
- else:
-@@ -2662,7 +2665,7 @@
- :meth:`.Inspector.sorted_tables`
-
- """
-- return sqlutil.sort_tables(self.tables.itervalues())
-+ return sqlutil.sort_tables(iter(self.tables.values()))
-
- def reflect(self, bind=None, schema=None, views=False, only=None):
- """Load all available table definitions from the database.
-@@ -2723,7 +2726,7 @@
- bind.dialect.get_view_names(conn, schema)
- )
-
-- current = set(self.tables.iterkeys())
-+ current = set(self.tables.keys())
-
- if only is None:
- load = [name for name in available if name not in current]
-@@ -2845,7 +2848,7 @@
- def _bind_to(self, bind):
- """Bind to a Connectable in the caller's thread."""
-
-- if isinstance(bind, (basestring, url.URL)):
-+ if isinstance(bind, (str, url.URL)):
- try:
- self.context._engine = self.__engines[bind]
- except KeyError:
-@@ -2870,7 +2873,7 @@
- def dispose(self):
- """Dispose all bound engines, in all thread contexts."""
-
-- for e in self.__engines.itervalues():
-+ for e in self.__engines.values():
- if hasattr(e, 'dispose'):
- e.dispose()
-
-@@ -3075,7 +3078,7 @@
- not self._should_execute_deprecated(None, target, bind, **kw):
- return False
-
-- if isinstance(self.dialect, basestring):
-+ if isinstance(self.dialect, str):
- if self.dialect != bind.engine.name:
- return False
- elif isinstance(self.dialect, (tuple, list, set)):
-@@ -3090,7 +3093,7 @@
- def _should_execute_deprecated(self, event, target, bind, **kw):
- if self.on is None:
- return True
-- elif isinstance(self.on, basestring):
-+ elif isinstance(self.on, str):
- return self.on == bind.engine.name
- elif isinstance(self.on, (tuple, list, set)):
- return bind.engine.name in self.on
-@@ -3105,7 +3108,7 @@
-
- def _check_ddl_on(self, on):
- if (on is not None and
-- (not isinstance(on, (basestring, tuple, list, set)) and
-+ (not isinstance(on, (str, tuple, list, set)) and
- not util.callable(on))):
- raise exc.ArgumentError(
- "Expected the name of a database dialect, a tuple "
-@@ -3230,7 +3233,7 @@
-
- """
-
-- if not isinstance(statement, basestring):
-+ if not isinstance(statement, str):
- raise exc.ArgumentError(
- "Expected a string or unicode SQL statement, got '%r'" %
- statement)
-@@ -3262,7 +3265,7 @@
- def _to_schema_column_or_string(element):
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
-- if not isinstance(element, (basestring, expression.ColumnElement)):
-+ if not isinstance(element, (str, expression.ColumnElement)):
- msg = "Element %r is not a string name or column element"
- raise exc.ArgumentError(msg % element)
- return element
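schema.py's copy-failure handling shows the biggest behavioural difference in the section: Python 3 chains the original exception with 'raise ... from e' instead of appending its text to the message. A condensed sketch, with an illustrative class name and message:

    class ColumnCopyError(TypeError):
        pass

    def copy_column():
        try:
            raise TypeError("unexpected keyword argument 'quote'")   # assumed cause
        except TypeError as e:
            raise ColumnCopyError(
                "Could not create a copy of this Column object.") from e

    try:
        copy_column()
    except ColumnCopyError as err:
        print(err.__cause__)    # the original TypeError travels with the new one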
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/__init__.py
---- a/lib/sqlalchemy/sql/__init__.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/__init__.py Sat Apr 27 19:42:17 2013 -0400
-@@ -64,5 +64,5 @@
-
- from .visitors import ClauseVisitor
-
--__tmp = locals().keys()
-+__tmp = list(locals().keys())
- __all__ = sorted([i for i in __tmp if not i.startswith('__')])
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/compiler.py
---- a/lib/sqlalchemy/sql/compiler.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/compiler.py Sat Apr 27 19:42:17 2013 -0400
-@@ -51,7 +51,7 @@
- 'using', 'verbose', 'when', 'where'])
-
- LEGAL_CHARACTERS = re.compile(r'^[A-Z0-9_$]+$', re.I)
--ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in xrange(0, 10)]).union(['$'])
-+ILLEGAL_INITIAL_CHARACTERS = set([str(x) for x in range(0, 10)]).union(['$'])
-
- BIND_PARAMS = re.compile(r'(?<![:\w\$\x5c]):([\w\$]+)(?![:\w\$])', re.UNICODE)
- BIND_PARAMS_ESC = re.compile(r'\x5c(:[\w\$]+)(?![:\w\$])', re.UNICODE)
-@@ -83,9 +83,9 @@
- operators.add: ' + ',
- operators.mul: ' * ',
- operators.sub: ' - ',
-- # Py2K
-- operators.div: ' / ',
-- # end Py2K
-+# start Py2K
-+# operators.div: ' / ',
-+# end Py2K
- operators.mod: ' % ',
- operators.truediv: ' / ',
- operators.neg: '-',
-@@ -334,7 +334,7 @@
-
- if params:
- pd = {}
-- for bindparam, name in self.bind_names.iteritems():
-+ for bindparam, name in self.bind_names.items():
- if bindparam.key in params:
- pd[name] = params[bindparam.key]
- elif name in params:
-@@ -480,7 +480,7 @@
-
- def visit_textclause(self, textclause, **kwargs):
- if textclause.typemap is not None:
-- for colname, type_ in textclause.typemap.iteritems():
-+ for colname, type_ in textclause.typemap.items():
- self.result_map[colname
- if self.dialect.case_sensitive
- else colname.lower()] = \
-@@ -826,12 +826,12 @@
- of the DBAPI.
-
- """
-- if isinstance(value, basestring):
-+ if isinstance(value, str):
- value = value.replace("'", "''")
- return "'%s'" % value
- elif value is None:
- return "NULL"
-- elif isinstance(value, (float, int, long)):
-+ elif isinstance(value, (float, int)):
- return repr(value)
- elif isinstance(value, decimal.Decimal):
- return str(value)
-@@ -1136,7 +1136,7 @@
- self, ashint=True)
- })
- for (from_, dialect), hinttext in
-- select._hints.iteritems()
-+ select._hints.items()
- if dialect in ('*', self.dialect.name)
- ])
- hint_text = self.get_select_hint_text(byfrom)
-@@ -1214,7 +1214,7 @@
- self.positiontup = self.cte_positional + self.positiontup
- cte_text = self.get_cte_preamble(self.ctes_recursive) + " "
- cte_text += ", \n".join(
-- [txt for txt in self.ctes.values()]
-+ [txt for txt in list(self.ctes.values())]
- )
- cte_text += "\n "
- return cte_text
-@@ -1325,7 +1325,7 @@
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
-- insert_stmt._hints.items()
-+ list(insert_stmt._hints.items())
- if dialect in ('*', self.dialect.name)
- ])
- if insert_stmt.table in dialect_hints:
-@@ -1422,7 +1422,7 @@
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
-- update_stmt._hints.items()
-+ list(update_stmt._hints.items())
- if dialect in ('*', self.dialect.name)
- ])
- if update_stmt.table in dialect_hints:
-@@ -1528,7 +1528,7 @@
- values = []
-
- if stmt_parameters is not None:
-- for k, v in stmt_parameters.iteritems():
-+ for k, v in stmt_parameters.items():
- colkey = sql._column_as_key(k)
- if colkey is not None:
- parameters.setdefault(colkey, v)
-@@ -1559,7 +1559,7 @@
- if extra_tables and stmt_parameters:
- normalized_params = dict(
- (sql._clause_element_as_expr(c), param)
-- for c, param in stmt_parameters.items()
-+ for c, param in list(stmt_parameters.items())
- )
- assert self.isupdate
- affected_tables = set()
-@@ -1752,7 +1752,7 @@
- dialect_hints = dict([
- (table, hint_text)
- for (table, dialect), hint_text in
-- delete_stmt._hints.items()
-+ list(delete_stmt._hints.items())
- if dialect in ('*', self.dialect.name)
- ])
- if delete_stmt.table in dialect_hints:
-@@ -1868,22 +1868,23 @@
- and not first_pk)
- if column.primary_key:
- first_pk = True
-- except exc.CompileError, ce:
-- # Py3K
-- #raise exc.CompileError("(in table '%s', column '%s'): %s"
-- # % (
-- # table.description,
-- # column.name,
-- # ce.args[0]
-- # )) from ce
-- # Py2K
-+ except exc.CompileError as ce:
-+# start Py3K
- raise exc.CompileError("(in table '%s', column '%s'): %s"
-- % (
-+ % (
- table.description,
- column.name,
- ce.args[0]
-- )), None, sys.exc_info()[2]
-- # end Py2K
-+ )) from ce
-+# end Py3K
-+# start Py2K
-+# raise exc.CompileError("(in table '%s', column '%s'): %s"
-+# % (
-+# table.description,
-+# column.name,
-+# ce.args[0]
-+# )), None, sys.exc_info()[2]
-+# end Py2K
-
- const = self.create_table_constraints(table)
- if const:
-@@ -2036,7 +2037,7 @@
-
- def get_column_default_string(self, column):
- if isinstance(column.server_default, schema.DefaultClause):
-- if isinstance(column.server_default.arg, basestring):
-+ if isinstance(column.server_default.arg, str):
- return "'%s'" % column.server_default.arg
- else:
- return self.sql_compiler.process(column.server_default.arg)
-@@ -2084,11 +2085,11 @@
- remote_table = list(constraint._elements.values())[0].column.table
- text += "FOREIGN KEY(%s) REFERENCES %s (%s)" % (
- ', '.join(preparer.quote(f.parent.name, f.parent.quote)
-- for f in constraint._elements.values()),
-+ for f in list(constraint._elements.values())),
- self.define_constraint_remote_table(
- constraint, remote_table, preparer),
- ', '.join(preparer.quote(f.column.name, f.column.quote)
-- for f in constraint._elements.values())
-+ for f in list(constraint._elements.values()))
- )
- text += self.define_constraint_match(constraint)
- text += self.define_constraint_cascades(constraint)
-@@ -2355,7 +2356,7 @@
- lc_value = value.lower()
- return (lc_value in self.reserved_words
- or value[0] in self.illegal_initial_characters
-- or not self.legal_characters.match(unicode(value))
-+ or not self.legal_characters.match(str(value))
- or (lc_value != value))
-
- def quote_schema(self, schema, force):
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/expression.py
---- a/lib/sqlalchemy/sql/expression.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/expression.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1375,7 +1375,7 @@
- modifier = _FunctionGenerator(group=False)
-
-
--class _truncated_label(unicode):
-+class _truncated_label(str):
- """A unicode subclass used to identify symbolic "
- "names that may require truncation."""
-
-@@ -1395,13 +1395,13 @@
-
- def __add__(self, other):
- return _anonymous_label(
-- unicode(self) +
-- unicode(other))
-+ str(self) +
-+ str(other))
-
- def __radd__(self, other):
- return _anonymous_label(
-- unicode(other) +
-- unicode(self))
-+ str(other) +
-+ str(self))
-
- def apply_map(self, map_):
- return self % map_
-@@ -1422,7 +1422,7 @@
-
-
- def _string_or_unprintable(element):
-- if isinstance(element, basestring):
-+ if isinstance(element, str):
- return element
- else:
- try:
-@@ -1486,7 +1486,7 @@
-
-
- def _column_as_key(element):
-- if isinstance(element, basestring):
-+ if isinstance(element, str):
- return element
- if hasattr(element, '__clause_element__'):
- element = element.__clause_element__()
-@@ -1508,8 +1508,8 @@
- return element
- elif hasattr(element, '__clause_element__'):
- return element.__clause_element__()
-- elif isinstance(element, basestring):
-- return TextClause(unicode(element))
-+ elif isinstance(element, str):
-+ return TextClause(str(element))
- elif isinstance(element, (util.NoneType, bool)):
- return _const_expr(element)
- else:
-@@ -1583,8 +1583,8 @@
- def _interpret_as_from(element):
- insp = inspection.inspect(element, raiseerr=False)
- if insp is None:
-- if isinstance(element, basestring):
-- return TextClause(unicode(element))
-+ if isinstance(element, str):
-+ return TextClause(str(element))
- elif hasattr(insp, "selectable"):
- return insp.selectable
- raise exc.ArgumentError("FROM expression expected")
-@@ -1914,11 +1914,12 @@
- return dialect.statement_compiler(dialect, self, **kw)
-
- def __str__(self):
-- # Py3K
-- #return unicode(self.compile())
-- # Py2K
-- return unicode(self.compile()).encode('ascii', 'backslashreplace')
-- # end Py2K
-+# start Py3K
-+ return str(self.compile())
-+# end Py3K
-+# start Py2K
-+# return unicode(self.compile()).encode('ascii', 'backslashreplace')
-+# end Py2K
-
- def __and__(self, other):
- return and_(self, other)
-@@ -1929,7 +1930,7 @@
- def __invert__(self):
- return self._negate()
-
-- def __nonzero__(self):
-+ def __bool__(self):
- raise TypeError("Boolean value of this clause is not defined")
-
- def _negate(self):
-@@ -2507,7 +2508,7 @@
- def update(self, value):
- self._data.update(value)
- self._all_cols.clear()
-- self._all_cols.update(self._data.values())
-+ self._all_cols.update(list(self._data.values()))
-
- def extend(self, iter):
- self.update((c.key, c) for c in iter)
-@@ -2523,13 +2524,13 @@
- return and_(*l)
-
- def __contains__(self, other):
-- if not isinstance(other, basestring):
-+ if not isinstance(other, str):
- raise exc.ArgumentError("__contains__ requires a string argument")
- return util.OrderedProperties.__contains__(self, other)
-
- def __setstate__(self, state):
- self.__dict__['_data'] = state['_data']
-- self.__dict__['_all_cols'] = util.column_set(self._data.values())
-+ self.__dict__['_all_cols'] = util.column_set(list(self._data.values()))
-
- def contains_column(self, col):
- # this has to be done via set() membership
-@@ -3202,7 +3203,7 @@
- self._execution_options.union(
- {'autocommit': autocommit})
- if typemap is not None:
-- for key in typemap.keys():
-+ for key in list(typemap.keys()):
- typemap[key] = sqltypes.to_instance(typemap[key])
-
- def repl(m):
-@@ -3236,10 +3237,10 @@
-
- def _copy_internals(self, clone=_clone, **kw):
- self.bindparams = dict((b.key, clone(b, **kw))
-- for b in self.bindparams.values())
-+ for b in list(self.bindparams.values()))
-
- def get_children(self, **kwargs):
-- return self.bindparams.values()
-+ return list(self.bindparams.values())
-
-
- class Null(ColumnElement):
-@@ -3750,7 +3751,7 @@
- negate=None, modifiers=None):
- # allow compatibility with libraries that
- # refer to BinaryExpression directly and pass strings
-- if isinstance(operator, basestring):
-+ if isinstance(operator, str):
- operator = operators.custom_op(operator)
- self._orig = (left, right)
- self.left = _literal_as_text(left).self_group(against=operator)
-@@ -3764,7 +3765,7 @@
- else:
- self.modifiers = modifiers
-
-- def __nonzero__(self):
-+ def __bool__(self):
- if self.operator in (operator.eq, operator.ne):
- return self.operator(hash(self._orig[0]), hash(self._orig[1]))
- else:
-@@ -4057,11 +4058,12 @@
-
- @property
- def description(self):
-- # Py3K
-- #return self.name
-- # Py2K
-- return self.name.encode('ascii', 'backslashreplace')
-- # end Py2K
-+# start Py3K
-+ return self.name
-+# end Py3K
-+# start Py2K
-+# return self.name.encode('ascii', 'backslashreplace')
-+# end Py2K
-
- def as_scalar(self):
- try:
-@@ -4471,11 +4473,12 @@
-
- @util.memoized_property
- def description(self):
-- # Py3K
-- #return self.name
-- # Py2K
-- return self.name.encode('ascii', 'backslashreplace')
-- # end Py2K
-+# start Py3K
-+ return self.name
-+# end Py3K
-+# start Py2K
-+# return self.name.encode('ascii', 'backslashreplace')
-+# end Py2K
-
- @_memoized_property
- def _key_label(self):
-@@ -4602,11 +4605,12 @@
-
- @util.memoized_property
- def description(self):
-- # Py3K
-- #return self.name
-- # Py2K
-- return self.name.encode('ascii', 'backslashreplace')
-- # end Py2K
-+# start Py3K
-+ return self.name
-+# end Py3K
-+# start Py2K
-+# return self.name.encode('ascii', 'backslashreplace')
-+# end Py2K
-
- def append_column(self, c):
- self._columns[c.key] = c
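expression.py renames every __nonzero__ to __bool__; Python 3 ignores the old name, so without the rename ClauseElement's truthiness guard would silently stop firing. Minimal demonstration on a stand-in class:

    class StandInClause:
        def __bool__(self):               # a __nonzero__ method would be ignored
            raise TypeError("Boolean value of this clause is not defined")

    try:
        if StandInClause():
            pass
    except TypeError as err:
        print(err)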
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/functions.py
---- a/lib/sqlalchemy/sql/functions.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/functions.py Sat Apr 27 19:42:17 2013 -0400
-@@ -41,7 +41,7 @@
- super(_GenericMeta, cls).__init__(clsname, bases, clsdict)
-
-
--class GenericFunction(Function):
-+class GenericFunction(Function, metaclass=_GenericMeta):
- """Define a 'generic' function.
-
- A generic function is a pre-established :class:`.Function`
-@@ -112,7 +112,6 @@
- name is still recognized for backwards-compatibility.
-
- """
-- __metaclass__ = _GenericMeta
-
- coerce_arguments = True
-
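The GenericFunction hunk moves the metaclass from a __metaclass__ attribute, which Python 3 ignores, to the class-header keyword. A stand-alone sketch of the same registration pattern with illustrative names:

    class _RegisteringMeta(type):
        registry = {}
        def __init__(cls, clsname, bases, clsdict):
            super().__init__(clsname, bases, clsdict)
            _RegisteringMeta.registry[clsname.lower()] = cls

    class GenericThing(metaclass=_RegisteringMeta):   # Python 3 spelling
        pass

    print('genericthing' in _RegisteringMeta.registry)   # True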
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/operators.py
---- a/lib/sqlalchemy/sql/operators.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/operators.py Sat Apr 27 19:42:17 2013 -0400
-@@ -14,8 +14,8 @@
- getitem, lshift, rshift
- )
-
--# Py2K
--from operator import (div,)
-+# start Py2K
-+#from operator import (div,)
- # end Py2K
-
- from ..util import symbol
-@@ -789,9 +789,9 @@
- getitem: 15,
- mul: 8,
- truediv: 8,
-- # Py2K
-- div: 8,
-- # end Py2K
-+# start Py2K
-+# div: 8,
-+# end Py2K
- mod: 8,
- neg: 8,
- add: 7,
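operators.py comments out its use of operator.div because that function no longer exists on Python 3, where / is always true division:

    import operator

    print(operator.truediv(7, 2))     # 3.5
    print(operator.floordiv(7, 2))    # 3
    print(hasattr(operator, 'div'))   # False on Python 3; the Py2K block covers it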
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/util.py
---- a/lib/sqlalchemy/sql/util.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/util.py Sat Apr 27 19:42:17 2013 -0400
-@@ -232,7 +232,7 @@
-
-
- def _quote_ddl_expr(element):
-- if isinstance(element, basestring):
-+ if isinstance(element, str):
- element = element.replace("'", "''")
- return "'%s'" % element
- else:
-@@ -349,7 +349,7 @@
- continue
- try:
- col = fk.get_referent(left)
-- except exc.NoReferenceError, nrte:
-+ except exc.NoReferenceError as nrte:
- if nrte.table_name == left.name:
- raise
- else:
-@@ -367,7 +367,7 @@
- continue
- try:
- col = fk.get_referent(b)
-- except exc.NoReferenceError, nrte:
-+ except exc.NoReferenceError as nrte:
- if nrte.table_name == b.name:
- raise
- else:
-@@ -518,15 +518,15 @@
- # so that the resulting objects are pickleable.
- annotated_classes = {}
-
--for cls in expression.__dict__.values() + [schema.Column, schema.Table]:
-+for cls in list(expression.__dict__.values()) + [schema.Column, schema.Table]:
- if isinstance(cls, type) and issubclass(cls, expression.ClauseElement):
- if issubclass(cls, expression.ColumnElement):
- annotation_cls = "AnnotatedColumnElement"
- else:
- annotation_cls = "Annotated"
-- exec "class Annotated%s(%s, cls):\n" \
-- " pass" % (cls.__name__, annotation_cls) in locals()
-- exec "annotated_classes[cls] = Annotated%s" % (cls.__name__,)
-+ exec("class Annotated%s(%s, cls):\n" \
-+ " pass" % (cls.__name__, annotation_cls), locals())
-+ exec("annotated_classes[cls] = Annotated%s" % (cls.__name__,))
-
-
- def _deep_annotate(element, annotations, exclude=None):
-@@ -765,7 +765,7 @@
- return self.row[self.map[key]]
-
- def keys(self):
-- return self.row.keys()
-+ return list(self.row.keys())
-
-
- class ClauseAdapter(visitors.ReplacingCloningVisitor):
-diff -r 9d0639b9d3be lib/sqlalchemy/sql/visitors.py
---- a/lib/sqlalchemy/sql/visitors.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/sql/visitors.py Sat Apr 27 19:42:17 2013 -0400
-@@ -87,14 +87,12 @@
- cls._compiler_dispatch = _compiler_dispatch
-
-
--class Visitable(object):
-+class Visitable(object, metaclass=VisitableType):
- """Base class for visitable objects, applies the
- ``VisitableType`` metaclass.
-
- """
-
-- __metaclass__ = VisitableType
--
-
- class ClauseVisitor(object):
- """Base class for visitor objects which can traverse using
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/__init__.py
---- a/lib/sqlalchemy/testing/__init__.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/__init__.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import absolute_import
-+
-
- from .warnings import testing_warn, assert_warnings, resetwarnings
-
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/assertions.py
---- a/lib/sqlalchemy/testing/assertions.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/assertions.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import absolute_import
-+
-
- from . import util as testutil
- from sqlalchemy import pool, orm, util
-@@ -63,7 +63,7 @@
-
- @decorator
- def decorate(fn, *args, **kw):
-- if isinstance(db, basestring):
-+ if isinstance(db, str):
- if not spec(config.db):
- return fn(*args, **kw)
- else:
-@@ -171,9 +171,9 @@
- try:
- callable_(*args, **kwargs)
- assert False, "Callable did not raise an exception"
-- except except_cls, e:
-- assert re.search(msg, unicode(e), re.UNICODE), u"%r !~ %s" % (msg, e)
-- print unicode(e).encode('utf-8')
-+ except except_cls as e:
-+ assert re.search(msg, str(e), re.UNICODE), "%r !~ %s" % (msg, e)
-+ print(str(e).encode('utf-8'))
-
-
- class AssertsCompiledSQL(object):
-@@ -190,12 +190,12 @@
- dialect = default.DefaultDialect()
- elif dialect is None:
- dialect = config.db.dialect
-- elif isinstance(dialect, basestring):
-+ elif isinstance(dialect, str):
- dialect = create_engine("%s://" % dialect).dialect
-
- kw = {}
- if params is not None:
-- kw['column_keys'] = params.keys()
-+ kw['column_keys'] = list(params.keys())
-
- if isinstance(clause, orm.Query):
- context = clause._compile_context()
-@@ -205,10 +205,11 @@
- c = clause.compile(dialect=dialect, **kw)
-
- param_str = repr(getattr(c, 'params', {}))
-- # Py3K
-- #param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
-+# start Py3K
-+ param_str = param_str.encode('utf-8').decode('ascii', 'ignore')
-+# end Py3K
-
-- print "\nSQL String:\n" + str(c) + param_str
-+ print("\nSQL String:\n" + str(c) + param_str)
-
- cc = re.sub(r'[\n\t]', '', str(c))
-
-@@ -262,7 +263,7 @@
- class AssertsExecutionResults(object):
- def assert_result(self, result, class_, *objects):
- result = list(result)
-- print repr(result)
-+ print(repr(result))
- self.assert_list(result, class_, objects)
-
- def assert_list(self, result, class_, list):
-@@ -275,7 +276,7 @@
- def assert_row(self, class_, rowobj, desc):
- self.assert_(rowobj.__class__ is class_,
- "item class is not " + repr(class_))
-- for key, value in desc.iteritems():
-+ for key, value in desc.items():
- if isinstance(value, tuple):
- if isinstance(value[1], list):
- self.assert_list(getattr(rowobj, key), value[0], value[1])
-@@ -300,7 +301,7 @@
- found = util.IdentitySet(result)
- expected = set([immutabledict(e) for e in expected])
-
-- for wrong in itertools.ifilterfalse(lambda o: type(o) == cls, found):
-+ for wrong in itertools.filterfalse(lambda o: type(o) == cls, found):
- fail('Unexpected type "%s", expected "%s"' % (
- type(wrong).__name__, cls.__name__))
-
-@@ -311,7 +312,7 @@
- NOVALUE = object()
-
- def _compare_item(obj, spec):
-- for key, value in spec.iteritems():
-+ for key, value in spec.items():
- if isinstance(value, tuple):
- try:
- self.assert_unordered_result(
-@@ -352,7 +353,7 @@
- for rule in rules:
- if isinstance(rule, dict):
- newrule = assertsql.AllOf(*[
-- assertsql.ExactSQL(k, v) for k, v in rule.iteritems()
-+ assertsql.ExactSQL(k, v) for k, v in rule.items()
- ])
- else:
- newrule = assertsql.ExactSQL(*rule)
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/assertsql.py
---- a/lib/sqlalchemy/testing/assertsql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/assertsql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -127,7 +127,7 @@
- # do a positive compare only
-
- for param, received in zip(params, _received_parameters):
-- for k, v in param.iteritems():
-+ for k, v in param.items():
- if k not in received or received[k] != v:
- equivalent = False
- break
-@@ -180,7 +180,7 @@
- all_received = list(_received_parameters)
- while params:
- param = dict(params.pop(0))
-- for k, v in context.compiled.params.iteritems():
-+ for k, v in context.compiled.params.items():
- param.setdefault(k, v)
- if param not in _received_parameters:
- equivalent = False
-@@ -195,9 +195,9 @@
- all_received = []
- self._result = equivalent
- if not self._result:
-- print 'Testing for compiled statement %r partial params '\
-+ print('Testing for compiled statement %r partial params '\
- '%r, received %r with params %r' % (self.statement,
-- all_params, _received_statement, all_received)
-+ all_params, _received_statement, all_received))
- self._errmsg = \
- 'Testing for compiled statement %r partial params %r, '\
- 'received %r with params %r' % (self.statement,
-@@ -262,7 +262,7 @@
-
- # oracle+zxjdbc passes a PyStatement when returning into
-
-- query = unicode(query)
-+ query = str(query)
- if context.engine.name == 'mssql' \
- and query.endswith('; select scope_identity()'):
- query = query[:-25]
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/engines.py
---- a/lib/sqlalchemy/testing/engines.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/engines.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import absolute_import
-+
-
- import types
- import weakref
-@@ -31,18 +31,18 @@
- fn()
- except (SystemExit, KeyboardInterrupt):
- raise
-- except Exception, e:
-+ except Exception as e:
- warnings.warn(
- "testing_reaper couldn't "
- "rollback/close connection: %s" % e)
-
- def rollback_all(self):
-- for rec in self.proxy_refs.keys():
-+ for rec in list(self.proxy_refs.keys()):
- if rec is not None and rec.is_valid:
- self._safe(rec.rollback)
-
- def close_all(self):
-- for rec in self.proxy_refs.keys():
-+ for rec in list(self.proxy_refs.keys()):
- if rec is not None:
- self._safe(rec._close)
-
-@@ -66,7 +66,7 @@
-
- self.conns = set()
-
-- for rec in self.testing_engines.keys():
-+ for rec in list(self.testing_engines.keys()):
- if rec is not config.db:
- rec.dispose()
-
-@@ -75,7 +75,7 @@
- for conn in self.conns:
- self._safe(conn.close)
- self.conns = set()
-- for rec in self.testing_engines.keys():
-+ for rec in list(self.testing_engines.keys()):
- rec.dispose()
-
- def assert_all_closed(self):
-@@ -160,7 +160,7 @@
- fn()
- except (SystemExit, KeyboardInterrupt):
- raise
-- except Exception, e:
-+ except Exception as e:
- warnings.warn(
- "ReconnectFixture couldn't "
- "close connection: %s" % e)
-@@ -353,23 +353,24 @@
- Callable = object()
- NoAttribute = object()
-
-- # Py3K
-- #Natives = set([getattr(types, t)
-- # for t in dir(types) if not t.startswith('_')]). \
-- # union([type(t) if not isinstance(t, type)
-- # else t for t in __builtins__.values()]).\
-- # difference([getattr(types, t)
-- # for t in ('FunctionType', 'BuiltinFunctionType',
-- # 'MethodType', 'BuiltinMethodType',
-- # 'LambdaType', )])
-- # Py2K
-+# start Py3K
- Natives = set([getattr(types, t)
- for t in dir(types) if not t.startswith('_')]). \
-+ union([type(t) if not isinstance(t, type)
-+ else t for t in list(__builtins__.values())]).\
- difference([getattr(types, t)
-- for t in ('FunctionType', 'BuiltinFunctionType',
-- 'MethodType', 'BuiltinMethodType',
-- 'LambdaType', 'UnboundMethodType',)])
-- # end Py2K
-+ for t in ('FunctionType', 'BuiltinFunctionType',
-+ 'MethodType', 'BuiltinMethodType',
-+ 'LambdaType', )])
-+# end Py3K
-+# start Py2K
-+# Natives = set([getattr(types, t)
-+# for t in dir(types) if not t.startswith('_')]). \
-+# difference([getattr(types, t)
-+# for t in ('FunctionType', 'BuiltinFunctionType',
-+# 'MethodType', 'BuiltinMethodType',
-+# 'LambdaType', 'UnboundMethodType',)])
-+# end Py2K
-
- def __init__(self):
- self.buffer = deque()
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/entities.py
---- a/lib/sqlalchemy/testing/entities.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/entities.py Sat Apr 27 19:42:17 2013 -0400
-@@ -7,7 +7,7 @@
- class BasicEntity(object):
-
- def __init__(self, **kw):
-- for key, value in kw.iteritems():
-+ for key, value in kw.items():
- setattr(self, key, value)
-
- def __repr__(self):
-@@ -67,7 +67,7 @@
- a = self
- b = other
-
-- for attr in a.__dict__.keys():
-+ for attr in list(a.__dict__.keys()):
- if attr.startswith('_'):
- continue
- value = getattr(a, attr)
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/exclusions.py
---- a/lib/sqlalchemy/testing/exclusions.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/exclusions.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import with_statement
-+
-
- import operator
- from nose import SkipTest
-@@ -23,10 +23,10 @@
- def fail_if(self, name='block'):
- try:
- yield
-- except Exception, ex:
-+ except Exception as ex:
- if self.predicate():
-- print ("%s failed as expected (%s): %s " % (
-- name, self.predicate, str(ex)))
-+ print(("%s failed as expected (%s): %s " % (
-+ name, self.predicate, str(ex))))
- else:
- raise
- else:
-@@ -92,7 +92,7 @@
- return OrPredicate([cls.as_predicate(pred) for pred in predicate])
- elif isinstance(predicate, tuple):
- return SpecPredicate(*predicate)
-- elif isinstance(predicate, basestring):
-+ elif isinstance(predicate, str):
- return SpecPredicate(predicate, None, None)
- elif util.callable(predicate):
- return LambdaPredicate(predicate)
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/fixtures.py
---- a/lib/sqlalchemy/testing/fixtures.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/fixtures.py Sat Apr 27 19:42:17 2013 -0400
-@@ -125,9 +125,9 @@
- for table in reversed(self.metadata.sorted_tables):
- try:
- table.delete().execute().close()
-- except sa.exc.DBAPIError, ex:
-- print >> sys.stderr, "Error emptying table %s: %r" % (
-- table, ex)
-+ except sa.exc.DBAPIError as ex:
-+ print("Error emptying table %s: %r" % (
-+ table, ex), file=sys.stderr)
-
- def setup(self):
- self._setup_each_tables()
-@@ -187,10 +187,10 @@
- def _load_fixtures(cls):
- """Insert rows as represented by the fixtures() method."""
- headers, rows = {}, {}
-- for table, data in cls.fixtures().iteritems():
-+ for table, data in cls.fixtures().items():
- if len(data) < 2:
- continue
-- if isinstance(table, basestring):
-+ if isinstance(table, str):
- table = cls.tables[table]
- headers[table] = data[0]
- rows[table] = data[1:]
-@@ -199,7 +199,7 @@
- continue
- cls.bind.execute(
- table.insert(),
-- [dict(zip(headers[table], column_values))
-+ [dict(list(zip(headers[table], column_values)))
- for column_values in rows[table]])
-
-
-@@ -284,8 +284,8 @@
- cls_registry[classname] = cls
- return type.__init__(cls, classname, bases, dict_)
-
-- class _Base(object):
-- __metaclass__ = FindFixture
-+ class _Base(object, metaclass=FindFixture):
-+ pass
-
- class Basic(BasicEntity, _Base):
- pass
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/plugin/noseplugin.py
---- a/lib/sqlalchemy/testing/plugin/noseplugin.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/plugin/noseplugin.py Sat Apr 27 19:42:17 2013 -0400
-@@ -9,10 +9,10 @@
- normally as "from sqlalchemy.testing.plugin import noseplugin".
-
- """
--from __future__ import absolute_import
-+
-
- import os
--import ConfigParser
-+import configparser
-
- from nose.plugins import Plugin
- from nose import SkipTest
-@@ -55,9 +55,9 @@
-
-
- def _list_dbs(*args):
-- print "Available --db options (use --dburi to override)"
-+ print("Available --db options (use --dburi to override)")
- for macro in sorted(file_config.options('db')):
-- print "%20s\t%s" % (macro, file_config.get('db', macro))
-+ print("%20s\t%s" % (macro, file_config.get('db', macro)))
- sys.exit(0)
-
-
-@@ -318,7 +318,7 @@
- opt("--write-profiles", action="store_true", dest="write_profiles", default=False,
- help="Write/update profiling data.")
- global file_config
-- file_config = ConfigParser.ConfigParser()
-+ file_config = configparser.ConfigParser()
- file_config.read(['setup.cfg', 'test.cfg'])
-
- def configure(self, options, conf):
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/profiling.py
---- a/lib/sqlalchemy/testing/profiling.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/profiling.py Sat Apr 27 19:42:17 2013 -0400
-@@ -60,9 +60,9 @@
- if report:
- sort_ = target_opts.get('sort', profile_config['sort'])
- limit = target_opts.get('limit', profile_config['limit'])
-- print ("Profile report for target '%s' (%s)" % (
-+ print(("Profile report for target '%s' (%s)" % (
- target, filename)
-- )
-+ ))
-
- stats = load_stats()
- stats.sort_stats(*sort_)
-@@ -198,7 +198,7 @@
- profile_f.close()
-
- def _write(self):
-- print("Writing profile file %s" % self.fname)
-+ print(("Writing profile file %s" % self.fname))
- profile_f = open(self.fname, "w")
- profile_f.write(self._header())
- for test_key in sorted(self.data):
-@@ -253,11 +253,11 @@
- else:
- line_no, expected_count = expected
-
-- print("Pstats calls: %d Expected %s" % (
-+ print(("Pstats calls: %d Expected %s" % (
- callcount,
- expected_count
- )
-- )
-+ ))
- stats.print_stats()
- #stats.print_callers()
-
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/schema.py
---- a/lib/sqlalchemy/testing/schema.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/schema.py Sat Apr 27 19:42:17 2013 -0400
-@@ -11,7 +11,7 @@
- def Table(*args, **kw):
- """A schema.Table wrapper/hook for dialect-specific tweaks."""
-
-- test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
-+ test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
- if k.startswith('test_')])
-
- kw.update(table_options)
-@@ -58,7 +58,7 @@
- def Column(*args, **kw):
- """A schema.Column wrapper/hook for dialect-specific tweaks."""
-
-- test_opts = dict([(k, kw.pop(k)) for k in kw.keys()
-+ test_opts = dict([(k, kw.pop(k)) for k in list(kw.keys())
- if k.startswith('test_')])
-
- if not config.requirements.foreign_key_ddl.enabled:
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/suite/test_ddl.py
---- a/lib/sqlalchemy/testing/suite/test_ddl.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/suite/test_ddl.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import with_statement
-+
-
- from .. import fixtures, config, util
- from ..config import requirements
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/suite/test_reflection.py
---- a/lib/sqlalchemy/testing/suite/test_reflection.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/suite/test_reflection.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import with_statement
-+
-
- import sqlalchemy as sa
- from sqlalchemy import exc as sa_exc
-@@ -386,7 +386,7 @@
- self.tables.email_addresses, self.tables.dingalings
- insp = inspect(meta.bind)
- oid = insp.get_table_oid(table_name, schema)
-- self.assert_(isinstance(oid, (int, long)))
-+ self.assert_(isinstance(oid, int))
-
- def test_get_table_oid(self):
- self._test_get_table_oid('users')
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/suite/test_types.py
---- a/lib/sqlalchemy/testing/suite/test_types.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/suite/test_types.py Sat Apr 27 19:42:17 2013 -0400
-@@ -15,9 +15,9 @@
- class _UnicodeFixture(object):
- __requires__ = 'unicode_data',
-
-- data = u"Alors vous imaginez ma surprise, au lever du jour, "\
-- u"quand une drôle de petite voix m’a réveillé. Elle "\
-- u"disait: « S’il vous plaît… dessine-moi un mouton! »"
-+ data = "Alors vous imaginez ma surprise, au lever du jour, "\
-+ "quand une drôle de petite voix m’a réveillé. Elle "\
-+ "disait: « S’il vous plaît… dessine-moi un mouton! »"
-
- @classmethod
- def define_tables(cls, metadata):
-@@ -47,7 +47,7 @@
- row,
- (self.data, )
- )
-- assert isinstance(row[0], unicode)
-+ assert isinstance(row[0], str)
-
- def test_round_trip_executemany(self):
- unicode_table = self.tables.unicode_table
-@@ -58,7 +58,7 @@
- {
- 'unicode_data': self.data,
- }
-- for i in xrange(3)
-+ for i in range(3)
- ]
- )
-
-@@ -69,22 +69,22 @@
- ).fetchall()
- eq_(
- rows,
-- [(self.data, ) for i in xrange(3)]
-+ [(self.data, ) for i in range(3)]
- )
- for row in rows:
-- assert isinstance(row[0], unicode)
-+ assert isinstance(row[0], str)
-
- def _test_empty_strings(self):
- unicode_table = self.tables.unicode_table
-
- config.db.execute(
- unicode_table.insert(),
-- {"unicode_data": u''}
-+ {"unicode_data": ''}
- )
- row = config.db.execute(
- select([unicode_table.c.unicode_data])
- ).first()
-- eq_(row, (u'',))
-+ eq_(row, ('',))
-
-
- class UnicodeVarcharTest(_UnicodeFixture, fixtures.TablesTest):
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/util.py
---- a/lib/sqlalchemy/testing/util.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/util.py Sat Apr 27 19:42:17 2013 -0400
-@@ -32,13 +32,13 @@
-
- def picklers():
- picklers = set()
-- # Py2K
-- try:
-- import cPickle
-- picklers.add(cPickle)
-- except ImportError:
-- pass
-- # end Py2K
-+# start Py2K
-+# try:
-+# import cPickle
-+# picklers.add(cPickle)
-+# except ImportError:
-+# pass
-+# end Py2K
- import pickle
- picklers.add(pickle)
-
-@@ -130,8 +130,8 @@
- try:
- fn.__name__ = name
- except TypeError:
-- fn = types.FunctionType(fn.func_code, fn.func_globals, name,
-- fn.func_defaults, fn.func_closure)
-+ fn = types.FunctionType(fn.__code__, fn.__globals__, name,
-+ fn.__defaults__, fn.__closure__)
- return fn
-
-
-diff -r 9d0639b9d3be lib/sqlalchemy/testing/warnings.py
---- a/lib/sqlalchemy/testing/warnings.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/testing/warnings.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import absolute_import
-+
-
- import warnings
- from .. import exc as sa_exc
-@@ -10,7 +10,7 @@
-
- filename = "sqlalchemy.testing.warnings"
- lineno = 1
-- if isinstance(msg, basestring):
-+ if isinstance(msg, str):
- warnings.warn_explicit(msg, sa_exc.SAWarning, filename, lineno)
- else:
- warnings.warn_explicit(msg, filename, lineno)
-diff -r 9d0639b9d3be lib/sqlalchemy/types.py
---- a/lib/sqlalchemy/types.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/types.py Sat Apr 27 19:42:17 2013 -0400
-@@ -156,8 +156,8 @@
-
- """
-
-- return self.__class__.column_expression.func_code \
-- is not TypeEngine.column_expression.func_code
-+ return self.__class__.column_expression.__code__ \
-+ is not TypeEngine.column_expression.__code__
-
- def bind_expression(self, bindvalue):
- """"Given a bind value (i.e. a :class:`.BindParameter` instance),
-@@ -194,8 +194,8 @@
-
- """
-
-- return self.__class__.bind_expression.func_code \
-- is not TypeEngine.bind_expression.func_code
-+ return self.__class__.bind_expression.__code__ \
-+ is not TypeEngine.bind_expression.__code__
-
- def compare_values(self, x, y):
- """Compare two values for equality."""
-@@ -392,12 +392,13 @@
- return default.DefaultDialect()
-
- def __str__(self):
-- # Py3K
-- #return unicode(self.compile())
-- # Py2K
-- return unicode(self.compile()).\
-- encode('ascii', 'backslashreplace')
-- # end Py2K
-+# start Py3K
-+ return str(self.compile())
-+# end Py3K
-+# start Py2K
-+# return unicode(self.compile()).\
-+# encode('ascii', 'backslashreplace')
-+# end Py2K
-
- def __init__(self, *args, **kwargs):
- """Support implementations that were passing arguments"""
-@@ -723,8 +724,8 @@
-
- """
-
-- return self.__class__.process_bind_param.func_code \
-- is not TypeDecorator.process_bind_param.func_code
-+ return self.__class__.process_bind_param.__code__ \
-+ is not TypeDecorator.process_bind_param.__code__
-
- def bind_processor(self, dialect):
- """Provide a bound value processing function for the
-@@ -769,8 +770,8 @@
- exception throw.
-
- """
-- return self.__class__.process_result_value.func_code \
-- is not TypeDecorator.process_result_value.func_code
-+ return self.__class__.process_result_value.__code__ \
-+ is not TypeDecorator.process_result_value.__code__
-
- def result_processor(self, dialect, coltype):
- """Provide a result value processing function for the given
-@@ -1114,11 +1115,12 @@
- self.convert_unicode != 'force':
- if self._warn_on_bytestring:
- def process(value):
-- # Py3K
-- #if isinstance(value, bytes):
-- # Py2K
-- if isinstance(value, str):
-- # end Py2K
-+# start Py3K
-+ if isinstance(value, bytes):
-+# end Py3K
-+# start Py2K
-+# if isinstance(value, str):
-+# end Py2K
- util.warn("Unicode type received non-unicode bind "
- "param value.")
- return value
-@@ -1130,7 +1132,7 @@
- warn_on_bytestring = self._warn_on_bytestring
-
- def process(value):
-- if isinstance(value, unicode):
-+ if isinstance(value, str):
- return encoder(value, self.unicode_error)[0]
- elif warn_on_bytestring and value is not None:
- util.warn("Unicode type received non-unicode bind "
-@@ -1156,7 +1158,7 @@
- # habits. since we will be getting back unicode
- # in most cases, we check for it (decode will fail).
- def process(value):
-- if isinstance(value, unicode):
-+ if isinstance(value, str):
- return value
- else:
- return to_unicode(value)
-@@ -1171,7 +1173,7 @@
- @property
- def python_type(self):
- if self.convert_unicode:
-- return unicode
-+ return str
- else:
- return str
-
-@@ -1318,12 +1320,12 @@
- Integer: self.__class__,
- Numeric: Numeric,
- },
-- # Py2K
-- operators.div: {
-- Integer: self.__class__,
-- Numeric: Numeric,
-- },
-- # end Py2K
-+# start Py2K
-+# operators.div: {
-+# Integer: self.__class__,
-+# Numeric: Numeric,
-+# },
-+# end Py2K
- operators.truediv: {
- Integer: self.__class__,
- Numeric: Numeric,
-@@ -1488,12 +1490,12 @@
- Numeric: self.__class__,
- Integer: self.__class__,
- },
-- # Py2K
-- operators.div: {
-- Numeric: self.__class__,
-- Integer: self.__class__,
-- },
-- # end Py2K
-+# start Py2K
-+# operators.div: {
-+# Numeric: self.__class__,
-+# Integer: self.__class__,
-+# },
-+# end Py2K
- operators.truediv: {
- Numeric: self.__class__,
- Integer: self.__class__,
-@@ -1558,11 +1560,11 @@
- Interval: Interval,
- Numeric: self.__class__,
- },
-- # Py2K
-- operators.div: {
-- Numeric: self.__class__,
-- },
-- # end Py2K
-+# start Py2K
-+# operators.div: {
-+# Numeric: self.__class__,
-+# },
-+# end Py2K
- operators.truediv: {
- Numeric: self.__class__,
- },
-@@ -1693,11 +1695,12 @@
-
- @property
- def python_type(self):
-- # Py3K
-- #return bytes
-- # Py2K
-- return str
-- # end Py2K
-+# start Py3K
-+ return bytes
-+# end Py3K
-+# start Py2K
-+# return str
-+# end Py2K
-
- # Python 3 - sqlite3 doesn't need the `Binary` conversion
- # here, though pg8000 does to indicate "bytea"
-@@ -1715,32 +1718,33 @@
- # Python 3 has native bytes() type
- # both sqlite3 and pg8000 seem to return it,
- # psycopg2 as of 2.5 returns 'memoryview'
-- # Py3K
-- #def result_processor(self, dialect, coltype):
-- # def process(value):
-- # if value is not None:
-- # value = bytes(value)
-- # return value
-- # return process
-- # Py2K
-+# start Py3K
- def result_processor(self, dialect, coltype):
-- if util.jython:
-- def process(value):
-- if value is not None:
-- if isinstance(value, array.array):
-- return value.tostring()
-- return str(value)
-- else:
-- return None
-- else:
-- process = processors.to_str
-+ def process(value):
-+ if value is not None:
-+ value = bytes(value)
-+ return value
- return process
-- # end Py2K
-+# end Py3K
-+# start Py2K
-+# def result_processor(self, dialect, coltype):
-+# if util.jython:
-+# def process(value):
-+# if value is not None:
-+# if isinstance(value, array.array):
-+# return value.tostring()
-+# return str(value)
-+# else:
-+# return None
-+# else:
-+# process = processors.to_str
-+# return process
-+# end Py2K
-
- def coerce_compared_value(self, op, value):
- """See :meth:`.TypeEngine.coerce_compared_value` for a description."""
-
-- if isinstance(value, basestring):
-+ if isinstance(value, str):
- return self
- else:
- return super(_Binary, self).coerce_compared_value(op, value)
-@@ -1997,7 +2001,7 @@
- convert_unicode = kw.pop('convert_unicode', None)
- if convert_unicode is None:
- for e in enums:
-- if isinstance(e, unicode):
-+ if isinstance(e, str):
- convert_unicode = True
- break
- else:
-@@ -2296,11 +2300,11 @@
- operators.truediv: {
- Numeric: self.__class__
- },
-- # Py2K
-- operators.div: {
-- Numeric: self.__class__
-- }
-- # end Py2K
-+# start Py2K
-+# operators.div: {
-+# Numeric: self.__class__
-+# }
-+# end Py2K
- }
-
- @property
-@@ -2451,11 +2455,12 @@
-
- _type_map = {
- str: String(),
-- # Py3K
-- #bytes: LargeBinary(),
-- # Py2K
-- unicode: Unicode(),
-- # end Py2K
-+# start Py3K
-+ bytes: LargeBinary(),
-+# end Py3K
-+# start Py2K
-+# unicode: Unicode(),
-+# end Py2K
- int: Integer(),
- float: Numeric(),
- bool: BOOLEANTYPE,
-diff -r 9d0639b9d3be lib/sqlalchemy/util/_collections.py
---- a/lib/sqlalchemy/util/_collections.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/util/_collections.py Sat Apr 27 19:42:17 2013 -0400
-@@ -55,7 +55,7 @@
- t = tuple.__new__(cls, vals)
- t._labels = []
- if labels:
-- t.__dict__.update(zip(labels, vals))
-+ t.__dict__.update(list(zip(labels, vals)))
- t._labels = labels
- return t
-
-@@ -94,7 +94,7 @@
- .. versionadded:: 0.8
-
- """
-- return dict((key, self.__dict__[key]) for key in self.keys())
-+ return dict((key, self.__dict__[key]) for key in list(self.keys()))
-
-
- class ImmutableContainer(object):
-@@ -142,7 +142,7 @@
- return len(self._data)
-
- def __iter__(self):
-- return self._data.itervalues()
-+ return iter(self._data.values())
-
- def __add__(self, other):
- return list(self) + list(other)
-@@ -189,13 +189,13 @@
- return default
-
- def keys(self):
-- return self._data.keys()
-+ return list(self._data.keys())
-
- def values(self):
-- return self._data.values()
-+ return list(self._data.values())
-
- def items(self):
-- return self._data.items()
-+ return list(self._data.items())
-
- def has_key(self, key):
- return key in self._data
-@@ -242,7 +242,7 @@
- def update(self, ____sequence=None, **kwargs):
- if ____sequence is not None:
- if hasattr(____sequence, 'keys'):
-- for key in ____sequence.keys():
-+ for key in list(____sequence.keys()):
- self.__setitem__(key, ____sequence[key])
- else:
- for key, value in ____sequence:
-@@ -270,13 +270,13 @@
- return list(self._list)
-
- def iterkeys(self):
-- return iter(self.keys())
-+ return iter(list(self.keys()))
-
- def items(self):
-- return [(key, self[key]) for key in self.keys()]
-+ return [(key, self[key]) for key in list(self.keys())]
-
- def iteritems(self):
-- return iter(self.items())
-+ return iter(list(self.items()))
-
- def __setitem__(self, key, object):
- if key not in self:
-@@ -470,8 +470,8 @@
-
- if len(self) > len(other):
- return False
-- for m in itertools.ifilterfalse(other._members.__contains__,
-- self._members.iterkeys()):
-+ for m in itertools.filterfalse(other._members.__contains__,
-+ iter(self._members.keys())):
- return False
- return True
-
-@@ -491,8 +491,8 @@
- if len(self) < len(other):
- return False
-
-- for m in itertools.ifilterfalse(self._members.__contains__,
-- other._members.iterkeys()):
-+ for m in itertools.filterfalse(self._members.__contains__,
-+ iter(other._members.keys())):
- return False
- return True
-
-@@ -582,7 +582,7 @@
- return result
-
- def _member_id_tuples(self):
-- return ((id(v), v) for v in self._members.itervalues())
-+ return ((id(v), v) for v in self._members.values())
-
- def __xor__(self, other):
- if not isinstance(other, IdentitySet):
-@@ -599,7 +599,7 @@
- return self
-
- def copy(self):
-- return type(self)(self._members.itervalues())
-+ return type(self)(iter(self._members.values()))
-
- __copy__ = copy
-
-@@ -607,13 +607,13 @@
- return len(self._members)
-
- def __iter__(self):
-- return self._members.itervalues()
-+ return iter(self._members.values())
-
- def __hash__(self):
- raise TypeError('set objects are unhashable')
-
- def __repr__(self):
-- return '%s(%r)' % (type(self).__name__, self._members.values())
-+ return '%s(%r)' % (type(self).__name__, list(self._members.values()))
-
-
- class WeakSequence(object):
-@@ -623,7 +623,7 @@
- )
-
- def __iter__(self):
-- return self._storage.itervalues()
-+ return iter(self._storage.values())
-
- def __getitem__(self, index):
- try:
-@@ -754,7 +754,7 @@
-
- """
- for elem in x:
-- if not isinstance(elem, basestring) and hasattr(elem, '__iter__'):
-+ if not isinstance(elem, str) and hasattr(elem, '__iter__'):
- for y in flatten_iterator(elem):
- yield y
- else:
-diff -r 9d0639b9d3be lib/sqlalchemy/util/compat.py
---- a/lib/sqlalchemy/util/compat.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/util/compat.py Sat Apr 27 19:42:17 2013 -0400
-@@ -43,14 +43,14 @@
-
- if sys.version_info < (2, 6):
- def next(iter):
-- return iter.next()
-+ return iter.__next__()
- else:
- next = next
- if py3k_warning:
- import pickle
- else:
- try:
-- import cPickle as pickle
-+ import pickle as pickle
- except ImportError:
- import pickle
-
-@@ -59,12 +59,13 @@
- # in newer pythons
- from cgi import parse_qsl
- else:
-- from urlparse import parse_qsl
-+ from urllib.parse import parse_qsl
-
--# Py3K
--#from inspect import getfullargspec as inspect_getfullargspec
--# Py2K
--from inspect import getargspec as inspect_getfullargspec
-+# start Py3K
-+from inspect import getfullargspec as inspect_getfullargspec
-+# end Py3K
-+# start Py2K
-+#from inspect import getargspec as inspect_getfullargspec
- # end Py2K
-
- if py3k_warning:
-diff -r 9d0639b9d3be lib/sqlalchemy/util/deprecations.py
---- a/lib/sqlalchemy/util/deprecations.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/util/deprecations.py Sat Apr 27 19:42:17 2013 -0400
-@@ -10,7 +10,7 @@
- from .. import exc
- import warnings
- import re
--from langhelpers import decorator
-+from .langhelpers import decorator
-
-
- def warn_deprecated(msg, stacklevel=3):
-diff -r 9d0639b9d3be lib/sqlalchemy/util/langhelpers.py
---- a/lib/sqlalchemy/util/langhelpers.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/lib/sqlalchemy/util/langhelpers.py Sat Apr 27 19:42:17 2013 -0400
-@@ -21,10 +21,12 @@
- from .. import exc
- import hashlib
- from . import compat
-+import collections
-
- def md5_hex(x):
-- # Py3K
-- #x = x.encode('utf-8')
-+# start Py3K
-+ x = x.encode('utf-8')
-+# end Py3K
- m = hashlib.md5()
- m.update(x)
- return m.hexdigest()
-@@ -77,8 +79,8 @@
- used = set(used)
- for base in bases:
- pool = itertools.chain((base,),
-- itertools.imap(lambda i: base + str(i),
-- xrange(1000)))
-+ map(lambda i: base + str(i),
-+ range(1000)))
- for sym in pool:
- if sym not in used:
- used.add(sym)
-@@ -95,7 +97,7 @@
- if not inspect.isfunction(fn):
- raise Exception("not a decoratable function")
- spec = inspect_getfullargspec(fn)
-- names = tuple(spec[0]) + spec[1:3] + (fn.func_name,)
-+ names = tuple(spec[0]) + spec[1:3] + (fn.__name__,)
- targ_name, fn_name = _unique_symbols(names, 'target', 'fn')
-
- metadata = dict(target=targ_name, fn=fn_name)
-@@ -104,7 +106,7 @@
- code = 'lambda %(args)s: %(target)s(%(fn)s, %(apply_kw)s)' % (
- metadata)
- decorated = eval(code, {targ_name: target, fn_name: fn})
-- decorated.func_defaults = getattr(fn, 'im_func', fn).func_defaults
-+ decorated.__defaults__ = getattr(fn, 'im_func', fn).__defaults__
- return update_wrapper(decorated, fn)
- return update_wrapper(decorate, target)
-
-@@ -176,7 +178,7 @@
- ctr = class_.__dict__.get('__init__', False)
- if (not ctr or
- not isinstance(ctr, types.FunctionType) or
-- not isinstance(ctr.func_code, types.CodeType)):
-+ not isinstance(ctr.__code__, types.CodeType)):
- stack.update(class_.__bases__)
- continue
-
-@@ -194,7 +196,7 @@
- from inspect import CO_VARKEYWORDS
-
- def inspect_func_args(fn):
-- co = fn.func_code
-+ co = fn.__code__
- nargs = co.co_argcount
- names = co.co_varnames
- args = list(names[:nargs])
-@@ -250,7 +252,7 @@
- 'apply_pos': '(self, a, b, c, **d)'}
-
- """
-- if callable(fn):
-+ if isinstance(fn, collections.Callable):
- spec = inspect_getfullargspec(fn)
- else:
- # we accept an existing argspec...
-@@ -263,22 +265,23 @@
- else:
- self_arg = None
-
-- # Py3K
-- #apply_pos = inspect.formatargspec(spec[0], spec[1],
-- # spec[2], None, spec[4])
-- #num_defaults = 0
-- #if spec[3]:
-- # num_defaults += len(spec[3])
-- #if spec[4]:
-- # num_defaults += len(spec[4])
-- #name_args = spec[0] + spec[4]
-- # Py2K
-- apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2])
-+# start Py3K
-+ apply_pos = inspect.formatargspec(spec[0], spec[1],
-+ spec[2], None, spec[4])
- num_defaults = 0
- if spec[3]:
- num_defaults += len(spec[3])
-- name_args = spec[0]
-- # end Py2K
-+ if spec[4]:
-+ num_defaults += len(spec[4])
-+ name_args = spec[0] + spec[4]
-+# end Py3K
-+# start Py2K
-+# apply_pos = inspect.formatargspec(spec[0], spec[1], spec[2])
-+# num_defaults = 0
-+# if spec[3]:
-+# num_defaults += len(spec[3])
-+# name_args = spec[0]
-+# end Py2K
-
- if num_defaults:
- defaulted_vals = name_args[0 - num_defaults:]
-@@ -341,8 +344,8 @@
-
- """
-
-- if isinstance(func_or_cls, types.MethodType) and not func_or_cls.im_self:
-- return func_or_cls.im_func
-+ if isinstance(func_or_cls, types.MethodType) and not func_or_cls.__self__:
-+ return func_or_cls.__func__
- else:
- return func_or_cls
-
-@@ -399,7 +402,7 @@
-
- """
- def __init__(self, meth):
-- self.target = meth.im_self
-+ self.target = meth.__self__
- self.name = meth.__name__
-
- def __call__(self, *arg, **kw):
-@@ -419,32 +422,34 @@
- will not be descended.
-
- """
-- # Py2K
-- if isinstance(cls, types.ClassType):
-- return list()
-- # end Py2K
-+# start Py2K
-+# if isinstance(cls, types.ClassType):
-+# return list()
-+# end Py2K
- hier = set([cls])
- process = list(cls.__mro__)
- while process:
- c = process.pop()
-- # Py2K
-- if isinstance(c, types.ClassType):
-- continue
-+# start Py2K
-+# if isinstance(c, types.ClassType):
-+# continue
-+# for b in (_ for _ in c.__bases__
-+# if _ not in hier and not isinstance(_, types.ClassType)):
-+# end Py2K
-+# start Py3K
- for b in (_ for _ in c.__bases__
-- if _ not in hier and not isinstance(_, types.ClassType)):
-- # end Py2K
-- # Py3K
-- #for b in (_ for _ in c.__bases__
-- # if _ not in hier):
-+ if _ not in hier):
-+# end Py3K
- process.append(b)
- hier.add(b)
-- # Py3K
-- #if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
-- # continue
-- # Py2K
-- if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
-+# start Py3K
-+ if c.__module__ == 'builtins' or not hasattr(c, '__subclasses__'):
- continue
-- # end Py2K
-+# end Py3K
-+# start Py2K
-+# if c.__module__ == '__builtin__' or not hasattr(c, '__subclasses__'):
-+# continue
-+# end Py2K
- for s in [_ for _ in c.__subclasses__() if _ not in hier]:
- process.append(s)
- hier.add(s)
-@@ -501,9 +506,9 @@
- "return %(name)s.%(method)s%(d_args)s" % locals())
-
- env = from_instance is not None and {name: from_instance} or {}
-- exec py in env
-+ exec(py, env)
- try:
-- env[method].func_defaults = fn.func_defaults
-+ env[method].__defaults__ = fn.__defaults__
- except AttributeError:
- pass
- setattr(into_cls, method, env[method])
-@@ -512,11 +517,12 @@
- def methods_equivalent(meth1, meth2):
- """Return True if the two methods are the same implementation."""
-
-- # Py3K
-- #return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
-- # Py2K
-- return getattr(meth1, 'im_func', meth1) is getattr(meth2, 'im_func', meth2)
-- # end Py2K
-+# start Py3K
-+ return getattr(meth1, '__func__', meth1) is getattr(meth2, '__func__', meth2)
-+# end Py3K
-+# start Py2K
-+# return getattr(meth1, 'im_func', meth1) is getattr(meth2, 'im_func', meth2)
-+# end Py2K
-
-
- def as_interface(obj, cls=None, methods=None, required=None):
-@@ -589,7 +595,7 @@
- for method, impl in dictlike_iteritems(obj):
- if method not in interface:
- raise TypeError("%r: unknown in this interface" % method)
-- if not callable(impl):
-+ if not isinstance(impl, collections.Callable):
- raise TypeError("%r=%r is not callable" % (method, impl))
- setattr(AnonymousInterface, method, staticmethod(impl))
- found.add(method)
-@@ -753,7 +759,7 @@
-
- # from paste.deploy.converters
- def asbool(obj):
-- if isinstance(obj, (str, unicode)):
-+ if isinstance(obj, str):
- obj = obj.strip().lower()
- if obj in ['true', 'yes', 'on', 'y', 't', '1']:
- return True
-@@ -814,13 +820,13 @@
- """Return a threadsafe counter function."""
-
- lock = threading.Lock()
-- counter = itertools.count(1L)
-+ counter = itertools.count(1)
-
- # avoid the 2to3 "next" transformation...
- def _next():
- lock.acquire()
- try:
-- return counter.next()
-+ return next(counter)
- finally:
- lock.release()
-
-@@ -876,15 +882,16 @@
- def dictlike_iteritems(dictlike):
- """Return a (key, value) iterator for almost any dict-like object."""
-
-- # Py3K
-- #if hasattr(dictlike, 'items'):
-- # return dictlike.items()
-- # Py2K
-- if hasattr(dictlike, 'iteritems'):
-- return dictlike.iteritems()
-- elif hasattr(dictlike, 'items'):
-- return iter(dictlike.items())
-- # end Py2K
-+# start Py3K
-+ if hasattr(dictlike, 'items'):
-+ return list(dictlike.items())
-+# end Py3K
-+# start Py2K
-+# if hasattr(dictlike, 'iteritems'):
-+# return dictlike.iteritems()
-+# elif hasattr(dictlike, 'items'):
-+# return iter(dictlike.items())
-+# end Py2K
-
- getter = getattr(dictlike, '__getitem__', getattr(dictlike, 'get', None))
- if getter is None:
-@@ -893,11 +900,11 @@
-
- if hasattr(dictlike, 'iterkeys'):
- def iterator():
-- for key in dictlike.iterkeys():
-+ for key in dictlike.keys():
- yield key, getter(key)
- return iterator()
- elif hasattr(dictlike, 'keys'):
-- return iter((key, getter(key)) for key in dictlike.keys())
-+ return iter((key, getter(key)) for key in list(dictlike.keys()))
- else:
- raise TypeError(
- "Object '%r' is not dict-like" % dictlike)
-@@ -1034,7 +1041,7 @@
- be controlled.
-
- """
-- if isinstance(msg, basestring):
-+ if isinstance(msg, str):
- warnings.warn(msg, exc.SAWarning, stacklevel=stacklevel)
- else:
- warnings.warn(msg, stacklevel=stacklevel)
-diff -r 9d0639b9d3be test/aaa_profiling/test_compiler.py
---- a/test/aaa_profiling/test_compiler.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_compiler.py Sat Apr 27 19:42:17 2013 -0400
-@@ -29,7 +29,7 @@
- for c in t.c:
- c.type._type_affinity
- from sqlalchemy import types
-- for t in types._type_map.values():
-+ for t in list(types._type_map.values()):
- t._type_affinity
-
- cls.dialect = default.DefaultDialect()
-diff -r 9d0639b9d3be test/aaa_profiling/test_memusage.py
---- a/test/aaa_profiling/test_memusage.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_memusage.py Sat Apr 27 19:42:17 2013 -0400
-@@ -47,7 +47,7 @@
- gc_collect()
- samples[x] = len(get_objects_skipping_sqlite_issue())
-
-- print "sample gc sizes:", samples
-+ print("sample gc sizes:", samples)
-
- assert len(_sessions) == 0
-
-diff -r 9d0639b9d3be test/aaa_profiling/test_orm.py
---- a/test/aaa_profiling/test_orm.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_orm.py Sat Apr 27 19:42:17 2013 -0400
-@@ -144,7 +144,7 @@
-
- child.insert().execute([
- {'id':i, 'data':'c%d' % i}
-- for i in xrange(1, 251)
-+ for i in range(1, 251)
- ])
- parent.insert().execute([
- {
-@@ -152,7 +152,7 @@
- 'data':'p%dc%d' % (i, (i % 250) + 1),
- 'child_id':(i % 250) + 1
- }
-- for i in xrange(1, 1000)
-+ for i in range(1, 1000)
- ])
-
- def test_many_to_one_load_no_identity(self):
-@@ -234,11 +234,11 @@
- s = Session()
- s.add_all([
- A(id=i,
-- bs=[B(id=(i * 5) + j) for j in xrange(1, 5)],
-+ bs=[B(id=(i * 5) + j) for j in range(1, 5)],
- c=C(id=i),
-- ds=[D(id=(i * 5) + j) for j in xrange(1, 5)]
-+ ds=[D(id=(i * 5) + j) for j in range(1, 5)]
- )
-- for i in xrange(1, 5)
-+ for i in range(1, 5)
- ])
- s.commit()
-
-@@ -249,11 +249,11 @@
- s = Session()
- for a in [
- A(id=i,
-- bs=[B(id=(i * 5) + j) for j in xrange(1, 5)],
-+ bs=[B(id=(i * 5) + j) for j in range(1, 5)],
- c=C(id=i),
-- ds=[D(id=(i * 5) + j) for j in xrange(1, 5)]
-+ ds=[D(id=(i * 5) + j) for j in range(1, 5)]
- )
-- for i in xrange(1, 5)
-+ for i in range(1, 5)
- ]:
- s.merge(a)
-
-diff -r 9d0639b9d3be test/aaa_profiling/test_resultset.py
---- a/test/aaa_profiling/test_resultset.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_resultset.py Sat Apr 27 19:42:17 2013 -0400
-@@ -19,10 +19,10 @@
-
- def setup(self):
- metadata.create_all()
-- t.insert().execute([dict(('field%d' % fnum, u'value%d' % fnum)
-+ t.insert().execute([dict(('field%d' % fnum, 'value%d' % fnum)
- for fnum in range(NUM_FIELDS)) for r_num in
- range(NUM_RECORDS)])
-- t2.insert().execute([dict(('field%d' % fnum, u'value%d' % fnum)
-+ t2.insert().execute([dict(('field%d' % fnum, 'value%d' % fnum)
- for fnum in range(NUM_FIELDS)) for r_num in
- range(NUM_RECORDS)])
-
-@@ -88,7 +88,7 @@
-
- keymap = {}
- for index, (keyobjs, processor, values) in \
-- enumerate(zip(keys, processors, row)):
-+ enumerate(list(zip(keys, processors, row))):
- for key in keyobjs:
- keymap[key] = (processor, key, index)
- keymap[index] = (processor, key, index)
-diff -r 9d0639b9d3be test/aaa_profiling/test_zoomark.py
---- a/test/aaa_profiling/test_zoomark.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_zoomark.py Sat Apr 27 19:42:17 2013 -0400
-@@ -72,68 +72,68 @@
- Zoo = metadata.tables['Zoo']
- Animal = metadata.tables['Animal']
- engine = metadata.bind
-- wap = engine.execute(Zoo.insert(), Name=u'Wild Animal Park',
-+ wap = engine.execute(Zoo.insert(), Name='Wild Animal Park',
- Founded=datetime.date(2000, 1, 1),
- Opens=datetime.time(8, 15, 59),
- LastEscape=
- datetime.datetime(2004, 7, 29, 5, 6, 7),
- Admission=4.95).inserted_primary_key[0]
-- sdz = engine.execute(Zoo.insert(), Name=u'San Diego Zoo',
-+ sdz = engine.execute(Zoo.insert(), Name='San Diego Zoo',
- Founded=datetime.date(1935, 9, 13),
- Opens=datetime.time(9, 0, 0),
- Admission=0).inserted_primary_key[0]
-- engine.execute(Zoo.insert(inline=True), Name=u'Montr\xe9al Biod\xf4me',
-+ engine.execute(Zoo.insert(inline=True), Name='Montr\xe9al Biod\xf4me',
- Founded=datetime.date(1992, 6, 19),
- Opens=datetime.time(9, 0, 0), Admission=11.75)
-- seaworld = engine.execute(Zoo.insert(), Name=u'Sea_World',
-+ seaworld = engine.execute(Zoo.insert(), Name='Sea_World',
- Admission=60).inserted_primary_key[0]
-
- # Let's add a crazy futuristic Zoo to test large date values.
-
-- lp = engine.execute(Zoo.insert(), Name=u'Luna Park',
-+ lp = engine.execute(Zoo.insert(), Name='Luna Park',
- Founded=datetime.date(2072, 7, 17),
- Opens=datetime.time(0, 0, 0),
- Admission=134.95).inserted_primary_key[0]
-
- # Animals
-
-- leopardid = engine.execute(Animal.insert(), Species=u'Leopard',
-+ leopardid = engine.execute(Animal.insert(), Species='Leopard',
- Lifespan=73.5).inserted_primary_key[0]
- engine.execute(Animal.update(Animal.c.ID == leopardid), ZooID=wap,
- LastEscape=datetime.datetime( 2004, 12, 21, 8, 15, 0, 999907,)
- )
-- lion = engine.execute(Animal.insert(), Species=u'Lion',
-+ lion = engine.execute(Animal.insert(), Species='Lion',
- ZooID=wap).inserted_primary_key[0]
-- engine.execute(Animal.insert(), Species=u'Slug', Legs=1, Lifespan=.75)
-- tiger = engine.execute(Animal.insert(), Species=u'Tiger',
-+ engine.execute(Animal.insert(), Species='Slug', Legs=1, Lifespan=.75)
-+ tiger = engine.execute(Animal.insert(), Species='Tiger',
- ZooID=sdz).inserted_primary_key[0]
-
- # Override Legs.default with itself just to make sure it works.
-
-- engine.execute(Animal.insert(inline=True), Species=u'Bear', Legs=4)
-- engine.execute(Animal.insert(inline=True), Species=u'Ostrich', Legs=2,
-+ engine.execute(Animal.insert(inline=True), Species='Bear', Legs=4)
-+ engine.execute(Animal.insert(inline=True), Species='Ostrich', Legs=2,
- Lifespan=103.2)
-- engine.execute(Animal.insert(inline=True), Species=u'Centipede',
-+ engine.execute(Animal.insert(inline=True), Species='Centipede',
- Legs=100)
-- emp = engine.execute(Animal.insert(), Species=u'Emperor Penguin',
-+ emp = engine.execute(Animal.insert(), Species='Emperor Penguin',
- Legs=2, ZooID=seaworld).inserted_primary_key[0]
-- adelie = engine.execute(Animal.insert(), Species=u'Adelie Penguin',
-+ adelie = engine.execute(Animal.insert(), Species='Adelie Penguin',
- Legs=2, ZooID=seaworld).inserted_primary_key[0]
-- engine.execute(Animal.insert(inline=True), Species=u'Millipede',
-+ engine.execute(Animal.insert(inline=True), Species='Millipede',
- Legs=1000000, ZooID=sdz)
-
- # Add a mother and child to test relationships
-
-- bai_yun = engine.execute(Animal.insert(), Species=u'Ape',
-- Name=u'Bai Yun', Legs=2).inserted_primary_key[0]
-- engine.execute(Animal.insert(inline=True), Species=u'Ape',
-- Name=u'Hua Mei', Legs=2, MotherID=bai_yun)
-+ bai_yun = engine.execute(Animal.insert(), Species='Ape',
-+ Name='Bai Yun', Legs=2).inserted_primary_key[0]
-+ engine.execute(Animal.insert(inline=True), Species='Ape',
-+ Name='Hua Mei', Legs=2, MotherID=bai_yun)
-
- def test_baseline_2_insert(self):
- Animal = metadata.tables['Animal']
- i = Animal.insert(inline=True)
-- for x in xrange(ITERATIONS):
-- tick = i.execute(Species=u'Tick', Name=u'Tick %d' % x,
-+ for x in range(ITERATIONS):
-+ tick = i.execute(Species='Tick', Name='Tick %d' % x,
- Legs=8)
-
- def test_baseline_3_properties(self):
-@@ -146,28 +146,28 @@
-
- return list(engine.execute(select).first())
-
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # Zoos
-
- WAP = fullobject(Zoo.select(Zoo.c.Name
-- == u'Wild Animal Park'))
-+ == 'Wild Animal Park'))
- SDZ = fullobject(Zoo.select(Zoo.c.Founded
- == datetime.date(1935, 9, 13)))
- Biodome = fullobject(Zoo.select(Zoo.c.Name
-- == u'Montr\xe9al Biod\xf4me'))
-+ == 'Montr\xe9al Biod\xf4me'))
- seaworld = fullobject(Zoo.select(Zoo.c.Admission
- == float(60)))
-
- # Animals
-
- leopard = fullobject(Animal.select(Animal.c.Species
-- == u'Leopard'))
-+ == 'Leopard'))
- ostrich = fullobject(Animal.select(Animal.c.Species
-- == u'Ostrich'))
-+ == 'Ostrich'))
- millipede = fullobject(Animal.select(Animal.c.Legs
- == 1000000))
-- ticks = fullobject(Animal.select(Animal.c.Species == u'Tick'
-+ ticks = fullobject(Animal.select(Animal.c.Species == 'Tick'
- ))
-
- def test_baseline_4_expressions(self):
-@@ -180,7 +180,7 @@
-
- return [list(row) for row in engine.execute(select).fetchall()]
-
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
- assert len(fulltable(Zoo.select())) == 5
- assert len(fulltable(Animal.select())) == ITERATIONS + 12
- assert len(fulltable(Animal.select(Animal.c.Legs == 4))) \
-@@ -194,9 +194,9 @@
- assert len(fulltable(Animal.select(Animal.c.Lifespan
- > 70))) == 2
- assert len(fulltable(Animal.select(Animal.c.Species.
-- startswith(u'L')))) == 2
-+ startswith('L')))) == 2
- assert len(fulltable(Animal.select(Animal.c.Species.
-- endswith(u'pede')))) == 2
-+ endswith('pede')))) == 2
- assert len(fulltable(Animal.select(Animal.c.LastEscape
- != None))) == 1
- assert len(fulltable(Animal.select(None
-@@ -204,10 +204,10 @@
-
- # In operator (containedby)
-
-- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%pede%'
-+ assert len(fulltable(Animal.select(Animal.c.Species.like('%pede%'
- )))) == 2
-- assert len(fulltable(Animal.select(Animal.c.Species.in_([u'Lion'
-- , u'Tiger', u'Bear'])))) == 3
-+ assert len(fulltable(Animal.select(Animal.c.Species.in_(['Lion'
-+ , 'Tiger', 'Bear'])))) == 3
-
- # Try In with cell references
- class thing(object):
-@@ -215,20 +215,20 @@
-
-
- pet, pet2 = thing(), thing()
-- pet.Name, pet2.Name = u'Slug', u'Ostrich'
-+ pet.Name, pet2.Name = 'Slug', 'Ostrich'
- assert len(fulltable(Animal.select(Animal.c.Species.in_([pet.Name,
- pet2.Name])))) == 2
-
- # logic and other functions
-
-- assert len(fulltable(Animal.select(Animal.c.Species.like(u'Slug'
-+ assert len(fulltable(Animal.select(Animal.c.Species.like('Slug'
- )))) == 1
-- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%pede%'
-+ assert len(fulltable(Animal.select(Animal.c.Species.like('%pede%'
- )))) == 2
-- name = u'Lion'
-+ name = 'Lion'
- assert len(fulltable(Animal.select(func.length(Animal.c.Species)
- == len(name)))) == ITERATIONS + 3
-- assert len(fulltable(Animal.select(Animal.c.Species.like(u'%i%'
-+ assert len(fulltable(Animal.select(Animal.c.Species.like('%i%'
- )))) == ITERATIONS + 7
-
- # Test now(), today(), year(), month(), day()
-@@ -250,7 +250,7 @@
- Zoo = metadata.tables['Zoo']
- engine = metadata.bind
-
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # views
-
-@@ -274,7 +274,7 @@
- for species, lifespan in engine.execute(select([Animal.c.Species,
- Animal.c.Lifespan])).fetchall():
- assert lifespan == expected[species]
-- expected = [u'Montr\xe9al Biod\xf4me', 'Wild Animal Park']
-+ expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
- e = select([Zoo.c.Name], and_(Zoo.c.Founded != None,
- Zoo.c.Founded <= func.current_timestamp(),
- Zoo.c.Founded >= datetime.date(1990, 1, 1)))
-@@ -290,21 +290,21 @@
- def test_baseline_6_editing(self):
- Zoo = metadata.tables['Zoo']
- engine = metadata.bind
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # Edit
-
-- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'San Diego Zoo'
-+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'San Diego Zoo'
- )).first()
- engine.execute(Zoo.update(Zoo.c.ID == SDZ['ID'
-- ]), Name=u'The San Diego Zoo',
-+ ]), Name='The San Diego Zoo',
- Founded=datetime.date(1900, 1, 1),
- Opens=datetime.time(7, 30, 0),
- Admission='35.00')
-
- # Test edits
-
-- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'The San Diego Zoo'
-+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'The San Diego Zoo'
- )).first()
- assert SDZ['Founded'] == datetime.date(1900, 1, 1), \
- SDZ['Founded']
-@@ -312,14 +312,14 @@
- # Change it back
-
- engine.execute(Zoo.update(Zoo.c.ID == SDZ['ID'
-- ]), Name=u'San Diego Zoo',
-+ ]), Name='San Diego Zoo',
- Founded=datetime.date(1935, 9, 13),
- Opens=datetime.time(9, 0, 0),
- Admission='0')
-
- # Test re-edits
-
-- SDZ = engine.execute(Zoo.select(Zoo.c.Name == u'San Diego Zoo'
-+ SDZ = engine.execute(Zoo.select(Zoo.c.Name == 'San Diego Zoo'
- )).first()
- assert SDZ['Founded'] == datetime.date(1935, 9, 13)
-
-@@ -333,14 +333,14 @@
-
- return [list(row) for row in engine.execute(select).fetchall()]
-
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
- za = fulltable(select([Zoo.c.ID] + list(Animal.c),
-- Zoo.c.Name == u'San Diego Zoo',
-+ Zoo.c.Name == 'San Diego Zoo',
- from_obj=[join(Zoo, Animal)]))
-- SDZ = Zoo.select(Zoo.c.Name == u'San Diego Zoo')
-+ SDZ = Zoo.select(Zoo.c.Name == 'San Diego Zoo')
- e = fulltable(select([Zoo.c.ID, Animal.c.ID],
-- and_(Zoo.c.Name == u'San Diego Zoo',
-- Animal.c.Species == u'Leopard'),
-+ and_(Zoo.c.Name == 'San Diego Zoo',
-+ Animal.c.Species == 'Leopard'),
- from_obj=[join(Zoo, Animal)]))
-
- # Now try the same query with INNER, LEFT, and RIGHT JOINs.
-diff -r 9d0639b9d3be test/aaa_profiling/test_zoomark_orm.py
---- a/test/aaa_profiling/test_zoomark_orm.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/aaa_profiling/test_zoomark_orm.py Sat Apr 27 19:42:17 2013 -0400
-@@ -75,14 +75,14 @@
- class Zoo(object):
-
- def __init__(self, **kwargs):
-- for k, v in kwargs.iteritems():
-+ for k, v in kwargs.items():
- setattr(self, k, v)
-
-
- class Animal(object):
-
- def __init__(self, **kwargs):
-- for k, v in kwargs.iteritems():
-+ for k, v in kwargs.items():
- setattr(self, k, v)
-
-
-@@ -90,93 +90,93 @@
- mapper(Animal, animal)
-
- def test_baseline_1a_populate(self):
-- wap = Zoo(Name=u'Wild Animal Park', Founded=datetime.date(2000,
-+ wap = Zoo(Name='Wild Animal Park', Founded=datetime.date(2000,
- 1, 1), Opens=datetime.time(8, 15, 59),
- LastEscape=datetime.datetime( 2004, 7, 29, 5, 6, 7, ),
- Admission=4.95)
- session.add(wap)
-- sdz = Zoo(Name=u'San Diego Zoo', Founded=datetime.date(1835, 9,
-+ sdz = Zoo(Name='San Diego Zoo', Founded=datetime.date(1835, 9,
- 13), Opens=datetime.time(9, 0, 0), Admission=0)
- session.add(sdz)
-- bio = Zoo(Name=u'Montr\xe9al Biod\xf4me',
-+ bio = Zoo(Name='Montr\xe9al Biod\xf4me',
- Founded=datetime.date(1992, 6, 19),
- Opens=datetime.time(9, 0, 0), Admission=11.75)
- session.add(bio)
-- seaworld = Zoo(Name=u'Sea_World', Admission=60)
-+ seaworld = Zoo(Name='Sea_World', Admission=60)
- session.add(seaworld)
-
- # Let's add a crazy futuristic Zoo to test large date values.
-
-- lp = Zoo(Name=u'Luna Park', Founded=datetime.date(2072, 7, 17),
-+ lp = Zoo(Name='Luna Park', Founded=datetime.date(2072, 7, 17),
- Opens=datetime.time(0, 0, 0), Admission=134.95)
- session.add(lp)
- session.flush()
-
- # Animals
-
-- leopard = Animal(Species=u'Leopard', Lifespan=73.5)
-+ leopard = Animal(Species='Leopard', Lifespan=73.5)
- session.add(leopard)
- leopard.ZooID = wap.ID
- leopard.LastEscape = \
- datetime.datetime(2004, 12, 21, 8, 15, 0, 999907, )
-- session.add(Animal(Species=u'Lion', ZooID=wap.ID))
-- session.add(Animal(Species=u'Slug', Legs=1, Lifespan=.75))
-- session.add(Animal(Species=u'Tiger', ZooID=sdz.ID))
-+ session.add(Animal(Species='Lion', ZooID=wap.ID))
-+ session.add(Animal(Species='Slug', Legs=1, Lifespan=.75))
-+ session.add(Animal(Species='Tiger', ZooID=sdz.ID))
-
- # Override Legs.default with itself just to make sure it works.
-
-- session.add(Animal(Species=u'Bear', Legs=4))
-- session.add(Animal(Species=u'Ostrich', Legs=2, Lifespan=103.2))
-- session.add(Animal(Species=u'Centipede', Legs=100))
-- session.add(Animal(Species=u'Emperor Penguin', Legs=2,
-+ session.add(Animal(Species='Bear', Legs=4))
-+ session.add(Animal(Species='Ostrich', Legs=2, Lifespan=103.2))
-+ session.add(Animal(Species='Centipede', Legs=100))
-+ session.add(Animal(Species='Emperor Penguin', Legs=2,
- ZooID=seaworld.ID))
-- session.add(Animal(Species=u'Adelie Penguin', Legs=2,
-+ session.add(Animal(Species='Adelie Penguin', Legs=2,
- ZooID=seaworld.ID))
-- session.add(Animal(Species=u'Millipede', Legs=1000000,
-+ session.add(Animal(Species='Millipede', Legs=1000000,
- ZooID=sdz.ID))
-
- # Add a mother and child to test relationships
-
-- bai_yun = Animal(Species=u'Ape', Nameu=u'Bai Yun', Legs=2)
-+ bai_yun = Animal(Species='Ape', Nameu='Bai Yun', Legs=2)
- session.add(bai_yun)
-- session.add(Animal(Species=u'Ape', Name=u'Hua Mei', Legs=2,
-+ session.add(Animal(Species='Ape', Name='Hua Mei', Legs=2,
- MotherID=bai_yun.ID))
- session.flush()
- session.commit()
-
- def test_baseline_2_insert(self):
-- for x in xrange(ITERATIONS):
-- session.add(Animal(Species=u'Tick', Name=u'Tick %d' % x,
-+ for x in range(ITERATIONS):
-+ session.add(Animal(Species='Tick', Name='Tick %d' % x,
- Legs=8))
- session.flush()
-
- def test_baseline_3_properties(self):
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # Zoos
-
- WAP = list(session.query(Zoo).filter(Zoo.Name
-- == u'Wild Animal Park'))
-+ == 'Wild Animal Park'))
- SDZ = list(session.query(Zoo).filter(Zoo.Founded
- == datetime.date(1835, 9, 13)))
- Biodome = list(session.query(Zoo).filter(Zoo.Name
-- == u'Montr\xe9al Biod\xf4me'))
-+ == 'Montr\xe9al Biod\xf4me'))
- seaworld = list(session.query(Zoo).filter(Zoo.Admission
- == float(60)))
-
- # Animals
-
- leopard = list(session.query(Animal).filter(Animal.Species
-- == u'Leopard'))
-+ == 'Leopard'))
- ostrich = list(session.query(Animal).filter(Animal.Species
-- == u'Ostrich'))
-+ == 'Ostrich'))
- millipede = list(session.query(Animal).filter(Animal.Legs
- == 1000000))
- ticks = list(session.query(Animal).filter(Animal.Species
-- == u'Tick'))
-+ == 'Tick'))
-
- def test_baseline_4_expressions(self):
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
- assert len(list(session.query(Zoo))) == 5
- assert len(list(session.query(Animal))) == ITERATIONS + 12
- assert len(list(session.query(Animal).filter(Animal.Legs
-@@ -190,9 +190,9 @@
- assert len(list(session.query(Animal).filter(Animal.Lifespan
- > 70))) == 2
- assert len(list(session.query(Animal).
-- filter(Animal.Species.like(u'L%')))) == 2
-+ filter(Animal.Species.like('L%')))) == 2
- assert len(list(session.query(Animal).
-- filter(Animal.Species.like(u'%pede')))) == 2
-+ filter(Animal.Species.like('%pede')))) == 2
- assert len(list(session.query(Animal).filter(Animal.LastEscape
- != None))) == 1
- assert len(list(session.query(Animal).filter(Animal.LastEscape
-@@ -201,29 +201,29 @@
- # In operator (containedby)
-
- assert len(list(session.query(Animal).filter(
-- Animal.Species.like(u'%pede%')))) == 2
-+ Animal.Species.like('%pede%')))) == 2
- assert len(list(session.query(Animal).
-- filter(Animal.Species.in_((u'Lion'
-- , u'Tiger', u'Bear'))))) == 3
-+ filter(Animal.Species.in_(('Lion'
-+ , 'Tiger', 'Bear'))))) == 3
-
- # Try In with cell references
- class thing(object):
- pass
-
- pet, pet2 = thing(), thing()
-- pet.Name, pet2.Name = u'Slug', u'Ostrich'
-+ pet.Name, pet2.Name = 'Slug', 'Ostrich'
- assert len(list(session.query(Animal).
- filter(Animal.Species.in_((pet.Name,
- pet2.Name))))) == 2
-
- # logic and other functions
-
-- name = u'Lion'
-+ name = 'Lion'
- assert len(list(session.query(Animal).
- filter(func.length(Animal.Species)
- == len(name)))) == ITERATIONS + 3
- assert len(list(session.query(Animal).
-- filter(Animal.Species.like(u'%i%'
-+ filter(Animal.Species.like('%i%'
- )))) == ITERATIONS + 7
-
- # Test now(), today(), year(), month(), day()
-@@ -246,7 +246,7 @@
-
- # TODO: convert to ORM
- engine = metadata.bind
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # views
-
-@@ -270,7 +270,7 @@
- for species, lifespan in engine.execute(select([Animal.c.Species,
- Animal.c.Lifespan])).fetchall():
- assert lifespan == expected[species]
-- expected = [u'Montr\xe9al Biod\xf4me', 'Wild Animal Park']
-+ expected = ['Montr\xe9al Biod\xf4me', 'Wild Animal Park']
- e = select([Zoo.c.Name], and_(Zoo.c.Founded != None,
- Zoo.c.Founded <= func.current_timestamp(),
- Zoo.c.Founded >= datetime.date(1990, 1, 1)))
-@@ -284,13 +284,13 @@
- legs.sort()
-
- def test_baseline_6_editing(self):
-- for x in xrange(ITERATIONS):
-+ for x in range(ITERATIONS):
-
- # Edit
-
-- SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
-+ SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
- ).one()
-- SDZ.Name = u'The San Diego Zoo'
-+ SDZ.Name = 'The San Diego Zoo'
- SDZ.Founded = datetime.date(1900, 1, 1)
- SDZ.Opens = datetime.time(7, 30, 0)
- SDZ.Admission = 35.00
-@@ -298,19 +298,19 @@
- # Test edits
-
- SDZ = session.query(Zoo).filter(Zoo.Name
-- == u'The San Diego Zoo').one()
-+ == 'The San Diego Zoo').one()
- assert SDZ.Founded == datetime.date(1900, 1, 1), SDZ.Founded
-
- # Change it back
-
-- SDZ.Name = u'San Diego Zoo'
-+ SDZ.Name = 'San Diego Zoo'
- SDZ.Founded = datetime.date(1835, 9, 13)
- SDZ.Opens = datetime.time(9, 0, 0)
- SDZ.Admission = 0
-
- # Test re-edits
-
-- SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
-+ SDZ = session.query(Zoo).filter(Zoo.Name == 'San Diego Zoo'
- ).one()
- assert SDZ.Founded == datetime.date(1835, 9, 13), \
- SDZ.Founded
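
The hunks above apply the two most common rewrites in this file: the u'' prefix is dropped from string literals and xrange() becomes range(). A minimal standalone sketch of why both are equivalent on Python 3 (illustrative values only, not taken from the patched test):

    # Python 3 has a single text type, so the u'' prefix stripped here was
    # redundant: every str literal is already unicode text.
    assert isinstance('Adelie Penguin', str)

    # range() is lazy on Python 3, just like the old xrange(); materialize it
    # only when an actual list is needed.
    ITERATIONS = 3
    names = ['Tick %d' % x for x in range(ITERATIONS)]
    assert names == ['Tick 0', 'Tick 1', 'Tick 2']
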
-diff -r 9d0639b9d3be test/base/test_dependency.py
---- a/test/base/test_dependency.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/base/test_dependency.py Sat Apr 27 19:42:17 2013 -0400
-@@ -82,7 +82,7 @@
- try:
- list(topological.sort(tuples, allitems))
- assert False
-- except exc.CircularDependencyError, err:
-+ except exc.CircularDependencyError as err:
- eq_(err.cycles, set(['node1', 'node3', 'node2', 'node5',
- 'node4']))
- eq_(err.edges, set([('node3', 'node1'), ('node4', 'node1'),
-@@ -105,7 +105,7 @@
- try:
- list(topological.sort(tuples, allitems))
- assert False
-- except exc.CircularDependencyError, err:
-+ except exc.CircularDependencyError as err:
- eq_(err.cycles, set(['node1', 'node3', 'node2']))
- eq_(err.edges, set([('node3', 'node1'), ('node2', 'node3'),
- ('node3', 'node2'), ('node1', 'node2'),
-@@ -271,7 +271,7 @@
- ('node4', 'node17'), ('node2', 'node20'), ('node19', 'node10'),
- ('node8', 'node4'), ('node11', 'node3'), ('node6', 'node1')
- ]
-- allnodes = ['node%d' % i for i in xrange(1, 21)]
-+ allnodes = ['node%d' % i for i in range(1, 21)]
- eq_(
- topological.find_cycles(tuples, allnodes),
- set(['node11', 'node10', 'node13', 'node15', 'node14', 'node17',
-diff -r 9d0639b9d3be test/base/test_events.py
---- a/test/base/test_events.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/base/test_events.py Sat Apr 27 19:42:17 2013 -0400
-@@ -190,7 +190,7 @@
- def test_lis_subcalss_lis(self):
- @event.listens_for(self.TargetOne, "event_one")
- def handler1(x, y):
-- print 'handler1'
-+ print('handler1')
-
- class SubTarget(self.TargetOne):
- pass
-@@ -207,7 +207,7 @@
- def test_lis_multisub_lis(self):
- @event.listens_for(self.TargetOne, "event_one")
- def handler1(x, y):
-- print 'handler1'
-+ print('handler1')
-
- class SubTarget(self.TargetOne):
- pass
-diff -r 9d0639b9d3be test/base/test_except.py
---- a/test/base/test_except.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/base/test_except.py Sat Apr 27 19:42:17 2013 -0400
-@@ -5,14 +5,15 @@
- from sqlalchemy.testing import fixtures
- from sqlalchemy.testing import eq_
-
--# Py3K
--#StandardError = BaseException
--# Py2K
--from exceptions import StandardError, KeyboardInterrupt, SystemExit
-+# start Py3K
-+Exception = BaseException
-+# end Py3K
-+# start Py2K
-+#from exceptions import StandardError, KeyboardInterrupt, SystemExit
- # end Py2K
-
-
--class Error(StandardError):
-+class Error(Exception):
- """This class will be old-style on <= 2.4 and new-style on >=
- 2.5."""
-
-@@ -47,7 +48,7 @@
- try:
- raise sa_exceptions.DBAPIError.instance('this is a message'
- , None, OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- assert str(exc) \
- == "(OperationalError) 'this is a message' None"
-
-@@ -58,7 +59,7 @@
- {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h':
- 8, 'i': 9, 'j': 10, 'k': 11,
- }, OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' {")
-
-@@ -67,7 +68,7 @@
- raise sa_exceptions.DBAPIError.instance('this is a message',
- [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,],
- OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- assert str(exc).startswith("(OperationalError) 'this is a "
- "message' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]")
-
-@@ -77,7 +78,7 @@
- [{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1: 1}, {1:1}, {1: 1}, {1: 1},],
- OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "\
- "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: "\
-@@ -87,7 +88,7 @@
- {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1},
- {1:1}, {1: 1}, {1: 1}, {1: 1},
- ], OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- eq_(str(exc) ,
- "(OperationalError) 'this is a message' [{1: 1}, "
- "{1: 1}, {1: 1}, {1: 1}, {1: 1}, {1: 1}, "
-@@ -100,7 +101,7 @@
- (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
- (1, ),
- ], OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "\
- "(1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,), (1,)]")
-@@ -109,7 +110,7 @@
- (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ), (1, ),
- (1, ), (1, ),
- ], OperationalError(), DatabaseError)
-- except sa_exceptions.DBAPIError, exc:
-+ except sa_exceptions.DBAPIError as exc:
- eq_(str(exc),
- "(OperationalError) 'this is a message' [(1,), "
- "(1,), (1,), (1,), (1,), (1,), (1,), (1,) "
-@@ -121,7 +122,7 @@
- try:
- raise sa_exceptions.DBAPIError.instance('', [],
- ProgrammingError(), DatabaseError)
-- except sa_exceptions.DBAPIError, e:
-+ except sa_exceptions.DBAPIError as e:
- self.assert_(True)
- self.assert_('Error in str() of DB-API' in e.args[0])
-
-@@ -129,7 +130,7 @@
- try:
- raise sa_exceptions.DBAPIError.instance('', [], OutOfSpec(),
- DatabaseError)
-- except sa_exceptions.DBAPIError, e:
-+ except sa_exceptions.DBAPIError as e:
- self.assert_(e.__class__ is sa_exceptions.DBAPIError)
- except OutOfSpec:
- self.assert_(False)
-@@ -137,7 +138,7 @@
- try:
- raise sa_exceptions.DBAPIError.instance('', [],
- sa_exceptions.ArgumentError(), DatabaseError)
-- except sa_exceptions.DBAPIError, e:
-+ except sa_exceptions.DBAPIError as e:
- self.assert_(e.__class__ is sa_exceptions.DBAPIError)
- except sa_exceptions.ArgumentError:
- self.assert_(False)
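
The test_except.py hunks above rewrite the Python 2-only "except SomeError, e:" form as "except SomeError as e:", the only spelling Python 3 accepts (and one Python 2.6+ also understands). A standalone sketch of the converted pattern, using a hypothetical error class rather than SQLAlchemy's DBAPIError:

    class FakeDBAPIError(Exception):
        """Hypothetical stand-in for sa_exceptions.DBAPIError."""

    try:
        raise FakeDBAPIError("(OperationalError) 'this is a message'")
    except FakeDBAPIError as exc:  # Py3-compatible form of "except FakeDBAPIError, exc:"
        assert 'OperationalError' in str(exc)
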
-diff -r 9d0639b9d3be test/base/test_utils.py
---- a/test/base/test_utils.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/base/test_utils.py Sat Apr 27 19:42:17 2013 -0400
-@@ -16,7 +16,7 @@
- eq_(len(keyed_tuple), 0)
-
- eq_(keyed_tuple.__dict__, {'_labels': []})
-- eq_(keyed_tuple.keys(), [])
-+ eq_(list(keyed_tuple.keys()), [])
- eq_(keyed_tuple._fields, ())
- eq_(keyed_tuple._asdict(), {})
-
-@@ -27,7 +27,7 @@
- eq_(len(keyed_tuple), 2)
-
- eq_(keyed_tuple.__dict__, {'_labels': []})
-- eq_(keyed_tuple.keys(), [])
-+ eq_(list(keyed_tuple.keys()), [])
- eq_(keyed_tuple._fields, ())
- eq_(keyed_tuple._asdict(), {})
-
-@@ -37,7 +37,7 @@
- def test_basic_creation(self):
- keyed_tuple = util.KeyedTuple([1, 2], ['a', 'b'])
- eq_(str(keyed_tuple), '(1, 2)')
-- eq_(keyed_tuple.keys(), ['a', 'b'])
-+ eq_(list(keyed_tuple.keys()), ['a', 'b'])
- eq_(keyed_tuple._fields, ('a', 'b'))
- eq_(keyed_tuple._asdict(), {'a': 1, 'b': 2})
-
-@@ -66,7 +66,7 @@
- # TODO: consider not allowing None labels
- expected = {'a': 1, None: 2, 'b': 3, '_labels': ['a', None, 'b']}
- eq_(keyed_tuple.__dict__, expected)
-- eq_(keyed_tuple.keys(), ['a', 'b'])
-+ eq_(list(keyed_tuple.keys()), ['a', 'b'])
- eq_(keyed_tuple._fields, ('a', 'b'))
- eq_(keyed_tuple._asdict(), {'a': 1, 'b': 3})
-
-@@ -86,7 +86,7 @@
- # TODO: consider not allowing duplicate labels
- expected = {'a': 1, 'b': 3, '_labels': ['a', 'b', 'b']}
- eq_(keyed_tuple.__dict__, expected)
-- eq_(keyed_tuple.keys(), ['a', 'b', 'b'])
-+ eq_(list(keyed_tuple.keys()), ['a', 'b', 'b'])
- eq_(keyed_tuple._fields, ('a', 'b', 'b'))
- eq_(keyed_tuple._asdict(), {'a': 1, 'b': 3})
-
-@@ -125,12 +125,12 @@
- o['snack'] = 'attack'
- o['c'] = 3
-
-- eq_(o.keys(), ['a', 'b', 'snack', 'c'])
-- eq_(o.values(), [1, 2, 'attack', 3])
-+ eq_(list(o.keys()), ['a', 'b', 'snack', 'c'])
-+ eq_(list(o.values()), [1, 2, 'attack', 3])
-
- o.pop('snack')
-- eq_(o.keys(), ['a', 'b', 'c'])
-- eq_(o.values(), [1, 2, 3])
-+ eq_(list(o.keys()), ['a', 'b', 'c'])
-+ eq_(list(o.values()), [1, 2, 3])
-
- try:
- o.pop('eep')
-@@ -146,40 +146,40 @@
- except TypeError:
- pass
-
-- eq_(o.keys(), ['a', 'b', 'c'])
-- eq_(o.values(), [1, 2, 3])
-+ eq_(list(o.keys()), ['a', 'b', 'c'])
-+ eq_(list(o.values()), [1, 2, 3])
-
- o2 = util.OrderedDict(d=4)
- o2['e'] = 5
-
-- eq_(o2.keys(), ['d', 'e'])
-- eq_(o2.values(), [4, 5])
-+ eq_(list(o2.keys()), ['d', 'e'])
-+ eq_(list(o2.values()), [4, 5])
-
- o.update(o2)
-- eq_(o.keys(), ['a', 'b', 'c', 'd', 'e'])
-- eq_(o.values(), [1, 2, 3, 4, 5])
-+ eq_(list(o.keys()), ['a', 'b', 'c', 'd', 'e'])
-+ eq_(list(o.values()), [1, 2, 3, 4, 5])
-
- o.setdefault('c', 'zzz')
- o.setdefault('f', 6)
-- eq_(o.keys(), ['a', 'b', 'c', 'd', 'e', 'f'])
-- eq_(o.values(), [1, 2, 3, 4, 5, 6])
-+ eq_(list(o.keys()), ['a', 'b', 'c', 'd', 'e', 'f'])
-+ eq_(list(o.values()), [1, 2, 3, 4, 5, 6])
-
- def test_odict_constructor(self):
- o = util.OrderedDict([('name', 'jbe'), ('fullname', 'jonathan'
- ), ('password', '')])
-- eq_(o.keys(), ['name', 'fullname', 'password'])
-+ eq_(list(o.keys()), ['name', 'fullname', 'password'])
-
- def test_odict_copy(self):
- o = util.OrderedDict()
- o["zzz"] = 1
- o["aaa"] = 2
-- eq_(o.keys(), ['zzz', 'aaa'])
-+ eq_(list(o.keys()), ['zzz', 'aaa'])
-
- o2 = o.copy()
-- eq_(o2.keys(), o.keys())
-+ eq_(list(o2.keys()), list(o.keys()))
-
- o3 = copy.copy(o)
-- eq_(o3.keys(), o.keys())
-+ eq_(list(o3.keys()), list(o.keys()))
-
-
- class OrderedSetTest(fixtures.TestBase):
-@@ -198,7 +198,7 @@
- def test_serialize(self):
- d = util.immutabledict({1: 2, 3: 4})
- for loads, dumps in picklers():
-- print loads(dumps(d))
-+ print(loads(dumps(d)))
-
-
- class MemoizedAttrTest(fixtures.TestBase):
-@@ -252,7 +252,7 @@
- try:
- cc['col1'] in cc
- assert False
-- except exc.ArgumentError, e:
-+ except exc.ArgumentError as e:
- eq_(str(e), "__contains__ requires a string argument")
-
- def test_compare(self):
-@@ -414,14 +414,14 @@
- for type_ in (object, ImmutableSubclass):
- data = [type_(), type_()]
- ids = util.IdentitySet()
-- for i in range(2) + range(2):
-+ for i in list(range(2)) + list(range(2)):
- ids.add(data[i])
- self.assert_eq(ids, data)
-
- for type_ in (EqOverride, HashOverride, HashEqOverride):
- data = [type_(1), type_(1), type_(2)]
- ids = util.IdentitySet()
-- for i in range(3) + range(3):
-+ for i in list(range(3)) + list(range(3)):
- ids.add(data[i])
- self.assert_eq(ids, data)
-
-@@ -843,13 +843,14 @@
- return super_, sub_, twin1, twin2, unique1, unique2
-
- def _assert_unorderable_types(self, callable_):
-- # Py3K
-- #assert_raises_message(
-- # TypeError, 'unorderable types', callable_)
-- # Py2K
-+# start Py3K
- assert_raises_message(
-- TypeError, 'cannot compare sets using cmp()', callable_)
-- # end Py2K
-+ TypeError, 'unorderable types', callable_)
-+# end Py3K
-+# start Py2K
-+# assert_raises_message(
-+# TypeError, 'cannot compare sets using cmp()', callable_)
-+# end Py2K
-
- def test_basic_sanity(self):
- IdentitySet = util.IdentitySet
-@@ -969,23 +970,23 @@
- d = subdict(a=1, b=2, c=3)
- self._ok(d)
-
-- # Py2K
-- def test_UserDict(self):
-- import UserDict
-- d = UserDict.UserDict(a=1, b=2, c=3)
-- self._ok(d)
-- # end Py2K
-+# start Py2K
-+# def test_UserDict(self):
-+# import UserDict
-+# d = UserDict.UserDict(a=1, b=2, c=3)
-+# self._ok(d)
-+# end Py2K
-
- def test_object(self):
- self._notok(object())
-
-- # Py2K
-- def test_duck_1(self):
-- class duck1(object):
-- def iteritems(duck):
-- return iter(self.baseline)
-- self._ok(duck1())
-- # end Py2K
-+# start Py2K
-+# def test_duck_1(self):
-+# class duck1(object):
-+# def iteritems(duck):
-+# return iter(self.baseline)
-+# self._ok(duck1())
-+# end Py2K
-
- def test_duck_2(self):
- class duck2(object):
-@@ -993,16 +994,16 @@
- return list(self.baseline)
- self._ok(duck2())
-
-- # Py2K
-- def test_duck_3(self):
-- class duck3(object):
-- def iterkeys(duck):
-- return iter(['a', 'b', 'c'])
--
-- def __getitem__(duck, key):
-- return dict(a=1, b=2, c=3).get(key)
-- self._ok(duck3())
-- # end Py2K
-+# start Py2K
-+# def test_duck_3(self):
-+# class duck3(object):
-+# def iterkeys(duck):
-+# return iter(['a', 'b', 'c'])
-+#
-+# def __getitem__(duck, key):
-+# return dict(a=1, b=2, c=3).get(key)
-+# self._ok(duck3())
-+# end Py2K
-
- def test_duck_4(self):
- class duck4(object):
-@@ -1029,9 +1030,9 @@
- class DuckTypeCollectionTest(fixtures.TestBase):
-
- def test_sets(self):
-- # Py2K
-- import sets
-- # end Py2K
-+# start Py2K
-+# import sets
-+# end Py2K
-
- class SetLike(object):
- def add(self):
-@@ -1041,9 +1042,9 @@
- __emulates__ = set
-
- for type_ in (set,
-- # Py2K
-- sets.Set,
-- # end Py2K
-+# start Py2K
-+# sets.Set,
-+# end Py2K
- SetLike,
- ForcedSet):
- eq_(util.duck_type_collection(type_), set)
-@@ -1051,9 +1052,9 @@
- eq_(util.duck_type_collection(instance), set)
-
- for type_ in (frozenset,
-- # Py2K
-- sets.ImmutableSet
-- # end Py2K
-+# start Py2K
-+# sets.ImmutableSet
-+# end Py2K
- ):
- is_(util.duck_type_collection(type_), None)
- instance = type_()
-@@ -1175,7 +1176,7 @@
- sym3 = util.pickle.loads(s)
-
- for protocol in 0, 1, 2:
-- print protocol
-+ print(protocol)
- serial = util.pickle.dumps(sym1)
- rt = util.pickle.loads(serial)
- assert rt is sym1
-@@ -1550,21 +1551,21 @@
- eq_(set(util.class_hierarchy(A)), set((A, B, C, object)))
- eq_(set(util.class_hierarchy(B)), set((A, B, C, object)))
-
-- # Py2K
-- def test_oldstyle_mixin(self):
-- class A(object):
-- pass
--
-- class Mixin:
-- pass
--
-- class B(A, Mixin):
-- pass
--
-- eq_(set(util.class_hierarchy(B)), set((A, B, object)))
-- eq_(set(util.class_hierarchy(Mixin)), set())
-- eq_(set(util.class_hierarchy(A)), set((A, B, object)))
-- # end Py2K
-+# start Py2K
-+# def test_oldstyle_mixin(self):
-+# class A(object):
-+# pass
-+#
-+# class Mixin:
-+# pass
-+#
-+# class B(A, Mixin):
-+# pass
-+#
-+# eq_(set(util.class_hierarchy(B)), set((A, B, object)))
-+# eq_(set(util.class_hierarchy(Mixin)), set())
-+# eq_(set(util.class_hierarchy(A)), set((A, B, object)))
-+# end Py2K
-
-
- class TestClassProperty(fixtures.TestBase):
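
In the test_utils.py hunks above, keys() and values() calls are wrapped in list() because Python 3 mappings return view objects rather than lists. A short standalone illustration (not from the test suite):

    from collections import OrderedDict

    d = OrderedDict([('a', 1), ('b', 2)])

    # Python 3: keys() and values() return lazy view objects, not lists...
    assert not isinstance(d.keys(), list)

    # ...so code that compares them against lists must materialize the view,
    # which is exactly the list(...) wrapping added in the hunks above.
    assert list(d.keys()) == ['a', 'b']
    assert list(d.values()) == [1, 2]
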
-diff -r 9d0639b9d3be test/dialect/test_firebird.py
---- a/test/dialect/test_firebird.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_firebird.py Sat Apr 27 19:42:17 2013 -0400
-@@ -28,7 +28,7 @@
- )
- con.execute('CREATE DOMAIN img_domain AS BLOB SUB_TYPE '
- 'BINARY')
-- except ProgrammingError, e:
-+ except ProgrammingError as e:
- if not 'attempt to store duplicate value' in str(e):
- raise e
- con.execute('''CREATE GENERATOR gen_testtable_id''')
-diff -r 9d0639b9d3be test/dialect/test_mssql.py
---- a/test/dialect/test_mssql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_mssql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -403,7 +403,7 @@
- self.assert_compile(
- s,
- "SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
-- checkparams={u'x_1': 5}
-+ checkparams={'x_1': 5}
- )
-
- def test_limit_zero_using_top(self):
-@@ -414,7 +414,7 @@
- self.assert_compile(
- s,
- "SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
-- checkparams={u'x_1': 5}
-+ checkparams={'x_1': 5}
- )
-
- def test_offset_using_window(self):
-@@ -424,14 +424,14 @@
-
- # test that the select is not altered with subsequent compile
- # calls
-- for i in xrange(2):
-+ for i in range(2):
- self.assert_compile(
- s,
- "SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
- "AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
- "mssql_rn FROM t WHERE t.x = :x_1) AS "
- "anon_1 WHERE mssql_rn > :mssql_rn_1",
-- checkparams={u'mssql_rn_1': 20, u'x_1': 5}
-+ checkparams={'mssql_rn_1': 20, 'x_1': 5}
- )
-
- def test_limit_offset_using_window(self):
-@@ -447,7 +447,7 @@
- "FROM t "
- "WHERE t.x = :x_1) AS anon_1 "
- "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
-- checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
-+ checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
- )
-
- def test_limit_offset_with_correlated_order_by(self):
-@@ -468,7 +468,7 @@
- "FROM t1 "
- "WHERE t1.x = :x_1) AS anon_1 "
- "WHERE mssql_rn > :mssql_rn_1 AND mssql_rn <= :mssql_rn_2",
-- checkparams={u'mssql_rn_1': 20, u'mssql_rn_2': 30, u'x_1': 5}
-+ checkparams={'mssql_rn_1': 20, 'mssql_rn_2': 30, 'x_1': 5}
- )
-
- def test_limit_zero_offset_using_window(self):
-@@ -482,7 +482,7 @@
- s,
- "SELECT TOP 0 t.x, t.y FROM t "
- "WHERE t.x = :x_1 ORDER BY t.y",
-- checkparams={u'x_1': 5}
-+ checkparams={'x_1': 5}
- )
-
- def test_sequence_start_0(self):
-@@ -851,11 +851,11 @@
- # encode in UTF-8 (string object) because this is the default
- # dialect encoding
-
-- con.execute(u"insert into unitest_table values ('bien u\
-+ con.execute("insert into unitest_table values ('bien u\
- umang\xc3\xa9')".encode('UTF-8'))
- try:
- r = t1.select().execute().first()
-- assert isinstance(r[1], unicode), \
-+ assert isinstance(r[1], str), \
- '%s is %s instead of unicode, working on %s' % (r[1],
- type(r[1]), meta.bind)
- finally:
-@@ -1707,7 +1707,7 @@
- )]
- for value in test_items:
- float_table.insert().execute(floatcol=value)
-- except Exception, e:
-+ except Exception as e:
- raise e
-
-
-@@ -1953,8 +1953,8 @@
-
- def test_unicode(self):
- module = __import__('pymssql')
-- result = module.Binary(u'foo')
-- eq_(result, u'foo')
-+ result = module.Binary('foo')
-+ eq_(result, 'foo')
-
- def test_bytes(self):
- module = __import__('pymssql')
-@@ -2073,7 +2073,7 @@
-
- dialect = mssql.dialect()
- value = CoerceUnicode().bind_processor(dialect)('a string')
-- assert isinstance(value, unicode)
-+ assert isinstance(value, str)
-
- class ReflectHugeViewTest(fixtures.TestBase):
- __only_on__ = 'mssql'
-@@ -2085,13 +2085,13 @@
- t = Table('base_table', self.metadata,
- *[
- Column("long_named_column_number_%d" % i, Integer)
-- for i in xrange(self.col_num)
-+ for i in range(self.col_num)
- ]
- )
- self.view_str = view_str = \
- "CREATE VIEW huge_named_view AS SELECT %s FROM base_table" % (
- ",".join("long_named_column_number_%d" % i
-- for i in xrange(self.col_num))
-+ for i in range(self.col_num))
- )
- assert len(view_str) > 4000
-
-diff -r 9d0639b9d3be test/dialect/test_mysql.py
---- a/test/dialect/test_mysql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_mysql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -372,9 +372,9 @@
- try:
- self.assert_(list(row) == expected)
- except:
-- print "Storing %s" % store
-- print "Expected %s" % expected
-- print "Found %s" % list(row)
-+ print("Storing %s" % store)
-+ print("Expected %s" % expected)
-+ print("Found %s" % list(row))
- raise
- table.delete().execute().close()
-
-@@ -684,17 +684,17 @@
- metadata = MetaData(unicode_engine)
- t1 = Table('table', metadata,
- Column('id', Integer, primary_key=True),
-- Column('value', Enum(u'réveillé', u'drôle', u'S’il')),
-- Column('value2', mysql.ENUM(u'réveillé', u'drôle', u'S’il'))
-+ Column('value', Enum('réveillé', 'drôle', 'S’il')),
-+ Column('value2', mysql.ENUM('réveillé', 'drôle', 'S’il'))
- )
- metadata.create_all()
- try:
-- t1.insert().execute(value=u'drôle', value2=u'drôle')
-- t1.insert().execute(value=u'réveillé', value2=u'réveillé')
-- t1.insert().execute(value=u'S’il', value2=u'S’il')
-+ t1.insert().execute(value='drôle', value2='drôle')
-+ t1.insert().execute(value='réveillé', value2='réveillé')
-+ t1.insert().execute(value='S’il', value2='S’il')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-- [(1, u'drôle', u'drôle'), (2, u'réveillé', u'réveillé'),
-- (3, u'S’il', u'S’il')]
-+ [(1, 'drôle', 'drôle'), (2, 'réveillé', 'réveillé'),
-+ (3, 'S’il', 'S’il')]
- )
-
- # test reflection of the enum labels
-@@ -706,10 +706,10 @@
- # latin-1 stuff forcing its way in ?
-
- assert t2.c.value.type.enums[0:2] == \
-- (u'réveillé', u'drôle') # u'S’il') # eh ?
-+ ('réveillé', 'drôle') # u'S’il') # eh ?
-
- assert t2.c.value2.type.enums[0:2] == \
-- (u'réveillé', u'drôle') # u'S’il') # eh ?
-+ ('réveillé', 'drôle') # u'S’il') # eh ?
- finally:
- metadata.drop_all()
-
-@@ -877,13 +877,13 @@
- reflected = Table('mysql_case', MetaData(testing.db),
- autoload=True, include_columns=['c1', 'C2'])
- for t in case_table, reflected:
-- assert 'c1' in t.c.keys()
-- assert 'C2' in t.c.keys()
-+ assert 'c1' in list(t.c.keys())
-+ assert 'C2' in list(t.c.keys())
- reflected2 = Table('mysql_case', MetaData(testing.db),
- autoload=True, include_columns=['c1', 'c2'])
-- assert 'c1' in reflected2.c.keys()
-+ assert 'c1' in list(reflected2.c.keys())
- for c in ['c2', 'C2', 'C3']:
-- assert c not in reflected2.c.keys()
-+ assert c not in list(reflected2.c.keys())
- finally:
- case_table.drop()
-
-@@ -1370,7 +1370,7 @@
- def _options(self, modes):
- def connect(con, record):
- cursor = con.cursor()
-- print "DOING THiS:", "set sql_mode='%s'" % (",".join(modes))
-+ print("DOING THiS:", "set sql_mode='%s'" % (",".join(modes)))
- cursor.execute("set sql_mode='%s'" % (",".join(modes)))
- e = engines.testing_engine(options={
- 'pool_events':[
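
The test_mysql.py hunks above convert print statements to the print() function, the only form Python 3 provides. An illustrative standalone call with made-up values:

    store = [1, 2]
    expected = [1, 2]

    # print is an ordinary function on Python 3, so the arguments are
    # parenthesized and formatting happens the same way as before.
    print("Storing %s" % store)
    print("Expected %s" % expected)
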
-diff -r 9d0639b9d3be test/dialect/test_oracle.py
---- a/test/dialect/test_oracle.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_oracle.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,5 +1,5 @@
- # coding: utf-8
--from __future__ import with_statement
-+
-
- from sqlalchemy.testing import eq_
- from sqlalchemy import *
-@@ -807,7 +807,7 @@
- )
- def test_twophase_prepare_false(self):
- conn = self._connection()
-- for i in xrange(2):
-+ for i in range(2):
- trans = conn.begin_twophase()
- conn.execute("select 1 from dual")
- trans.prepare()
-@@ -817,7 +817,7 @@
-
- def test_twophase_prepare_true(self):
- conn = self._connection()
-- for i in xrange(2):
-+ for i in range(2):
- trans = conn.begin_twophase()
- conn.execute("insert into datatable (id, data) "
- "values (%s, 'somedata')" % i)
-@@ -870,7 +870,7 @@
- b = bindparam("foo", "hello world!")
- assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
-
-- b = bindparam("foo", u"hello world!")
-+ b = bindparam("foo", "hello world!")
- assert b.type.dialect_impl(dialect).get_dbapi_type(dbapi) == 'STRING'
-
- def test_long(self):
-@@ -1257,7 +1257,7 @@
- autoload=True, oracle_resolve_synonyms=True
- )
- for row in types_table.select().execute().fetchall():
-- [row[k] for k in row.keys()]
-+ [row[k] for k in list(row.keys())]
-
- @testing.provide_metadata
- def test_raw_roundtrip(self):
-@@ -1291,11 +1291,11 @@
- t2.c.data.type.dialect_impl(testing.db.dialect),
- cx_oracle._OracleNVarChar)
-
-- data = u'm’a réveillé.'
-+ data = 'm’a réveillé.'
- t2.insert().execute(data=data)
- res = t2.select().execute().first()['data']
- eq_(res, data)
-- assert isinstance(res, unicode)
-+ assert isinstance(res, str)
-
-
- def test_char_length(self):
-@@ -1426,7 +1426,7 @@
- m = MetaData(testing.db)
- m.reflect()
- eq_(
-- set(t.name for t in m.tables.values()),
-+ set(t.name for t in list(m.tables.values())),
- set(['admin_docindex'])
- )
-
-@@ -1641,28 +1641,28 @@
- metadata.create_all()
-
- table.insert().execute(
-- {'_underscorecolumn': u'’é'},
-+ {'_underscorecolumn': '’é'},
- )
- result = testing.db.execute(
-- table.select().where(table.c._underscorecolumn==u'’é')
-+ table.select().where(table.c._underscorecolumn=='’é')
- ).scalar()
-- eq_(result, u'’é')
-+ eq_(result, '’é')
-
- @testing.provide_metadata
- def test_quoted_column_unicode(self):
- metadata = self.metadata
- table=Table("atable", metadata,
-- Column(u"méil", Unicode(255), primary_key=True),
-+ Column("méil", Unicode(255), primary_key=True),
- )
- metadata.create_all()
-
- table.insert().execute(
-- {u'méil': u'’é'},
-+ {'méil': '’é'},
- )
- result = testing.db.execute(
-- table.select().where(table.c[u'méil']==u'’é')
-+ table.select().where(table.c['méil']=='’é')
- ).scalar()
-- eq_(result, u'’é')
-+ eq_(result, '’é')
-
-
- class DBLinkReflectionTest(fixtures.TestBase):
-@@ -1702,5 +1702,5 @@
-
- t = Table('test_table_syn', m, autoload=True,
- autoload_with=testing.db, oracle_resolve_synonyms=True)
-- eq_(t.c.keys(), ['id', 'data'])
-+ eq_(list(t.c.keys()), ['id', 'data'])
- eq_(list(t.primary_key), [t.c.id])
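
Several hunks above (test_mssql.py and test_oracle.py) replace the Python 2 unicode builtin with str, since Python 3 folds both text types into str and keeps bytes separate. An illustrative standalone check, not taken from the dialect tests:

    res = 'm\u2019a r\u00e9veill\u00e9.'  # non-ASCII text literal

    # What used to be isinstance(res, unicode) becomes isinstance(res, str);
    # encoded data is a distinct bytes object.
    assert isinstance(res, str)
    assert isinstance(res.encode('utf-8'), bytes)
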
-diff -r 9d0639b9d3be test/dialect/test_postgresql.py
---- a/test/dialect/test_postgresql.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_postgresql.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,6 +1,6 @@
- # coding: utf-8
-
--from __future__ import with_statement
-+
-
- from sqlalchemy.testing.assertions import eq_, assert_raises, \
- assert_raises_message, is_, AssertsExecutionResults, \
-@@ -559,21 +559,21 @@
- t1 = Table('table', metadata,
- Column('id', Integer, primary_key=True),
- Column('value',
-- Enum(u'réveillé', u'drôle', u'S’il',
-+ Enum('réveillé', 'drôle', 'S’il',
- name='onetwothreetype'))
- )
-
- metadata.create_all()
- try:
-- t1.insert().execute(value=u'drôle')
-- t1.insert().execute(value=u'réveillé')
-- t1.insert().execute(value=u'S’il')
-+ t1.insert().execute(value='drôle')
-+ t1.insert().execute(value='réveillé')
-+ t1.insert().execute(value='S’il')
- eq_(t1.select().order_by(t1.c.id).execute().fetchall(),
-- [(1, u'drôle'), (2, u'réveillé'), (3, u'S’il')]
-+ [(1, 'drôle'), (2, 'réveillé'), (3, 'S’il')]
- )
- m2 = MetaData(testing.db)
- t2 = Table('table', m2, autoload=True)
-- assert t2.c.value.type.enums == (u'réveillé', u'drôle', u'S’il')
-+ assert t2.c.value.type.enums == ('réveillé', 'drôle', 'S’il')
- finally:
- metadata.drop_all()
-
-@@ -1238,7 +1238,7 @@
- :
- try:
- con.execute(ddl)
-- except exc.DBAPIError, e:
-+ except exc.DBAPIError as e:
- if not 'already exists' in str(e):
- raise e
- con.execute('CREATE TABLE testtable (question integer, answer '
-@@ -1476,7 +1476,7 @@
- meta1.create_all()
- meta2 = MetaData(testing.db)
- subject = Table('subject', meta2, autoload=True)
-- eq_(subject.primary_key.columns.keys(), [u'p2', u'p1'])
-+ eq_(list(subject.primary_key.columns.keys()), ['p2', 'p1'])
-
- @testing.provide_metadata
- def test_pg_weirdchar_reflection(self):
-@@ -1749,7 +1749,7 @@
- conn.execute("ALTER TABLE t RENAME COLUMN x to y")
-
- ind = testing.db.dialect.get_indexes(conn, "t", None)
-- eq_(ind, [{'unique': False, 'column_names': [u'y'], 'name': u'idx1'}])
-+ eq_(ind, [{'unique': False, 'column_names': ['y'], 'name': 'idx1'}])
- conn.close()
-
- class CustomTypeReflectionTest(fixtures.TestBase):
-@@ -2174,8 +2174,8 @@
-
- def test_insert_array(self):
- arrtable = self.tables.arrtable
-- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
-- u'def'])
-+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=['abc',
-+ 'def'])
- results = arrtable.select().execute().fetchall()
- eq_(len(results), 1)
- eq_(results[0]['intarr'], [1, 2, 3])
-@@ -2183,9 +2183,9 @@
-
- def test_array_where(self):
- arrtable = self.tables.arrtable
-- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'abc',
-- u'def'])
-- arrtable.insert().execute(intarr=[4, 5, 6], strarr=u'ABC')
-+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=['abc',
-+ 'def'])
-+ arrtable.insert().execute(intarr=[4, 5, 6], strarr='ABC')
- results = arrtable.select().where(arrtable.c.intarr == [1, 2,
- 3]).execute().fetchall()
- eq_(len(results), 1)
-@@ -2194,7 +2194,7 @@
- def test_array_concat(self):
- arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[1, 2, 3],
-- strarr=[u'abc', u'def'])
-+ strarr=['abc', 'def'])
- results = select([arrtable.c.intarr + [4, 5,
- 6]]).execute().fetchall()
- eq_(len(results), 1)
-@@ -2203,15 +2203,15 @@
- def test_array_subtype_resultprocessor(self):
- arrtable = self.tables.arrtable
- arrtable.insert().execute(intarr=[4, 5, 6],
-- strarr=[[u'm\xe4\xe4'], [u'm\xf6\xf6'
-+ strarr=[['m\xe4\xe4'], ['m\xf6\xf6'
- ]])
-- arrtable.insert().execute(intarr=[1, 2, 3], strarr=[u'm\xe4\xe4'
-- , u'm\xf6\xf6'])
-+ arrtable.insert().execute(intarr=[1, 2, 3], strarr=['m\xe4\xe4'
-+ , 'm\xf6\xf6'])
- results = \
- arrtable.select(order_by=[arrtable.c.intarr]).execute().fetchall()
- eq_(len(results), 2)
-- eq_(results[0]['strarr'], [u'm\xe4\xe4', u'm\xf6\xf6'])
-- eq_(results[1]['strarr'], [[u'm\xe4\xe4'], [u'm\xf6\xf6']])
-+ eq_(results[0]['strarr'], ['m\xe4\xe4', 'm\xf6\xf6'])
-+ eq_(results[1]['strarr'], [['m\xe4\xe4'], ['m\xf6\xf6']])
-
- def test_array_literal(self):
- eq_(
-@@ -2263,7 +2263,7 @@
- testing.db.execute(
- arrtable.insert(),
- intarr=[4, 5, 6],
-- strarr=[u'abc', u'def']
-+ strarr=['abc', 'def']
- )
- eq_(
- testing.db.scalar(select([arrtable.c.intarr[2:3]])),
-diff -r 9d0639b9d3be test/dialect/test_sqlite.py
---- a/test/dialect/test_sqlite.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_sqlite.py Sat Apr 27 19:42:17 2013 -0400
-@@ -84,7 +84,7 @@
- eq_(row, (1, datetime.date(2010, 5, 10),
- datetime.datetime( 2010, 5, 10, 12, 15, 25, )))
- r = engine.execute(func.current_date()).scalar()
-- assert isinstance(r, basestring)
-+ assert isinstance(r, str)
- finally:
- t.drop(engine)
- engine.dispose()
-@@ -104,8 +104,8 @@
- UnicodeText(),
- ):
- bindproc = t.dialect_impl(dialect).bind_processor(dialect)
-- assert not bindproc or isinstance(bindproc(u'some string'),
-- unicode)
-+ assert not bindproc or isinstance(bindproc('some string'),
-+ str)
-
- @testing.provide_metadata
- def test_type_reflection(self):
-@@ -566,7 +566,7 @@
- eq_(inspector.get_indexes('foo'), [])
- eq_(inspector.get_indexes('foo',
- include_auto_indexes=True), [{'unique': 1, 'name'
-- : u'sqlite_autoindex_foo_1', 'column_names': [u'bar']}])
-+ : 'sqlite_autoindex_foo_1', 'column_names': ['bar']}])
- finally:
- meta.drop_all()
-
-@@ -602,7 +602,7 @@
- 'dow': '%w',
- 'week': '%W',
- }
-- for field, subst in mapping.items():
-+ for field, subst in list(mapping.items()):
- self.assert_compile(select([extract(field, t.c.col1)]),
- "SELECT CAST(STRFTIME('%s', t.col1) AS "
- "INTEGER) AS anon_1 FROM t" % subst)
-diff -r 9d0639b9d3be test/dialect/test_sybase.py
---- a/test/dialect/test_sybase.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/dialect/test_sybase.py Sat Apr 27 19:42:17 2013 -0400
-@@ -19,7 +19,7 @@
- 'year': 'year',
- }
-
-- for field, subst in mapping.items():
-+ for field, subst in list(mapping.items()):
- self.assert_compile(
- select([extract(field, t.c.col1)]),
- 'SELECT DATEPART("%s", t.col1) AS anon_1 FROM t' % subst)
-diff -r 9d0639b9d3be test/engine/test_bind.py
---- a/test/engine/test_bind.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_bind.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,6 +1,6 @@
- """tests the "bind" attribute/argument across schema and SQL,
- including the deprecated versions of these arguments"""
--from __future__ import with_statement
-+
- from sqlalchemy.testing import eq_, assert_raises
- from sqlalchemy import engine, exc
- from sqlalchemy import MetaData, ThreadLocalMetaData
-@@ -61,7 +61,7 @@
- try:
- meth()
- assert False
-- except exc.UnboundExecutionError, e:
-+ except exc.UnboundExecutionError as e:
- eq_(str(e),
- "The MetaData is not bound to an Engine or "
- "Connection. Execution can not proceed without a "
-@@ -82,7 +82,7 @@
- try:
- meth()
- assert False
-- except exc.UnboundExecutionError, e:
-+ except exc.UnboundExecutionError as e:
- eq_(
- str(e),
- "The Table 'test_table' "
-diff -r 9d0639b9d3be test/engine/test_ddlemit.py
---- a/test/engine/test_ddlemit.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_ddlemit.py Sat Apr 27 19:42:17 2013 -0400
-@@ -47,7 +47,7 @@
-
- return (m, ) + tuple(
- Table('t%d' % i, m, Column('x', Integer))
-- for i in xrange(1, 6)
-+ for i in range(1, 6)
- )
-
- def _table_seq_fixture(self):
-diff -r 9d0639b9d3be test/engine/test_ddlevents.py
---- a/test/engine/test_ddlevents.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_ddlevents.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import with_statement
-+
- from sqlalchemy.testing import assert_raises, assert_raises_message
- from sqlalchemy.schema import DDL, CheckConstraint, AddConstraint, \
- DropConstraint
-diff -r 9d0639b9d3be test/engine/test_execute.py
---- a/test/engine/test_execute.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_execute.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,4 +1,4 @@
--from __future__ import with_statement
-+
-
- from sqlalchemy.testing import eq_, assert_raises, assert_raises_message, \
- config, is_
-@@ -18,7 +18,7 @@
- from sqlalchemy.engine import result as _result, default
- from sqlalchemy.engine.base import Connection, Engine
- from sqlalchemy.testing import fixtures
--import StringIO
-+import io
-
- users, metadata, users_autoinc = None, None, None
- class ExecuteTest(fixtures.TestBase):
-@@ -256,7 +256,7 @@
- try:
- cursor = raw.cursor()
- cursor.execute("SELECTINCORRECT")
-- except testing.db.dialect.dbapi.DatabaseError, orig:
-+ except testing.db.dialect.dbapi.DatabaseError as orig:
- # py3k has "orig" in local scope...
- the_orig = orig
- finally:
-@@ -622,7 +622,7 @@
- def test_log_large_dict(self):
- self.eng.execute(
- "INSERT INTO foo (data) values (:data)",
-- [{"data":str(i)} for i in xrange(100)]
-+ [{"data":str(i)} for i in range(100)]
- )
- eq_(
- self.buf.buffer[1].message,
-@@ -635,7 +635,7 @@
- def test_log_large_list(self):
- self.eng.execute(
- "INSERT INTO foo (data) values (?)",
-- [(str(i), ) for i in xrange(100)]
-+ [(str(i), ) for i in range(100)]
- )
- eq_(
- self.buf.buffer[1].message,
-@@ -654,7 +654,7 @@
- "100 total bound parameter sets ... {'data': '98'}, {'data': '99'}\]",
- lambda: self.eng.execute(
- "INSERT INTO nonexistent (data) values (:data)",
-- [{"data":str(i)} for i in xrange(100)]
-+ [{"data":str(i)} for i in range(100)]
- )
- )
-
-@@ -668,7 +668,7 @@
- "\('98',\), \('99',\)\]",
- lambda: self.eng.execute(
- "INSERT INTO nonexistent (data) values (?)",
-- [(str(i), ) for i in xrange(100)]
-+ [(str(i), ) for i in range(100)]
- )
- )
-
-@@ -834,9 +834,9 @@
-
- class MockStrategyTest(fixtures.TestBase):
- def _engine_fixture(self):
-- buf = StringIO.StringIO()
-+ buf = io.StringIO()
- def dump(sql, *multiparams, **params):
-- buf.write(unicode(sql.compile(dialect=engine.dialect)))
-+ buf.write(str(sql.compile(dialect=engine.dialect)))
- engine = create_engine('postgresql://', strategy='mock', executor=dump)
- return engine, buf
-
-@@ -939,7 +939,7 @@
- def test_row_c_sequence_check(self):
- import csv
- import collections
-- from StringIO import StringIO
-+ from io import StringIO
-
- metadata = MetaData()
- metadata.bind = 'sqlite://'
-@@ -1026,7 +1026,7 @@
- )
- m.create_all(engine)
- engine.execute(t.insert(), [
-- {'x':i, 'y':"t_%d" % i} for i in xrange(1, 12)
-+ {'x':i, 'y':"t_%d" % i} for i in range(1, 12)
- ])
-
- def _test_proxy(self, cls):
-@@ -1039,13 +1039,13 @@
- assert isinstance(r, cls)
- for i in range(5):
- rows.append(r.fetchone())
-- eq_(rows, [(i, "t_%d" % i) for i in xrange(1, 6)])
-+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
-
- rows = r.fetchmany(3)
-- eq_(rows, [(i, "t_%d" % i) for i in xrange(6, 9)])
-+ eq_(rows, [(i, "t_%d" % i) for i in range(6, 9)])
-
- rows = r.fetchall()
-- eq_(rows, [(i, "t_%d" % i) for i in xrange(9, 12)])
-+ eq_(rows, [(i, "t_%d" % i) for i in range(9, 12)])
-
- r = self.engine.execute(select([self.table]))
- rows = r.fetchmany(None)
-@@ -1059,7 +1059,7 @@
-
- r = self.engine.execute(select([self.table]).limit(5))
- rows = r.fetchmany(6)
-- eq_(rows, [(i, "t_%d" % i) for i in xrange(1, 6)])
-+ eq_(rows, [(i, "t_%d" % i) for i in range(1, 6)])
-
- def test_plain(self):
- self._test_proxy(_result.ResultProxy)
-@@ -1184,7 +1184,7 @@
- try:
- conn.execute("SELECT FOO FROM I_DONT_EXIST")
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- assert canary[0][2] is e.orig
- assert canary[0][0] == "SELECT FOO FROM I_DONT_EXIST"
-
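
The test_execute.py hunks above swap the removed StringIO module for io and coerce compiled SQL with str() instead of unicode(); the next file applies the same treatment to ConfigParser. A minimal standalone sketch of the io-based buffer (hypothetical SQL text, no engine involved):

    import io

    buf = io.StringIO()          # replaces StringIO.StringIO() on Python 3
    buf.write(str('SELECT 1'))   # str() replaces the old unicode() coercion
    assert buf.getvalue() == 'SELECT 1'
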
-diff -r 9d0639b9d3be test/engine/test_parseconnect.py
---- a/test/engine/test_parseconnect.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_parseconnect.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1,6 +1,6 @@
- from sqlalchemy.testing import assert_raises, assert_raises_message, eq_
--import ConfigParser
--import StringIO
-+import configparser
-+import io
- import sqlalchemy.engine.url as url
- from sqlalchemy import create_engine, engine_from_config, exc, pool
- from sqlalchemy.engine.util import _coerce_config
-@@ -103,8 +103,8 @@
- pool_threadlocal=1
- pool_timeout=10
- """
-- ini = ConfigParser.ConfigParser()
-- ini.readfp(StringIO.StringIO(raw))
-+ ini = configparser.ConfigParser()
-+ ini.readfp(io.StringIO(raw))
-
- expected = {
- 'url': 'postgresql://scott:tiger@somehost/test?fooz=somevalue',
-@@ -234,7 +234,7 @@
- : True}, convert_unicode=True)
- try:
- e.connect()
-- except tsa.exc.DBAPIError, de:
-+ except tsa.exc.DBAPIError as de:
- assert not de.connection_invalidated
-
- def test_ensure_dialect_does_is_disconnect_no_conn(self):
-@@ -266,7 +266,7 @@
- try:
- create_engine('sqlite://', module=ThrowOnConnect()).connect()
- assert False
-- except tsa.exc.DBAPIError, de:
-+ except tsa.exc.DBAPIError as de:
- assert de.connection_invalidated
-
- def test_urlattr(self):
-diff -r 9d0639b9d3be test/engine/test_pool.py
---- a/test/engine/test_pool.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_pool.py Sat Apr 27 19:42:17 2013 -0400
-@@ -525,23 +525,23 @@
- self.assert_((item in innerself.checked_out) == in_cout)
- self.assert_((item in innerself.checked_in) == in_cin)
- def inst_connect(self, con, record):
-- print "connect(%s, %s)" % (con, record)
-+ print("connect(%s, %s)" % (con, record))
- assert con is not None
- assert record is not None
- self.connected.append(con)
- def inst_first_connect(self, con, record):
-- print "first_connect(%s, %s)" % (con, record)
-+ print("first_connect(%s, %s)" % (con, record))
- assert con is not None
- assert record is not None
- self.first_connected.append(con)
- def inst_checkout(self, con, record, proxy):
-- print "checkout(%s, %s, %s)" % (con, record, proxy)
-+ print("checkout(%s, %s, %s)" % (con, record, proxy))
- assert con is not None
- assert record is not None
- assert proxy is not None
- self.checked_out.append(con)
- def inst_checkin(self, con, record):
-- print "checkin(%s, %s)" % (con, record)
-+ print("checkin(%s, %s)" % (con, record))
- # con can be None if invalidated
- assert record is not None
- self.checked_in.append(con)
-@@ -738,8 +738,8 @@
- def status(pool):
- tup = pool.size(), pool.checkedin(), pool.overflow(), \
- pool.checkedout()
-- print 'Pool size: %d Connections in pool: %d Current '\
-- 'Overflow: %d Current Checked out connections: %d' % tup
-+ print('Pool size: %d Connections in pool: %d Current '\
-+ 'Overflow: %d Current Checked out connections: %d' % tup)
- return tup
-
- c1 = p.connect()
-@@ -794,7 +794,7 @@
- try:
- c4 = p.connect()
- assert False
-- except tsa.exc.TimeoutError, e:
-+ except tsa.exc.TimeoutError as e:
- assert int(time.time() - now) == 2
-
- def test_timeout_race(self):
-@@ -812,18 +812,18 @@
- max_overflow = 1, use_threadlocal = False, timeout=3)
- timeouts = []
- def checkout():
-- for x in xrange(1):
-+ for x in range(1):
- now = time.time()
- try:
- c1 = p.connect()
-- except tsa.exc.TimeoutError, e:
-+ except tsa.exc.TimeoutError as e:
- timeouts.append(time.time() - now)
- continue
- time.sleep(4)
- c1.close()
-
- threads = []
-- for i in xrange(10):
-+ for i in range(10):
- th = threading.Thread(target=checkout)
- th.start()
- threads.append(th)
-@@ -860,7 +860,7 @@
- except tsa.exc.TimeoutError:
- pass
- threads = []
-- for i in xrange(thread_count):
-+ for i in range(thread_count):
- th = threading.Thread(target=whammy)
- th.start()
- threads.append(th)
-@@ -1007,8 +1007,8 @@
- strong_refs.add(c.connection)
- return c
-
-- for j in xrange(5):
-- conns = [_conn() for i in xrange(4)]
-+ for j in range(5):
-+ conns = [_conn() for i in range(4)]
- for c in conns:
- c.close()
-
-@@ -1152,7 +1152,7 @@
- return p.connect()
-
- def checkout():
-- for x in xrange(10):
-+ for x in range(10):
- c = _conn()
- assert c
- c.cursor()
-@@ -1160,7 +1160,7 @@
- time.sleep(.1)
-
- threads = []
-- for i in xrange(10):
-+ for i in range(10):
- th = threading.Thread(target=checkout)
- th.start()
- threads.append(th)
-diff -r 9d0639b9d3be test/engine/test_processors.py
---- a/test/engine/test_processors.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_processors.py Sat Apr 27 19:42:17 2013 -0400
-@@ -53,7 +53,7 @@
- cls.module = type("util", (object,),
- dict(
- (k, staticmethod(v))
-- for k, v in processors.py_fallback().items()
-+ for k, v in list(processors.py_fallback().items())
- )
- )
-
-@@ -156,7 +156,7 @@
- cls.module = type("util", (object,),
- dict(
- (k, staticmethod(v))
-- for k, v in util.py_fallback().items()
-+ for k, v in list(util.py_fallback().items())
- )
- )
-
-diff -r 9d0639b9d3be test/engine/test_reconnect.py
---- a/test/engine/test_reconnect.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_reconnect.py Sat Apr 27 19:42:17 2013 -0400
-@@ -173,7 +173,7 @@
- try:
- trans.commit()
- assert False
-- except tsa.exc.InvalidRequestError, e:
-+ except tsa.exc.InvalidRequestError as e:
- assert str(e) \
- == "Can't reconnect until invalid transaction is "\
- "rolled back"
-@@ -370,7 +370,7 @@
- try:
- conn.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
-
-@@ -386,7 +386,7 @@
- try:
- conn.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
- assert conn.invalidated
-@@ -407,7 +407,7 @@
- try:
- c1.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- assert e.connection_invalidated
-
- p2 = engine.pool
-@@ -415,7 +415,7 @@
- try:
- c2.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- assert e.connection_invalidated
-
- # pool isn't replaced
-@@ -503,7 +503,7 @@
- try:
- conn.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
- assert not conn.closed
-@@ -523,7 +523,7 @@
- try:
- conn.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
-
-@@ -542,7 +542,7 @@
- try:
- conn.execute(select([1]))
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
- assert not conn.closed
-@@ -558,7 +558,7 @@
- try:
- trans.commit()
- assert False
-- except tsa.exc.InvalidRequestError, e:
-+ except tsa.exc.InvalidRequestError as e:
- assert str(e) \
- == "Can't reconnect until invalid transaction is "\
- "rolled back"
-@@ -634,13 +634,13 @@
- def test_invalidate_on_results(self):
- conn = engine.connect()
- result = conn.execute('select * from sometable')
-- for x in xrange(20):
-+ for x in range(20):
- result.fetchone()
- engine.test_shutdown()
- try:
-- print 'ghost result: %r' % result.fetchone()
-+ print('ghost result: %r' % result.fetchone())
- assert False
-- except tsa.exc.DBAPIError, e:
-+ except tsa.exc.DBAPIError as e:
- if not e.connection_invalidated:
- raise
- assert conn.invalidated
-diff -r 9d0639b9d3be test/engine/test_reflection.py
---- a/test/engine/test_reflection.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_reflection.py Sat Apr 27 19:42:17 2013 -0400
-@@ -811,7 +811,7 @@
- try:
- m4.reflect(only=['rt_a', 'rt_f'])
- self.assert_(False)
-- except sa.exc.InvalidRequestError, e:
-+ except sa.exc.InvalidRequestError as e:
- self.assert_(e.args[0].endswith('(rt_f)'))
-
- m5 = MetaData(testing.db)
-@@ -833,7 +833,7 @@
- )
-
- if existing:
-- print "Other tables present in database, skipping some checks."
-+ print("Other tables present in database, skipping some checks.")
- else:
- baseline.drop_all()
- m9 = MetaData(testing.db)
-@@ -1041,19 +1041,19 @@
- cls.metadata = metadata = MetaData()
-
- no_multibyte_period = set([
-- (u'plain', u'col_plain', u'ix_plain')
-+ ('plain', 'col_plain', 'ix_plain')
- ])
- no_has_table = [
-- (u'no_has_table_1', u'col_Unit\u00e9ble', u'ix_Unit\u00e9ble'),
-- (u'no_has_table_2', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
-+ ('no_has_table_1', 'col_Unit\u00e9ble', 'ix_Unit\u00e9ble'),
-+ ('no_has_table_2', 'col_\u6e2c\u8a66', 'ix_\u6e2c\u8a66'),
- ]
- no_case_sensitivity = [
-- (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
-- (u'unit\u00e9ble', u'col_unit\u00e9ble', u'ix_unit\u00e9ble'),
-+ ('\u6e2c\u8a66', 'col_\u6e2c\u8a66', 'ix_\u6e2c\u8a66'),
-+ ('unit\u00e9ble', 'col_unit\u00e9ble', 'ix_unit\u00e9ble'),
- ]
- full = [
-- (u'Unit\u00e9ble', u'col_Unit\u00e9ble', u'ix_Unit\u00e9ble'),
-- (u'\u6e2c\u8a66', u'col_\u6e2c\u8a66', u'ix_\u6e2c\u8a66'),
-+ ('Unit\u00e9ble', 'col_Unit\u00e9ble', 'ix_Unit\u00e9ble'),
-+ ('\u6e2c\u8a66', 'col_\u6e2c\u8a66', 'ix_\u6e2c\u8a66'),
- ]
-
- # as you can see, our options for this kind of thing
-diff -r 9d0639b9d3be test/engine/test_transaction.py
---- a/test/engine/test_transaction.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/engine/test_transaction.py Sat Apr 27 19:42:17 2013 -0400
-@@ -74,8 +74,8 @@
- connection.execute(users.insert(), user_id=1, user_name='user3')
- transaction.commit()
- assert False
-- except Exception , e:
-- print "Exception: ", e
-+ except Exception as e:
-+ print("Exception: ", e)
- transaction.rollback()
-
- result = connection.execute("select * from query_users")
-@@ -121,10 +121,10 @@
- trans2.rollback()
- raise
- transaction.rollback()
-- except Exception, e:
-+ except Exception as e:
- transaction.rollback()
- raise
-- except Exception, e:
-+ except Exception as e:
- try:
- assert str(e) == 'uh oh' # and not "This transaction is
- # inactive"
-@@ -167,7 +167,7 @@
- connection.execute(users.insert(), user_id=2, user_name='user2')
- try:
- connection.execute(users.insert(), user_id=2, user_name='user2.5')
-- except Exception, e:
-+ except Exception as e:
- trans.__exit__(*sys.exc_info())
-
- assert not trans.is_active
-@@ -1019,7 +1019,7 @@
- con = testing.db.connect()
- sel = counters.select(for_update=update_style,
- whereclause=counters.c.counter_id == 1)
-- for i in xrange(count):
-+ for i in range(count):
- trans = con.begin()
- try:
- existing = con.execute(sel).first()
-@@ -1033,7 +1033,7 @@
- raise AssertionError('Got %s post-update, expected '
- '%s' % (readback['counter_value'], incr))
- trans.commit()
-- except Exception, e:
-+ except Exception as e:
- trans.rollback()
- errors.append(e)
- break
-@@ -1057,7 +1057,7 @@
- db.execute(counters.insert(), counter_id=1, counter_value=0)
- iterations, thread_count = 10, 5
- threads, errors = [], []
-- for i in xrange(thread_count):
-+ for i in range(thread_count):
- thrd = threading.Thread(target=self.increment,
- args=(iterations, ),
- kwargs={'errors': errors,
-@@ -1088,7 +1088,7 @@
- rows = con.execute(sel).fetchall()
- time.sleep(0.25)
- trans.commit()
-- except Exception, e:
-+ except Exception as e:
- trans.rollback()
- errors.append(e)
- con.close()
-@@ -1105,7 +1105,7 @@
- db.execute(counters.insert(), counter_id=cid + 1,
- counter_value=0)
- errors, threads = [], []
-- for i in xrange(thread_count):
-+ for i in range(thread_count):
- thrd = threading.Thread(target=self.overlap,
- args=(groups.pop(0), errors,
- update_style))
-diff -r 9d0639b9d3be test/ext/declarative/test_basic.py
---- a/test/ext/declarative/test_basic.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/declarative/test_basic.py Sat Apr 27 19:42:17 2013 -0400
-@@ -127,14 +127,13 @@
- class BrokenMeta(type):
- def __getattribute__(self, attr):
- if attr == 'xyzzy':
-- raise AttributeError, 'xyzzy'
-+ raise AttributeError('xyzzy')
- else:
- return object.__getattribute__(self,attr)
-
- # even though this class has an xyzzy attribute, getattr(cls,"xyzzy")
- # fails
-- class BrokenParent(object):
-- __metaclass__ = BrokenMeta
-+ class BrokenParent(object, metaclass=BrokenMeta):
- xyzzy = "magic"
-
- # _as_declarative() inspects obj.__class__.__bases__
-@@ -1458,12 +1457,12 @@
-
- @classmethod
- def insert_data(cls):
-- params = [dict(zip(('id', 'name'), column_values))
-+ params = [dict(list(zip(('id', 'name'), column_values)))
- for column_values in [(7, 'jack'), (8, 'ed'), (9,
- 'fred'), (10, 'chuck')]]
- User.__table__.insert().execute(params)
-- Address.__table__.insert().execute([dict(zip(('id',
-- 'user_id', 'email'), column_values))
-+ Address.__table__.insert().execute([dict(list(zip(('id',
-+ 'user_id', 'email'), column_values)))
- for column_values in [(1, 7, 'jack@bean.com'), (2,
- 8, 'ed@wood.com'), (3, 8, 'ed@bettyboop.com'), (4,
- 8, 'ed@lala.com'), (5, 9, 'fred@fred.com')]])
-@@ -1492,6 +1491,6 @@
- for inline in True, False:
- for stringbased in True, False:
- testclass = _produce_test(inline, stringbased)
-- exec '%s = testclass' % testclass.__name__
-+ exec('%s = testclass' % testclass.__name__)
- del testclass
-
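
The declarative test hunks above switch from the Python 2 __metaclass__ attribute to the Python 3 keyword form and call exec() as a function. A standalone sketch of both converted idioms, using a hypothetical metaclass in place of BrokenMeta:

    class TaggingMeta(type):
        """Hypothetical metaclass; only adds a 'tag' attribute."""
        def __new__(mcs, name, bases, ns):
            ns.setdefault('tag', name.lower())
            return super().__new__(mcs, name, bases, ns)

    # Python 3 spelling of: class Parent(object): __metaclass__ = TaggingMeta
    class Parent(metaclass=TaggingMeta):
        pass

    assert Parent.tag == 'parent'

    # exec is a function on Python 3, so the old statement form becomes a call.
    namespace = {}
    exec('answer = 6 * 7', namespace)
    assert namespace['answer'] == 42
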
-diff -r 9d0639b9d3be test/ext/declarative/test_clsregistry.py
---- a/test/ext/declarative/test_clsregistry.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/declarative/test_clsregistry.py Sat Apr 27 19:42:17 2013 -0400
-@@ -124,7 +124,7 @@
-
- base = weakref.WeakValueDictionary()
-
-- for i in xrange(3):
-+ for i in range(3):
- f1 = MockClass(base, "foo.bar.Foo")
- f2 = MockClass(base, "foo.alt.Foo")
- clsregistry.add_class("Foo", f1)
-diff -r 9d0639b9d3be test/ext/declarative/test_inheritance.py
---- a/test/ext/declarative/test_inheritance.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/declarative/test_inheritance.py Sat Apr 27 19:42:17 2013 -0400
-@@ -573,7 +573,7 @@
- primary_language = Column(String(50))
-
- assert Person.__table__.c.golf_swing is not None
-- assert not Person.__table__.c.has_key('primary_language')
-+ assert 'primary_language' not in Person.__table__.c
- assert Engineer.__table__.c.primary_language is not None
- assert Engineer.primary_language is not None
- assert Manager.golf_swing is not None
-diff -r 9d0639b9d3be test/ext/declarative/test_mixin.py
---- a/test/ext/declarative/test_mixin.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/declarative/test_mixin.py Sat Apr 27 19:42:17 2013 -0400
-@@ -685,7 +685,7 @@
- __mapper_args__ = dict(polymorphic_identity='specific')
-
- assert Specific.__table__ is Generic.__table__
-- eq_(Generic.__table__.c.keys(), ['id', 'type', 'value'])
-+ eq_(list(Generic.__table__.c.keys()), ['id', 'type', 'value'])
- assert class_mapper(Specific).polymorphic_on \
- is Generic.__table__.c.type
- eq_(class_mapper(Specific).polymorphic_identity, 'specific')
-@@ -714,9 +714,9 @@
-
- eq_(Generic.__table__.name, 'generic')
- eq_(Specific.__table__.name, 'specific')
-- eq_(Generic.__table__.c.keys(), ['timestamp', 'id',
-+ eq_(list(Generic.__table__.c.keys()), ['timestamp', 'id',
- 'python_type'])
-- eq_(Specific.__table__.c.keys(), ['id'])
-+ eq_(list(Specific.__table__.c.keys()), ['id'])
- eq_(Generic.__table__.kwargs, {'mysql_engine': 'InnoDB'})
- eq_(Specific.__table__.kwargs, {'mysql_engine': 'InnoDB'})
-
-@@ -749,12 +749,12 @@
- primary_key=True)
-
- eq_(BaseType.__table__.name, 'basetype')
-- eq_(BaseType.__table__.c.keys(), ['timestamp', 'type', 'id',
-+ eq_(list(BaseType.__table__.c.keys()), ['timestamp', 'type', 'id',
- 'value'])
- eq_(BaseType.__table__.kwargs, {'mysql_engine': 'InnoDB'})
- assert Single.__table__ is BaseType.__table__
- eq_(Joined.__table__.name, 'joined')
-- eq_(Joined.__table__.c.keys(), ['id'])
-+ eq_(list(Joined.__table__.c.keys()), ['id'])
- eq_(Joined.__table__.kwargs, {'mysql_engine': 'InnoDB'})
-
- def test_col_copy_vs_declared_attr_joined_propagation(self):
-@@ -839,7 +839,7 @@
- __mapper_args__ = dict(polymorphic_identity='specific')
-
- eq_(BaseType.__table__.name, 'basetype')
-- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'value'])
-+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'value'])
- assert Specific.__table__ is BaseType.__table__
- assert class_mapper(Specific).polymorphic_on \
- is BaseType.__table__.c.type
-@@ -870,9 +870,9 @@
- primary_key=True)
-
- eq_(BaseType.__table__.name, 'basetype')
-- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'value'])
-+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'value'])
- eq_(Specific.__table__.name, 'specific')
-- eq_(Specific.__table__.c.keys(), ['id'])
-+ eq_(list(Specific.__table__.c.keys()), ['id'])
-
- def test_single_back_propagate(self):
-
-@@ -891,7 +891,7 @@
-
- __mapper_args__ = dict(polymorphic_identity='specific')
-
-- eq_(BaseType.__table__.c.keys(), ['type', 'id', 'timestamp'])
-+ eq_(list(BaseType.__table__.c.keys()), ['type', 'id', 'timestamp'])
-
- def test_table_in_model_and_same_column_in_mixin(self):
-
-@@ -987,7 +987,7 @@
- id = Column(Integer, primary_key=True)
- __tablename__ = 'model'
-
-- eq_(Model.__table__.c.keys(), ['col1', 'col3', 'col2', 'col4',
-+ eq_(list(Model.__table__.c.keys()), ['col1', 'col3', 'col2', 'col4',
- 'id'])
-
- def test_honor_class_mro_one(self):
-@@ -1078,8 +1078,8 @@
- filter_class = FilterB
- id = Column(Integer(), primary_key=True)
-
-- TypeA(filters=[u'foo'])
-- TypeB(filters=[u'foo'])
-+ TypeA(filters=['foo'])
-+ TypeB(filters=['foo'])
-
- class DeclarativeMixinPropertyTest(DeclarativeTestBase):
-
-diff -r 9d0639b9d3be test/ext/test_associationproxy.py
---- a/test/ext/test_associationproxy.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/test_associationproxy.py Sat Apr 27 19:42:17 2013 -0400
-@@ -288,7 +288,7 @@
- ch = Child('a', 'regular')
- p1._children.append(ch)
-
-- self.assert_(ch in p1._children.values())
-+ self.assert_(ch in list(p1._children.values()))
- self.assert_(len(p1._children) == 1)
-
- self.assert_(p1.children)
-@@ -300,7 +300,7 @@
-
- p1.children['b'] = 'proxied'
-
-- self.assert_('proxied' in p1.children.values())
-+ self.assert_('proxied' in list(p1.children.values()))
- self.assert_('b' in p1.children)
- self.assert_('proxied' not in p1._children)
- self.assert_(len(p1.children) == 2)
-@@ -526,9 +526,9 @@
- try:
- self.assert_(p.children == control)
- except:
-- print 'Test %s.%s(%s):' % (set(base), op, other)
-- print 'want', repr(control)
-- print 'got', repr(p.children)
-+ print('Test %s.%s(%s):' % (set(base), op, other))
-+ print('want', repr(control))
-+ print('got', repr(p.children))
- raise
-
- p = self.roundtrip(p)
-@@ -536,9 +536,9 @@
- try:
- self.assert_(p.children == control)
- except:
-- print 'Test %s.%s(%s):' % (base, op, other)
-- print 'want', repr(control)
-- print 'got', repr(p.children)
-+ print('Test %s.%s(%s):' % (base, op, other))
-+ print('want', repr(control))
-+ print('got', repr(p.children))
- raise
-
- # in-place mutations
-@@ -553,15 +553,15 @@
- p.children = base[:]
- control = set(base[:])
-
-- exec "p.children %s other" % op
-- exec "control %s other" % op
-+ exec("p.children %s other" % op)
-+ exec("control %s other" % op)
-
- try:
- self.assert_(p.children == control)
- except:
-- print 'Test %s %s %s:' % (set(base), op, other)
-- print 'want', repr(control)
-- print 'got', repr(p.children)
-+ print('Test %s %s %s:' % (set(base), op, other))
-+ print('want', repr(control))
-+ print('got', repr(p.children))
- raise
-
- p = self.roundtrip(p)
-@@ -569,9 +569,9 @@
- try:
- self.assert_(p.children == control)
- except:
-- print 'Test %s %s %s:' % (base, op, other)
-- print 'want', repr(control)
-- print 'got', repr(p.children)
-+ print('Test %s %s %s:' % (base, op, other))
-+ print('want', repr(control))
-+ print('got', repr(p.children))
- raise
-
-
-diff -r 9d0639b9d3be test/ext/test_serializer.py
---- a/test/ext/test_serializer.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/ext/test_serializer.py Sat Apr 27 19:42:17 2013 -0400
-@@ -47,12 +47,12 @@
-
- @classmethod
- def insert_data(cls):
-- params = [dict(zip(('id', 'name'), column_values))
-+ params = [dict(list(zip(('id', 'name'), column_values)))
- for column_values in [(7, 'jack'), (8, 'ed'), (9,
- 'fred'), (10, 'chuck')]]
- users.insert().execute(params)
-- addresses.insert().execute([dict(zip(('id', 'user_id', 'email'
-- ), column_values))
-+ addresses.insert().execute([dict(list(zip(('id', 'user_id', 'email'
-+ ), column_values)))
- for column_values in [(1, 7,
- 'jack@bean.com'), (2, 8,
- 'ed@wood.com'), (3, 8,
-@@ -85,8 +85,8 @@
- users.metadata, None)
- eq_(str(expr), str(re_expr))
- assert re_expr.bind is testing.db
-- eq_(re_expr.execute().fetchall(), [(7, u'jack'), (8, u'ed'),
-- (8, u'ed'), (8, u'ed'), (9, u'fred')])
-+ eq_(re_expr.execute().fetchall(), [(7, 'jack'), (8, 'ed'),
-+ (8, 'ed'), (8, 'ed'), (9, 'fred')])
-
- def test_query_one(self):
- q = Session.query(User).\
-@@ -121,7 +121,7 @@
- q2 = serializer.loads(serializer.dumps(q, -1), users.metadata,
- Session)
- eq_(q2.all(), [User(name='fred')])
-- eq_(list(q2.values(User.id, User.name)), [(9, u'fred')])
-+ eq_(list(q2.values(User.id, User.name)), [(9, 'fred')])
-
- def test_query_three(self):
- ua = aliased(User)
-@@ -134,7 +134,7 @@
-
- # try to pull out the aliased entity here...
- ua_2 = q2._entities[0].entity_zero.entity
-- eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, u'fred')])
-+ eq_(list(q2.values(ua_2.id, ua_2.name)), [(9, 'fred')])
-
- def test_orm_join(self):
- from sqlalchemy.orm.util import join
-diff -r 9d0639b9d3be test/orm/inheritance/test_assorted_poly.py
---- a/test/orm/inheritance/test_assorted_poly.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_assorted_poly.py Sat Apr 27 19:42:17 2013 -0400
-@@ -18,7 +18,7 @@
-
- class AttrSettable(object):
- def __init__(self, **kwargs):
-- [setattr(self, k, v) for k, v in kwargs.iteritems()]
-+ [setattr(self, k, v) for k, v in kwargs.items()]
- def __repr__(self):
- return self.__class__.__name__ + "(%s)" % (hex(id(self)))
-
-@@ -386,7 +386,7 @@
- # class definitions
- class Person(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Ordinary person %s" % self.name
-@@ -400,7 +400,7 @@
- (self.name, self.longer_status)
- class Car(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Car number %d" % self.car_id
-@@ -512,7 +512,7 @@
-
- class Person(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Ordinary person %s" % self.name
-@@ -526,7 +526,7 @@
- (self.name, self.longer_status)
- class Car(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- def __repr__(self):
- return "Car number %d" % self.car_id
-@@ -646,7 +646,7 @@
-
- class PersistentObject(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
-
- class Status(PersistentObject):
-@@ -838,7 +838,7 @@
- # class definitions
- class PersistentObject(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- class Status(PersistentObject):
- def __repr__(self):
-@@ -968,7 +968,7 @@
- def test_threelevels(self):
- class Employee( object):
- def set( me, **kargs):
-- for k,v in kargs.iteritems(): setattr( me, k, v)
-+ for k,v in kargs.items(): setattr( me, k, v)
- return me
- def __str__(me):
- return str(me.__class__.__name__)+':'+str(me.name)
-diff -r 9d0639b9d3be test/orm/inheritance/test_basic.py
---- a/test/orm/inheritance/test_basic.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_basic.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1355,7 +1355,7 @@
- def _do_test(self, j1, j2):
- class A(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.items():
-+ for key, value in list(kwargs.items()):
- setattr(self, key, value)
-
- class B(A):
-@@ -1830,7 +1830,7 @@
- "SELECT sub.counter AS sub_counter, base.counter AS base_counter, "
- "sub.counter2 AS sub_counter2 FROM base JOIN sub ON "
- "base.id = sub.id WHERE base.id = :param_1",
-- lambda ctx:{u'param_1': s1.id}
-+ lambda ctx:{'param_1': s1.id}
- ),
- )
-
-@@ -1910,7 +1910,7 @@
- "SELECT subsub.counter2 AS subsub_counter2, "
- "sub.counter2 AS sub_counter2 FROM subsub, sub "
- "WHERE :param_1 = sub.id AND sub.id = subsub.id",
-- lambda ctx:{u'param_1': s1.id}
-+ lambda ctx:{'param_1': s1.id}
- ),
- )
-
-@@ -2409,9 +2409,9 @@
- polymorphic_identity='foo')
- sess = create_session()
- f = Foo()
-- f.content_type = u'bar'
-+ f.content_type = 'bar'
- sess.add(f)
- sess.flush()
- f_id = f.id
- sess.expunge_all()
-- assert sess.query(Content).get(f_id).content_type == u'bar'
-+ assert sess.query(Content).get(f_id).content_type == 'bar'
-diff -r 9d0639b9d3be test/orm/inheritance/test_concrete.py
---- a/test/orm/inheritance/test_concrete.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_concrete.py Sat Apr 27 19:42:17 2013 -0400
-@@ -722,10 +722,10 @@
- @classmethod
- def insert_data(cls):
- refugees_table.insert().execute(dict(refugee_fid=1,
-- name=u'refugee1'), dict(refugee_fid=2, name=u'refugee2'
-+ name='refugee1'), dict(refugee_fid=2, name='refugee2'
- ))
- offices_table.insert().execute(dict(office_fid=1,
-- name=u'office1'), dict(office_fid=2, name=u'office2'))
-+ name='office1'), dict(office_fid=2, name='office2'))
-
- def test_keys(self):
- pjoin = polymorphic_union({'refugee': refugees_table, 'office'
-diff -r 9d0639b9d3be test/orm/inheritance/test_magazine.py
---- a/test/orm/inheritance/test_magazine.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_magazine.py Sat Apr 27 19:42:17 2013 -0400
-@@ -8,7 +8,7 @@
-
- class BaseObject(object):
- def __init__(self, *args, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
- class Publication(BaseObject):
- pass
-@@ -200,15 +200,15 @@
-
-
- session.flush()
-- print [x for x in session]
-+ print([x for x in session])
- session.expunge_all()
-
- session.flush()
- session.expunge_all()
- p = session.query(Publication).filter(Publication.name=="Test").one()
-
-- print p.issues[0].locations[0].magazine.pages
-- print [page, page2, page3]
-+ print(p.issues[0].locations[0].magazine.pages)
-+ print([page, page2, page3])
- assert repr(p.issues[0].locations[0].magazine.pages) == repr([page, page2, page3]), repr(p.issues[0].locations[0].magazine.pages)
-
- test_roundtrip = function_named(
-diff -r 9d0639b9d3be test/orm/inheritance/test_manytomany.py
---- a/test/orm/inheritance/test_manytomany.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_manytomany.py Sat Apr 27 19:42:17 2013 -0400
-@@ -42,7 +42,7 @@
- def testbasic(self):
- class Principal(object):
- def __init__(self, **kwargs):
-- for key, value in kwargs.iteritems():
-+ for key, value in kwargs.items():
- setattr(self, key, value)
-
- class User(Principal):
-@@ -94,8 +94,8 @@
-
- mapper(Foo, foo)
- mapper(Bar, bar, inherits=Foo)
-- print foo.join(bar).primary_key
-- print class_mapper(Bar).primary_key
-+ print(foo.join(bar).primary_key)
-+ print(class_mapper(Bar).primary_key)
- b = Bar('somedata')
- sess = create_session()
- sess.add(b)
-@@ -133,8 +133,8 @@
- sess.expunge_all()
-
- l = sess.query(Bar).all()
-- print l[0]
-- print l[0].foos
-+ print(l[0])
-+ print(l[0].foos)
- self.assert_unordered_result(l, Bar,
- # {'id':1, 'data':'barfoo', 'bid':1, 'foos':(Foo, [{'id':2,'data':'subfoo1'}, {'id':3,'data':'subfoo2'}])},
- {'id':b.id, 'data':'barfoo', 'foos':(Foo, [{'id':f1.id,'data':'subfoo1'}, {'id':f2.id,'data':'subfoo2'}])},
-@@ -197,7 +197,7 @@
- compare = [repr(b)] + sorted([repr(o) for o in b.foos])
- sess.expunge_all()
- l = sess.query(Bar).all()
-- print repr(l[0]) + repr(l[0].foos)
-+ print(repr(l[0]) + repr(l[0].foos))
- found = [repr(l[0])] + sorted([repr(o) for o in l[0].foos])
- eq_(found, compare)
-
-@@ -239,11 +239,11 @@
- sess.expunge_all()
-
- l = sess.query(Blub).all()
-- print l
-+ print(l)
- self.assert_(repr(l[0]) == compare)
- sess.expunge_all()
- x = sess.query(Blub).filter_by(id=blubid).one()
-- print x
-+ print(x)
- self.assert_(repr(x) == compare)
-
-
-diff -r 9d0639b9d3be test/orm/inheritance/test_polymorphic_rel.py
---- a/test/orm/inheritance/test_polymorphic_rel.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_polymorphic_rel.py Sat Apr 27 19:42:17 2013 -0400
-@@ -7,7 +7,7 @@
- from sqlalchemy import testing
- from sqlalchemy.testing import assert_raises, eq_
-
--from _poly_fixtures import Company, Person, Engineer, Manager, Boss, \
-+from ._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
- Machine, Paperwork, _Polymorphic,\
- _PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
- _PolymorphicAliasedJoins
-@@ -118,24 +118,24 @@
- eq_(q.count(), 1)
- eq_(q.all(), [
- (
-- Company(company_id=1, name=u'MegaCorp, Inc.'),
-+ Company(company_id=1, name='MegaCorp, Inc.'),
- Engineer(
-- status=u'regular engineer',
-- engineer_name=u'dilbert',
-- name=u'dilbert',
-+ status='regular engineer',
-+ engineer_name='dilbert',
-+ name='dilbert',
- company_id=1,
-- primary_language=u'java',
-+ primary_language='java',
- person_id=1,
-- type=u'engineer'),
-- Company(company_id=1, name=u'MegaCorp, Inc.'),
-+ type='engineer'),
-+ Company(company_id=1, name='MegaCorp, Inc.'),
- Engineer(
-- status=u'regular engineer',
-- engineer_name=u'wally',
-- name=u'wally',
-+ status='regular engineer',
-+ engineer_name='wally',
-+ name='wally',
- company_id=1,
-- primary_language=u'c++',
-+ primary_language='c++',
- person_id=2,
-- type=u'engineer')
-+ type='engineer')
- )
- ])
-
-@@ -337,8 +337,8 @@
- sess = create_session()
-
- expected = [
-- (u'dogbert',),
-- (u'pointy haired boss',)]
-+ ('dogbert',),
-+ ('pointy haired boss',)]
- eq_(sess.query(Manager.name)
- .order_by(Manager.name).all(),
- expected)
-@@ -346,9 +346,9 @@
- def test_join_from_columns_or_subclass_two(self):
- sess = create_session()
- expected = [
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',)]
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',)]
- eq_(sess.query(Manager.name)
- .join(Paperwork, Manager.paperwork)
- .order_by(Manager.name).all(),
-@@ -357,14 +357,14 @@
- def test_join_from_columns_or_subclass_three(self):
- sess = create_session()
- expected = [
-- (u'dilbert',),
-- (u'dilbert',),
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',),
-- (u'vlad',),
-- (u'wally',),
-- (u'wally',)]
-+ ('dilbert',),
-+ ('dilbert',),
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',),
-+ ('vlad',),
-+ ('wally',),
-+ ('wally',)]
- eq_(sess.query(Person.name)
- .join(Paperwork, Person.paperwork)
- .order_by(Person.name).all(),
-@@ -375,14 +375,14 @@
- # Load Person.name, joining from Person -> paperwork, get all
- # the people.
- expected = [
-- (u'dilbert',),
-- (u'dilbert',),
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',),
-- (u'vlad',),
-- (u'wally',),
-- (u'wally',)]
-+ ('dilbert',),
-+ ('dilbert',),
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',),
-+ ('vlad',),
-+ ('wally',),
-+ ('wally',)]
- eq_(sess.query(Person.name)
- .join(paperwork,
- Person.person_id == paperwork.c.person_id)
-@@ -393,9 +393,9 @@
- sess = create_session()
- # same, on manager. get only managers.
- expected = [
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',)]
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',)]
- eq_(sess.query(Manager.name)
- .join(paperwork,
- Manager.person_id == paperwork.c.person_id)
-@@ -418,14 +418,14 @@
- # here, it joins to the full result set. This is 0.6's
- # behavior and is more or less wrong.
- expected = [
-- (u'dilbert',),
-- (u'dilbert',),
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',),
-- (u'vlad',),
-- (u'wally',),
-- (u'wally',)]
-+ ('dilbert',),
-+ ('dilbert',),
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',),
-+ ('vlad',),
-+ ('wally',),
-+ ('wally',)]
- eq_(sess.query(Person.name)
- .join(paperwork,
- Manager.person_id == paperwork.c.person_id)
-@@ -435,9 +435,9 @@
- # when a join is present and managers.person_id is available,
- # you get the managers.
- expected = [
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',)]
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',)]
- eq_(sess.query(Person.name)
- .join(paperwork,
- Manager.person_id == paperwork.c.person_id)
-@@ -454,9 +454,9 @@
- def test_join_from_columns_or_subclass_eight(self):
- sess = create_session()
- expected = [
-- (u'dogbert',),
-- (u'dogbert',),
-- (u'pointy haired boss',)]
-+ ('dogbert',),
-+ ('dogbert',),
-+ ('pointy haired boss',)]
- eq_(sess.query(Manager.name)
- .join(paperwork,
- Manager.person_id == paperwork.c.person_id)
-@@ -474,9 +474,9 @@
- def test_join_from_columns_or_subclass_ten(self):
- sess = create_session()
- expected = [
-- (u'pointy haired boss', u'review #1'),
-- (u'dogbert', u'review #2'),
-- (u'dogbert', u'review #3')]
-+ ('pointy haired boss', 'review #1'),
-+ ('dogbert', 'review #2'),
-+ ('dogbert', 'review #3')]
- eq_(sess.query(Manager.name, Paperwork.description)
- .join(Paperwork,
- Manager.person_id == Paperwork.person_id)
-@@ -486,9 +486,9 @@
- def test_join_from_columns_or_subclass_eleven(self):
- sess = create_session()
- expected = [
-- (u'pointy haired boss',),
-- (u'dogbert',),
-- (u'dogbert',)]
-+ ('pointy haired boss',),
-+ ('dogbert',),
-+ ('dogbert',)]
- malias = aliased(Manager)
- eq_(sess.query(malias.name)
- .join(paperwork,
-@@ -977,32 +977,32 @@
-
- expected = [
- (Engineer(
-- status=u'regular engineer',
-- engineer_name=u'dilbert',
-- name=u'dilbert',
-+ status='regular engineer',
-+ engineer_name='dilbert',
-+ name='dilbert',
- company_id=1,
-- primary_language=u'java',
-+ primary_language='java',
- person_id=1,
-- type=u'engineer'),
-- u'MegaCorp, Inc.'),
-+ type='engineer'),
-+ 'MegaCorp, Inc.'),
- (Engineer(
-- status=u'regular engineer',
-- engineer_name=u'wally',
-- name=u'wally',
-+ status='regular engineer',
-+ engineer_name='wally',
-+ name='wally',
- company_id=1,
-- primary_language=u'c++',
-+ primary_language='c++',
- person_id=2,
-- type=u'engineer'),
-- u'MegaCorp, Inc.'),
-+ type='engineer'),
-+ 'MegaCorp, Inc.'),
- (Engineer(
-- status=u'elbonian engineer',
-- engineer_name=u'vlad',
-- name=u'vlad',
-+ status='elbonian engineer',
-+ engineer_name='vlad',
-+ name='vlad',
- company_id=2,
-- primary_language=u'cobol',
-+ primary_language='cobol',
- person_id=5,
-- type=u'engineer'),
-- u'Elbonia, Inc.')]
-+ type='engineer'),
-+ 'Elbonia, Inc.')]
- eq_(sess.query(Engineer, Company.name)
- .join(Company.employees)
- .filter(Person.type == 'engineer').all(),
-@@ -1011,9 +1011,9 @@
- def test_mixed_entities_two(self):
- sess = create_session()
- expected = [
-- (u'java', u'MegaCorp, Inc.'),
-- (u'cobol', u'Elbonia, Inc.'),
-- (u'c++', u'MegaCorp, Inc.')]
-+ ('java', 'MegaCorp, Inc.'),
-+ ('cobol', 'Elbonia, Inc.'),
-+ ('c++', 'MegaCorp, Inc.')]
- eq_(sess.query(Engineer.primary_language, Company.name)
- .join(Company.employees)
- .filter(Person.type == 'engineer')
-@@ -1025,19 +1025,19 @@
- palias = aliased(Person)
- expected = [(
- Engineer(
-- status=u'elbonian engineer',
-- engineer_name=u'vlad',
-- name=u'vlad',
-- primary_language=u'cobol'),
-- u'Elbonia, Inc.',
-+ status='elbonian engineer',
-+ engineer_name='vlad',
-+ name='vlad',
-+ primary_language='cobol'),
-+ 'Elbonia, Inc.',
- Engineer(
-- status=u'regular engineer',
-- engineer_name=u'dilbert',
-- name=u'dilbert',
-+ status='regular engineer',
-+ engineer_name='dilbert',
-+ name='dilbert',
- company_id=1,
-- primary_language=u'java',
-+ primary_language='java',
- person_id=1,
-- type=u'engineer'))]
-+ type='engineer'))]
- eq_(sess.query(Person, Company.name, palias)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.')
-@@ -1049,19 +1049,19 @@
- palias = aliased(Person)
- expected = [(
- Engineer(
-- status=u'regular engineer',
-- engineer_name=u'dilbert',
-- name=u'dilbert',
-+ status='regular engineer',
-+ engineer_name='dilbert',
-+ name='dilbert',
- company_id=1,
-- primary_language=u'java',
-+ primary_language='java',
- person_id=1,
-- type=u'engineer'),
-- u'Elbonia, Inc.',
-+ type='engineer'),
-+ 'Elbonia, Inc.',
- Engineer(
-- status=u'elbonian engineer',
-- engineer_name=u'vlad',
-- name=u'vlad',
-- primary_language=u'cobol'),)]
-+ status='elbonian engineer',
-+ engineer_name='vlad',
-+ name='vlad',
-+ primary_language='cobol'),)]
- eq_(sess.query(palias, Company.name, Person)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.')
-@@ -1071,7 +1071,7 @@
- def test_mixed_entities_five(self):
- sess = create_session()
- palias = aliased(Person)
-- expected = [(u'vlad', u'Elbonia, Inc.', u'dilbert')]
-+ expected = [('vlad', 'Elbonia, Inc.', 'dilbert')]
- eq_(sess.query(Person.name, Company.name, palias.name)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.')
-@@ -1082,9 +1082,9 @@
- sess = create_session()
- palias = aliased(Person)
- expected = [
-- (u'manager', u'dogbert', u'engineer', u'dilbert'),
-- (u'manager', u'dogbert', u'engineer', u'wally'),
-- (u'manager', u'dogbert', u'boss', u'pointy haired boss')]
-+ ('manager', 'dogbert', 'engineer', 'dilbert'),
-+ ('manager', 'dogbert', 'engineer', 'wally'),
-+ ('manager', 'dogbert', 'boss', 'pointy haired boss')]
- eq_(sess.query(Person.type, Person.name, palias.type, palias.name)
- .filter(Person.company_id == palias.company_id)
- .filter(Person.name == 'dogbert')
-@@ -1095,14 +1095,14 @@
- def test_mixed_entities_seven(self):
- sess = create_session()
- expected = [
-- (u'dilbert', u'tps report #1'),
-- (u'dilbert', u'tps report #2'),
-- (u'dogbert', u'review #2'),
-- (u'dogbert', u'review #3'),
-- (u'pointy haired boss', u'review #1'),
-- (u'vlad', u'elbonian missive #3'),
-- (u'wally', u'tps report #3'),
-- (u'wally', u'tps report #4')]
-+ ('dilbert', 'tps report #1'),
-+ ('dilbert', 'tps report #2'),
-+ ('dogbert', 'review #2'),
-+ ('dogbert', 'review #3'),
-+ ('pointy haired boss', 'review #1'),
-+ ('vlad', 'elbonian missive #3'),
-+ ('wally', 'tps report #3'),
-+ ('wally', 'tps report #4')]
- eq_(sess.query(Person.name, Paperwork.description)
- .filter(Person.person_id == Paperwork.person_id)
- .order_by(Person.name, Paperwork.description).all(),
-@@ -1116,7 +1116,7 @@
-
- def test_mixed_entities_nine(self):
- sess = create_session()
-- expected = [(u'Elbonia, Inc.', 1), (u'MegaCorp, Inc.', 4)]
-+ expected = [('Elbonia, Inc.', 1), ('MegaCorp, Inc.', 4)]
- eq_(sess.query(Company.name, func.count(Person.person_id))
- .filter(Company.company_id == Person.company_id)
- .group_by(Company.name)
-@@ -1125,7 +1125,7 @@
-
- def test_mixed_entities_ten(self):
- sess = create_session()
-- expected = [(u'Elbonia, Inc.', 1), (u'MegaCorp, Inc.', 4)]
-+ expected = [('Elbonia, Inc.', 1), ('MegaCorp, Inc.', 4)]
- eq_(sess.query(Company.name, func.count(Person.person_id))
- .join(Company.employees)
- .group_by(Company.name)
-@@ -1153,14 +1153,14 @@
-
- def test_mixed_entities_eleven(self):
- sess = create_session()
-- expected = [(u'java',), (u'c++',), (u'cobol',)]
-+ expected = [('java',), ('c++',), ('cobol',)]
- eq_(sess.query(Engineer.primary_language)
- .filter(Person.type == 'engineer').all(),
- expected)
-
- def test_mixed_entities_twelve(self):
- sess = create_session()
-- expected = [(u'vlad', u'Elbonia, Inc.')]
-+ expected = [('vlad', 'Elbonia, Inc.')]
- eq_(sess.query(Person.name, Company.name)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.').all(),
-@@ -1168,15 +1168,15 @@
-
- def test_mixed_entities_thirteen(self):
- sess = create_session()
-- expected = [(u'pointy haired boss', u'fore')]
-+ expected = [('pointy haired boss', 'fore')]
- eq_(sess.query(Boss.name, Boss.golf_swing).all(), expected)
-
- def test_mixed_entities_fourteen(self):
- sess = create_session()
- expected = [
-- (u'dilbert', u'java'),
-- (u'wally', u'c++'),
-- (u'vlad', u'cobol')]
-+ ('dilbert', 'java'),
-+ ('wally', 'c++'),
-+ ('vlad', 'cobol')]
- eq_(sess.query(Engineer.name, Engineer.primary_language).all(),
- expected)
-
-@@ -1184,12 +1184,12 @@
- sess = create_session()
-
- expected = [(
-- u'Elbonia, Inc.',
-+ 'Elbonia, Inc.',
- Engineer(
-- status=u'elbonian engineer',
-- engineer_name=u'vlad',
-- name=u'vlad',
-- primary_language=u'cobol'))]
-+ status='elbonian engineer',
-+ engineer_name='vlad',
-+ name='vlad',
-+ primary_language='cobol'))]
- eq_(sess.query(Company.name, Person)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.').all(),
-@@ -1199,11 +1199,11 @@
- sess = create_session()
- expected = [(
- Engineer(
-- status=u'elbonian engineer',
-- engineer_name=u'vlad',
-- name=u'vlad',
-- primary_language=u'cobol'),
-- u'Elbonia, Inc.')]
-+ status='elbonian engineer',
-+ engineer_name='vlad',
-+ name='vlad',
-+ primary_language='cobol'),
-+ 'Elbonia, Inc.')]
- eq_(sess.query(Person, Company.name)
- .join(Company.employees)
- .filter(Company.name == 'Elbonia, Inc.').all(),
-diff -r 9d0639b9d3be test/orm/inheritance/test_productspec.py
---- a/test/orm/inheritance/test_productspec.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_productspec.py Sat Apr 27 19:42:17 2013 -0400
-@@ -122,8 +122,8 @@
-
- a1 = session.query(Product).filter_by(name='a1').one()
- new = repr(a1)
-- print orig
-- print new
-+ print(orig)
-+ print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>] documents=None'
-
- def testtwo(self):
-@@ -153,8 +153,8 @@
- session.flush()
- session.expunge_all()
- new = repr(session.query(SpecLine).all())
-- print orig
-- print new
-+ print(orig)
-+ print(new)
- assert orig == new == '[<SpecLine 1.0 <Product p1>>, <SpecLine 1.0 <Detail d1>>]'
-
- def testthree(self):
-@@ -206,8 +206,8 @@
-
- a1 = session.query(Product).filter_by(name='a1').one()
- new = repr(a1)
-- print orig
-- print new
-+ print(orig)
-+ print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
-
- def testfour(self):
-@@ -245,8 +245,8 @@
-
- a1 = session.query(Product).filter_by(name='a1').one()
- new = repr(a1)
-- print orig
-- print new
-+ print(orig)
-+ print(new)
- assert orig == new == '<Assembly a1> specification=None documents=[<RasterDocument doc2>]'
-
- del a1.documents[0]
-@@ -312,7 +312,7 @@
-
- a1 = session.query(Product).filter_by(name='a1').one()
- new = repr(a1)
-- print orig
-- print new
-+ print(orig)
-+ print(new)
- assert orig == new == '<Assembly a1> specification=[<SpecLine 1.0 <Detail d1>>] documents=[<Document doc1>, <RasterDocument doc2>]'
-
-diff -r 9d0639b9d3be test/orm/inheritance/test_relationship.py
---- a/test/orm/inheritance/test_relationship.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_relationship.py Sat Apr 27 19:42:17 2013 -0400
-@@ -287,7 +287,7 @@
- def _five_obj_fixture(self):
- sess = Session()
- e1, e2, e3, e4, e5 = [
-- Engineer(name='e%d' % (i + 1)) for i in xrange(5)
-+ Engineer(name='e%d' % (i + 1)) for i in range(5)
- ]
- e3.reports_to = e1
- e4.reports_to = e2
-diff -r 9d0639b9d3be test/orm/inheritance/test_with_poly.py
---- a/test/orm/inheritance/test_with_poly.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/inheritance/test_with_poly.py Sat Apr 27 19:42:17 2013 -0400
-@@ -11,7 +11,7 @@
- from sqlalchemy.testing.schema import Table, Column
- from sqlalchemy.testing import assert_raises, eq_
-
--from _poly_fixtures import Company, Person, Engineer, Manager, Boss, \
-+from ._poly_fixtures import Company, Person, Engineer, Manager, Boss, \
- Machine, Paperwork, _PolymorphicFixtureBase, _Polymorphic,\
- _PolymorphicPolymorphic, _PolymorphicUnions, _PolymorphicJoins,\
- _PolymorphicAliasedJoins
-@@ -37,8 +37,8 @@
- pa.Manager.manager_name=='dogbert')).\
- order_by(pa.Engineer.type).all(),
- [
-- (u'dilbert', u'java', None),
-- (u'dogbert', None, u'dogbert'),
-+ ('dilbert', 'java', None),
-+ ('dogbert', None, 'dogbert'),
- ]
- )
-
-@@ -63,10 +63,10 @@
- )
- ).order_by(pa.name, pa_alias.name)],
- [
-- (u'dilbert', Engineer, u'dilbert', Engineer),
-- (u'dogbert', Manager, u'pointy haired boss', Boss),
-- (u'vlad', Engineer, u'vlad', Engineer),
-- (u'wally', Engineer, u'wally', Engineer)
-+ ('dilbert', Engineer, 'dilbert', Engineer),
-+ ('dogbert', Manager, 'pointy haired boss', Boss),
-+ ('vlad', Engineer, 'vlad', Engineer),
-+ ('wally', Engineer, 'wally', Engineer)
- ]
- )
-
-@@ -91,10 +91,10 @@
- )
- ).order_by(pa.name, pa_alias.name)],
- [
-- (u'dilbert', u'java', u'dilbert', u'java'),
-- (u'dogbert', None, u'pointy haired boss', None),
-- (u'vlad', u'cobol', u'vlad', u'cobol'),
-- (u'wally', u'c++', u'wally', u'c++')
-+ ('dilbert', 'java', 'dilbert', 'java'),
-+ ('dogbert', None, 'pointy haired boss', None),
-+ ('vlad', 'cobol', 'vlad', 'cobol'),
-+ ('wally', 'c++', 'wally', 'c++')
- ]
- )
-
-diff -r 9d0639b9d3be test/orm/test_assorted_eager.py
---- a/test/orm/test_assorted_eager.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_assorted_eager.py Sat Apr 27 19:42:17 2013 -0400
-@@ -129,7 +129,7 @@
- # 3 "
-
- # not orm style correct query
-- print "Obtaining correct results without orm"
-+ print("Obtaining correct results without orm")
- result = sa.select(
- [tests.c.id,categories.c.name],
- sa.and_(tests.c.owner_id == 1,
-@@ -140,7 +140,7 @@
- tests.c.id == options.c.test_id,
- tests.c.owner_id == options.c.owner_id))]
- ).execute().fetchall()
-- eq_(result, [(1, u'Some Category'), (3, u'Some Category')])
-+ eq_(result, [(1, 'Some Category'), (3, 'Some Category')])
-
- def test_withoutjoinedload(self):
- Thing, tests, options = (self.classes.Thing,
-@@ -158,7 +158,7 @@
- options.c.someoption==False))))
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
-- eq_(result, [u'1 Some Category', u'3 Some Category'])
-+ eq_(result, ['1 Some Category', '3 Some Category'])
-
- def test_withjoinedload(self):
- """
-@@ -185,7 +185,7 @@
- options.c.someoption==False))))
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
-- eq_(result, [u'1 Some Category', u'3 Some Category'])
-+ eq_(result, ['1 Some Category', '3 Some Category'])
-
- def test_dslish(self):
- """test the same as withjoinedload except using generative"""
-@@ -203,7 +203,7 @@
- ).outerjoin('owner_option')
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
-- eq_(result, [u'1 Some Category', u'3 Some Category'])
-+ eq_(result, ['1 Some Category', '3 Some Category'])
-
- @testing.crashes('sybase', 'FIXME: unknown, verify not fails_on')
- def test_without_outerjoin_literal(self):
-@@ -219,7 +219,7 @@
- join('owner_option'))
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
-- eq_(result, [u'3 Some Category'])
-+ eq_(result, ['3 Some Category'])
-
- def test_withoutouterjoin(self):
- Thing, tests, options = (self.classes.Thing,
-@@ -234,7 +234,7 @@
- ).join('owner_option')
-
- result = ["%d %s" % ( t.id,t.category.name ) for t in l]
-- eq_(result, [u'3 Some Category'])
-+ eq_(result, ['3 Some Category'])
-
-
- class EagerTest2(fixtures.MappedTest):
-@@ -764,7 +764,7 @@
- (1,),),
-
- task=(('title', 'task_type_id', 'status_id', 'prj_id'),
-- (u'task 1', 1, 1, 1)))
-+ ('task 1', 1, 1, 1)))
-
- @classmethod
- def setup_classes(cls):
-@@ -804,7 +804,7 @@
- session = create_session()
-
- eq_(session.query(Joined).limit(10).offset(0).one(),
-- Joined(id=1, title=u'task 1', props_cnt=0))
-+ Joined(id=1, title='task 1', props_cnt=0))
-
-
- class EagerTest9(fixtures.MappedTest):
-diff -r 9d0639b9d3be test/orm/test_attributes.py
---- a/test/orm/test_attributes.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_attributes.py Sat Apr 27 19:42:17 2013 -0400
-@@ -838,7 +838,7 @@
- attributes.register_attribute(Foo, 'collection',
- uselist=True, typecallable=dict, useobject=True)
- assert False
-- except sa_exc.ArgumentError, e:
-+ except sa_exc.ArgumentError as e:
- assert str(e) \
- == 'Type InstrumentedDict must elect an appender '\
- 'method to be a collection class'
-@@ -865,7 +865,7 @@
- attributes.register_attribute(Foo, 'collection',
- uselist=True, typecallable=MyColl, useobject=True)
- assert False
-- except sa_exc.ArgumentError, e:
-+ except sa_exc.ArgumentError as e:
- assert str(e) \
- == 'Type MyColl must elect an appender method to be a '\
- 'collection class'
-@@ -889,7 +889,7 @@
- try:
- Foo().collection
- assert True
-- except sa_exc.ArgumentError, e:
-+ except sa_exc.ArgumentError as e:
- assert False
-
- class GetNoValueTest(fixtures.ORMTest):
-@@ -1410,7 +1410,7 @@
- class Foo(fixtures.BasicEntity):
- pass
- class Bar(fixtures.BasicEntity):
-- def __nonzero__(self):
-+ def __bool__(self):
- assert False
-
- instrumentation.register_class(Foo)
-diff -r 9d0639b9d3be test/orm/test_collection.py
---- a/test/orm/test_collection.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_collection.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1106,7 +1106,7 @@
-
- @collection.converter
- def _convert(self, dictlike):
-- for key, value in dictlike.iteritems():
-+ for key, value in dictlike.items():
- yield value + 5
-
- class Foo(object):
-@@ -1145,12 +1145,12 @@
- def __delitem__(self, key):
- del self.data[key]
- def values(self):
-- return self.data.values()
-+ return list(self.data.values())
- def __contains__(self, key):
- return key in self.data
- @collection.iterator
- def itervalues(self):
-- return self.data.itervalues()
-+ return iter(self.data.values())
- __hash__ = object.__hash__
- def __eq__(self, other):
- return self.data == other
-@@ -1158,7 +1158,7 @@
- return 'DictLike(%s)' % repr(self.data)
-
- self._test_adapter(DictLike, self.dictable_entity,
-- to_set=lambda c: set(c.itervalues()))
-+ to_set=lambda c: set(c.values()))
- self._test_dict(DictLike)
- self._test_dict_bulk(DictLike)
- self.assert_(getattr(DictLike, '_sa_instrumented') == id(DictLike))
-@@ -1185,12 +1185,12 @@
- def __delitem__(self, key):
- del self.data[key]
- def values(self):
-- return self.data.values()
-+ return list(self.data.values())
- def __contains__(self, key):
- return key in self.data
- @collection.iterator
- def itervalues(self):
-- return self.data.itervalues()
-+ return iter(self.data.values())
- __hash__ = object.__hash__
- def __eq__(self, other):
- return self.data == other
-@@ -1198,7 +1198,7 @@
- return 'DictIsh(%s)' % repr(self.data)
-
- self._test_adapter(DictIsh, self.dictable_entity,
-- to_set=lambda c: set(c.itervalues()))
-+ to_set=lambda c: set(c.values()))
- self._test_dict(DictIsh)
- self._test_dict_bulk(DictIsh)
- self.assert_(getattr(DictIsh, '_sa_instrumented') == id(DictIsh))
-@@ -1859,7 +1859,7 @@
- f = sess.query(Foo).get(f.col1)
- assert len(list(f.bars)) == 2
-
-- existing = set([id(b) for b in f.bars.values()])
-+ existing = set([id(b) for b in list(f.bars.values())])
-
- col = collections.collection_adapter(f.bars)
- col.append_with_event(Bar('b'))
-@@ -1869,7 +1869,7 @@
- f = sess.query(Foo).get(f.col1)
- assert len(list(f.bars)) == 2
-
-- replaced = set([id(b) for b in f.bars.values()])
-+ replaced = set([id(b) for b in list(f.bars.values())])
- self.assert_(existing != replaced)
-
- def test_list(self):
-diff -r 9d0639b9d3be test/orm/test_composites.py
---- a/test/orm/test_composites.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_composites.py Sat Apr 27 19:42:17 2013 -0400
-@@ -510,11 +510,11 @@
- session.commit()
- eq_(
- testing.db.execute(descriptions.select()).fetchall(),
-- [(1, u'Color', u'Number')]
-+ [(1, 'Color', 'Number')]
- )
- eq_(
- testing.db.execute(values.select()).fetchall(),
-- [(1, 1, u'Red', u'5'), (2, 1, u'Blue', u'1')]
-+ [(1, 1, 'Red', '5'), (2, 1, 'Blue', '1')]
- )
-
- class ManyToOneTest(fixtures.MappedTest):
-diff -r 9d0639b9d3be test/orm/test_deprecations.py
---- a/test/orm/test_deprecations.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_deprecations.py Sat Apr 27 19:42:17 2013 -0400
-@@ -122,7 +122,7 @@
- session = sessionmaker(query_cls=MyQuery)()
-
- ad1 = session.query(Address).get(1)
-- assert ad1 in cache.values()
-+ assert ad1 in list(cache.values())
-
- def test_load(self):
- """x = session.query(Address).load(1)
-diff -r 9d0639b9d3be test/orm/test_dynamic.py
---- a/test/orm/test_dynamic.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_dynamic.py Sat Apr 27 19:42:17 2013 -0400
-@@ -119,9 +119,9 @@
- eq_(
- list(u.addresses.order_by(desc(Address.email_address))),
- [
-- Address(email_address=u'ed@wood.com'),
-- Address(email_address=u'ed@lala.com'),
-- Address(email_address=u'ed@bettyboop.com')
-+ Address(email_address='ed@wood.com'),
-+ Address(email_address='ed@lala.com'),
-+ Address(email_address='ed@bettyboop.com')
- ]
- )
-
-@@ -137,9 +137,9 @@
- eq_(
- list(u.addresses),
- [
-- Address(email_address=u'ed@wood.com'),
-- Address(email_address=u'ed@lala.com'),
-- Address(email_address=u'ed@bettyboop.com')
-+ Address(email_address='ed@wood.com'),
-+ Address(email_address='ed@lala.com'),
-+ Address(email_address='ed@bettyboop.com')
- ]
- )
-
-@@ -147,9 +147,9 @@
- eq_(
- list(u.addresses.order_by(None).order_by(Address.email_address)),
- [
-- Address(email_address=u'ed@bettyboop.com'),
-- Address(email_address=u'ed@lala.com'),
-- Address(email_address=u'ed@wood.com')
-+ Address(email_address='ed@bettyboop.com'),
-+ Address(email_address='ed@lala.com'),
-+ Address(email_address='ed@wood.com')
- ]
- )
-
-@@ -157,9 +157,9 @@
- eq_(
- set(u.addresses.order_by(None)),
- set([
-- Address(email_address=u'ed@bettyboop.com'),
-- Address(email_address=u'ed@lala.com'),
-- Address(email_address=u'ed@wood.com')
-+ Address(email_address='ed@bettyboop.com'),
-+ Address(email_address='ed@lala.com'),
-+ Address(email_address='ed@wood.com')
- ])
- )
-
-@@ -529,12 +529,12 @@
- "SELECT addresses.id AS addresses_id, addresses.email_address "
- "AS addresses_email_address FROM addresses "
- "WHERE addresses.id = :param_1",
-- lambda ctx: [{u'param_1': a2_id}]
-+ lambda ctx: [{'param_1': a2_id}]
- ),
- CompiledSQL(
- "UPDATE addresses SET user_id=:user_id WHERE addresses.id = "
- ":addresses_id",
-- lambda ctx: [{u'addresses_id': a2_id, 'user_id': None}]
-+ lambda ctx: [{'addresses_id': a2_id, 'user_id': None}]
- )
- )
-
-diff -r 9d0639b9d3be test/orm/test_eager_relations.py
---- a/test/orm/test_eager_relations.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_eager_relations.py Sat Apr 27 19:42:17 2013 -0400
-@@ -747,11 +747,11 @@
-
- sess = create_session()
- eq_(sess.query(User).first(),
-- User(name=u'jack',orders=[
-- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
-- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
-- Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5)],
-- email_address=u'jack@bean.com',id=7)
-+ User(name='jack',orders=[
-+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1),
-+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3),
-+ Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5)],
-+ email_address='jack@bean.com',id=7)
- )
-
- def test_useget_cancels_eager(self):
-@@ -2131,15 +2131,15 @@
- })
-
- sess = create_session()
-- w1 = Widget(name=u'w1')
-- w2 = Widget(name=u'w2')
-+ w1 = Widget(name='w1')
-+ w2 = Widget(name='w2')
- w1.children.append(w2)
- sess.add(w1)
- sess.flush()
- sess.expunge_all()
-
- eq_([Widget(name='w1', children=[Widget(name='w2')])],
-- sess.query(Widget).filter(Widget.name==u'w1').all())
-+ sess.query(Widget).filter(Widget.name=='w1').all())
-
- class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
- run_setup_mappers = 'once'
-@@ -2223,24 +2223,24 @@
- eq_(
- [
- (
-- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
-- Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
-- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
-- Order(description=u'order 3', isopen=1, items=[Item(description=u'item 3'), Item(description=u'item 4'), Item(description=u'item 5')])
-+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
-+ Order(description='order 2', isopen=0, items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
-+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
-+ Order(description='order 3', isopen=1, items=[Item(description='item 3'), Item(description='item 4'), Item(description='item 5')])
- ),
-
- (
-- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
-- Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
-- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
-- Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
-+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
-+ Order(description='order 2', isopen=0, items=[Item(description='item 1'), Item(description='item 2'), Item(description='item 3')]),
-+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
-+ Order(address_id=None, description='order 5', isopen=0, items=[Item(description='item 5')])
- ),
-
- (
-- User(addresses=[Address(email_address=u'fred@fred.com')], name=u'fred'),
-- Order(description=u'order 4', isopen=1, items=[Item(description=u'item 1'), Item(description=u'item 5')]),
-- User(addresses=[Address(email_address=u'jack@bean.com')], name=u'jack'),
-- Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
-+ User(addresses=[Address(email_address='fred@fred.com')], name='fred'),
-+ Order(description='order 4', isopen=1, items=[Item(description='item 1'), Item(description='item 5')]),
-+ User(addresses=[Address(email_address='jack@bean.com')], name='jack'),
-+ Order(address_id=None, description='order 5', isopen=0, items=[Item(description='item 5')])
- ),
- ],
- sess.query(User, Order, u1, o1).\
-@@ -2651,9 +2651,9 @@
- Movie = self.classes.Movie
-
- session = Session(testing.db)
-- rscott = Director(name=u"Ridley Scott")
-- alien = Movie(title=u"Alien")
-- brunner = Movie(title=u"Blade Runner")
-+ rscott = Director(name="Ridley Scott")
-+ alien = Movie(title="Alien")
-+ brunner = Movie(title="Blade Runner")
- rscott.movies.append(brunner)
- rscott.movies.append(alien)
- session.add_all([rscott, alien, brunner])
-diff -r 9d0639b9d3be test/orm/test_expire.py
---- a/test/orm/test_expire.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_expire.py Sat Apr 27 19:42:17 2013 -0400
-@@ -850,7 +850,7 @@
- assert len(u.addresses) == 3
- sess.expire(u)
- assert 'addresses' not in u.__dict__
-- print "-------------------------------------------"
-+ print("-------------------------------------------")
- sess.query(User).filter_by(id=8).all()
- assert 'addresses' in u.__dict__
- assert len(u.addresses) == 3
-diff -r 9d0639b9d3be test/orm/test_froms.py
---- a/test/orm/test_froms.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_froms.py Sat Apr 27 19:42:17 2013 -0400
-@@ -315,12 +315,12 @@
- from_self(User.name, ualias.name).
- order_by(User.name, ualias.name).all(),
- [
-- (u'chuck', u'ed'),
-- (u'chuck', u'fred'),
-- (u'chuck', u'jack'),
-- (u'ed', u'jack'),
-- (u'fred', u'ed'),
-- (u'fred', u'jack')
-+ ('chuck', 'ed'),
-+ ('chuck', 'fred'),
-+ ('chuck', 'jack'),
-+ ('ed', 'jack'),
-+ ('fred', 'ed'),
-+ ('fred', 'jack')
- ]
- )
-
-@@ -330,7 +330,7 @@
- from_self(User.name, ualias.name).
- filter(ualias.name=='ed')\
- .order_by(User.name, ualias.name).all(),
-- [(u'chuck', u'ed'), (u'fred', u'ed')]
-+ [('chuck', 'ed'), ('fred', 'ed')]
- )
-
- eq_(
-@@ -340,11 +340,11 @@
- join(ualias.addresses).
- order_by(ualias.name, Address.email_address).all(),
- [
-- (u'ed', u'fred@fred.com'),
-- (u'jack', u'ed@bettyboop.com'),
-- (u'jack', u'ed@lala.com'),
-- (u'jack', u'ed@wood.com'),
-- (u'jack', u'fred@fred.com')]
-+ ('ed', 'fred@fred.com'),
-+ ('jack', 'ed@bettyboop.com'),
-+ ('jack', 'ed@lala.com'),
-+ ('jack', 'ed@wood.com'),
-+ ('jack', 'fred@fred.com')]
- )
-
-
-@@ -629,8 +629,8 @@
- eq_(
- q.all(),
- [(
-- A(bid=2, id=1, name=u'a1', type=u'a'),
-- C(age=3, id=2, name=u'c1', type=u'c')
-+ A(bid=2, id=1, name='a1', type='a'),
-+ C(age=3, id=2, name='c1', type='c')
- )]
- )
-
-@@ -642,8 +642,8 @@
- eq_(
- q.all(),
- [(
-- C(age=3, id=2, name=u'c1', type=u'c'),
-- A(bid=2, id=1, name=u'a1', type=u'a')
-+ C(age=3, id=2, name='c1', type='c'),
-+ A(bid=2, id=1, name='a1', type='a')
- )]
- )
-
-@@ -929,12 +929,12 @@
- # have a Dingaling here due to using the inner
- # join for the eager load
- [
-- User(name=u'ed', addresses=[
-- Address(email_address=u'ed@wood.com',
-+ User(name='ed', addresses=[
-+ Address(email_address='ed@wood.com',
- dingaling=Dingaling(data='ding 1/2')),
- ]),
-- User(name=u'fred', addresses=[
-- Address(email_address=u'fred@fred.com',
-+ User(name='fred', addresses=[
-+ Address(email_address='fred@fred.com',
- dingaling=Dingaling(data='ding 2/5'))
- ])
- ]
-@@ -965,12 +965,12 @@
- # have a Dingaling here due to using the inner
- # join for the eager load
- [
-- User(name=u'ed', addresses=[
-- Address(email_address=u'ed@wood.com',
-+ User(name='ed', addresses=[
-+ Address(email_address='ed@wood.com',
- dingaling=Dingaling(data='ding 1/2')),
- ]),
-- User(name=u'fred', addresses=[
-- Address(email_address=u'fred@fred.com',
-+ User(name='fred', addresses=[
-+ Address(email_address='fred@fred.com',
- dingaling=Dingaling(data='ding 2/5'))
- ])
- ]
-@@ -998,11 +998,11 @@
- contains_eager(User.orders)).order_by(User.id,
- Order.id).offset(1).limit(2).all()
- eq_(l, [User(id=7,
-- addresses=[Address(email_address=u'jack@bean.com',
-- user_id=7, id=1)], name=u'jack',
-+ addresses=[Address(email_address='jack@bean.com',
-+ user_id=7, id=1)], name='jack',
- orders=[Order(address_id=1, user_id=7,
-- description=u'order 3', isopen=1, id=3),
-- Order(address_id=None, user_id=7, description=u'order 5'
-+ description='order 3', isopen=1, id=3),
-+ Order(address_id=None, user_id=7, description='order 5'
- , isopen=0, id=5)])])
-
- self.assert_sql_count(testing.db, go, 1)
-@@ -1020,11 +1020,11 @@
- order_by(User.id, oalias.id).\
- offset(1).limit(2).all()
- eq_(l, [User(id=7,
-- addresses=[Address(email_address=u'jack@bean.com',
-- user_id=7, id=1)], name=u'jack',
-+ addresses=[Address(email_address='jack@bean.com',
-+ user_id=7, id=1)], name='jack',
- orders=[Order(address_id=1, user_id=7,
-- description=u'order 3', isopen=1, id=3),
-- Order(address_id=None, user_id=7, description=u'order 5'
-+ description='order 3', isopen=1, id=3),
-+ Order(address_id=None, user_id=7, description='order 5'
- , isopen=0, id=5)])])
-
- self.assert_sql_count(testing.db, go, 1)
-@@ -1045,15 +1045,15 @@
- sel = users.select(User.id.in_([7, 8])).alias()
- q = sess.query(User)
- q2 = q.select_from(sel).values(User.name)
-- eq_(list(q2), [(u'jack',), (u'ed',)])
-+ eq_(list(q2), [('jack',), ('ed',)])
-
- q = sess.query(User)
- q2 = q.order_by(User.id).\
- values(User.name, User.name + " " + cast(User.id, String(50)))
- eq_(
- list(q2),
-- [(u'jack', u'jack 7'), (u'ed', u'ed 8'),
-- (u'fred', u'fred 9'), (u'chuck', u'chuck 10')]
-+ [('jack', 'jack 7'), ('ed', 'ed 8'),
-+ ('fred', 'fred 9'), ('chuck', 'chuck 10')]
- )
-
- q2 = q.join('addresses').\
-@@ -1061,27 +1061,27 @@
- order_by(User.id, Address.id).\
- values(User.name, Address.email_address)
- eq_(list(q2),
-- [(u'ed', u'ed@wood.com'), (u'ed', u'ed@bettyboop.com'),
-- (u'ed', u'ed@lala.com'), (u'fred', u'fred@fred.com')])
-+ [('ed', 'ed@wood.com'), ('ed', 'ed@bettyboop.com'),
-+ ('ed', 'ed@lala.com'), ('fred', 'fred@fred.com')])
-
- q2 = q.join('addresses').\
- filter(User.name.like('%e%')).\
- order_by(desc(Address.email_address)).\
- slice(1, 3).values(User.name, Address.email_address)
-- eq_(list(q2), [(u'ed', u'ed@wood.com'), (u'ed', u'ed@lala.com')])
-+ eq_(list(q2), [('ed', 'ed@wood.com'), ('ed', 'ed@lala.com')])
-
- adalias = aliased(Address)
- q2 = q.join(adalias, 'addresses').\
- filter(User.name.like('%e%')).order_by(adalias.email_address).\
- values(User.name, adalias.email_address)
-- eq_(list(q2), [(u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'),
-- (u'ed', u'ed@wood.com'), (u'fred', u'fred@fred.com')])
-+ eq_(list(q2), [('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
-+ ('ed', 'ed@wood.com'), ('fred', 'fred@fred.com')])
-
- q2 = q.values(func.count(User.name))
-- assert q2.next() == (4,)
-+ assert next(q2) == (4,)
-
- q2 = q.select_from(sel).filter(User.id==8).values(User.name, sel.c.name, User.name)
-- eq_(list(q2), [(u'ed', u'ed', u'ed')])
-+ eq_(list(q2), [('ed', 'ed', 'ed')])
-
-         # using User.xxx is aliased against "sel", so this query returns nothing
- q2 = q.select_from(sel).\
-@@ -1093,7 +1093,7 @@
- q2 = q.select_from(sel).\
- filter(users.c.id==8).\
- filter(users.c.id>sel.c.id).values(users.c.name, sel.c.name, User.name)
-- eq_(list(q2), [(u'ed', u'jack', u'jack')])
-+ eq_(list(q2), [('ed', 'jack', 'jack')])
-
- def test_alias_naming(self):
- User = self.classes.User
-@@ -1123,10 +1123,10 @@
- filter(u2.id>1).\
- order_by(User.id, sel.c.id, u2.id).\
- values(User.name, sel.c.name, u2.name)
-- eq_(list(q2), [(u'jack', u'jack', u'jack'), (u'jack', u'jack', u'ed'),
-- (u'jack', u'jack', u'fred'), (u'jack', u'jack', u'chuck'),
-- (u'ed', u'ed', u'jack'), (u'ed', u'ed', u'ed'),
-- (u'ed', u'ed', u'fred'), (u'ed', u'ed', u'chuck')])
-+ eq_(list(q2), [('jack', 'jack', 'jack'), ('jack', 'jack', 'ed'),
-+ ('jack', 'jack', 'fred'), ('jack', 'jack', 'chuck'),
-+ ('ed', 'ed', 'jack'), ('ed', 'ed', 'ed'),
-+ ('ed', 'ed', 'fred'), ('ed', 'ed', 'chuck')])
-
- @testing.fails_on('mssql', 'FIXME: unknown')
- @testing.fails_on('oracle',
-@@ -1177,8 +1177,8 @@
- # we don't want Address to be outside of the subquery here
- eq_(
- list(sess.query(User, subq)[0:3]),
-- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
-- (User(id=9,name=u'fred'), 1)]
-+ [(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
-+ (User(id=9,name='fred'), 1)]
- )
-
- # same thing without the correlate, as it should
-@@ -1190,8 +1190,8 @@
- # we don't want Address to be outside of the subquery here
- eq_(
- list(sess.query(User, subq)[0:3]),
-- [(User(id=7,name=u'jack'), 1), (User(id=8,name=u'ed'), 3),
-- (User(id=9,name=u'fred'), 1)]
-+ [(User(id=7,name='jack'), 1), (User(id=8,name='ed'), 3),
-+ (User(id=9,name='fred'), 1)]
- )
-
-
-@@ -1202,23 +1202,23 @@
-
- sess = create_session()
-
-- eq_(sess.query(User.name).all(), [(u'jack',), (u'ed',), (u'fred',), (u'chuck',)])
-+ eq_(sess.query(User.name).all(), [('jack',), ('ed',), ('fred',), ('chuck',)])
-
- sel = users.select(User.id.in_([7, 8])).alias()
- q = sess.query(User.name)
- q2 = q.select_from(sel).all()
-- eq_(list(q2), [(u'jack',), (u'ed',)])
-+ eq_(list(q2), [('jack',), ('ed',)])
-
- eq_(sess.query(User.name, Address.email_address).filter(User.id==Address.user_id).all(), [
-- (u'jack', u'jack@bean.com'), (u'ed', u'ed@wood.com'),
-- (u'ed', u'ed@bettyboop.com'), (u'ed', u'ed@lala.com'),
-- (u'fred', u'fred@fred.com')
-+ ('jack', 'jack@bean.com'), ('ed', 'ed@wood.com'),
-+ ('ed', 'ed@bettyboop.com'), ('ed', 'ed@lala.com'),
-+ ('fred', 'fred@fred.com')
- ])
-
- eq_(sess.query(User.name, func.count(Address.email_address)).\
- outerjoin(User.addresses).group_by(User.id, User.name).\
- order_by(User.id).all(),
-- [(u'jack', 1), (u'ed', 3), (u'fred', 1), (u'chuck', 0)]
-+ [('jack', 1), ('ed', 3), ('fred', 1), ('chuck', 0)]
- )
-
- eq_(sess.query(User, func.count(Address.email_address)).\
-@@ -1246,8 +1246,8 @@
- eq_(sess.query(func.count(adalias.email_address), User).\
- outerjoin(adalias, User.addresses).group_by(User).\
- order_by(User.id).all(),
-- [(1, User(name=u'jack',id=7)), (3, User(name=u'ed',id=8)),
-- (1, User(name=u'fred',id=9)), (0, User(name=u'chuck',id=10))]
-+ [(1, User(name='jack',id=7)), (3, User(name='ed',id=8)),
-+ (1, User(name='fred',id=9)), (0, User(name='chuck',id=10))]
- )
-
- # select from aliasing + explicit aliasing
-@@ -1257,12 +1257,12 @@
- from_self(User, adalias.email_address).\
- order_by(User.id, adalias.id).all(),
- [
-- (User(name=u'jack',id=7), u'jack@bean.com'),
-- (User(name=u'ed',id=8), u'ed@wood.com'),
-- (User(name=u'ed',id=8), u'ed@bettyboop.com'),
-- (User(name=u'ed',id=8), u'ed@lala.com'),
-- (User(name=u'fred',id=9), u'fred@fred.com'),
-- (User(name=u'chuck',id=10), None)
-+ (User(name='jack',id=7), 'jack@bean.com'),
-+ (User(name='ed',id=8), 'ed@wood.com'),
-+ (User(name='ed',id=8), 'ed@bettyboop.com'),
-+ (User(name='ed',id=8), 'ed@lala.com'),
-+ (User(name='fred',id=9), 'fred@fred.com'),
-+ (User(name='chuck',id=10), None)
- ]
- )
-
-@@ -1272,8 +1272,8 @@
- filter(Address.email_address.like('%ed%')).\
- from_self().all(),
- [
-- User(name=u'ed',id=8),
-- User(name=u'fred',id=9),
-+ User(name='ed',id=8),
-+ User(name='fred',id=9),
- ]
- )
-
-@@ -1293,27 +1293,27 @@
-
- q.all(),
- [(User(addresses=[
-- Address(user_id=7,email_address=u'jack@bean.com',id=1)],
-- name=u'jack',id=7), u'jack@bean.com'),
-+ Address(user_id=7,email_address='jack@bean.com',id=1)],
-+ name='jack',id=7), 'jack@bean.com'),
- (User(addresses=[
-- Address(user_id=8,email_address=u'ed@wood.com',id=2),
-- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
-- Address(user_id=8,email_address=u'ed@lala.com',id=4)],
-- name=u'ed',id=8), u'ed@wood.com'),
-+ Address(user_id=8,email_address='ed@wood.com',id=2),
-+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
-+ Address(user_id=8,email_address='ed@lala.com',id=4)],
-+ name='ed',id=8), 'ed@wood.com'),
- (User(addresses=[
-- Address(user_id=8,email_address=u'ed@wood.com',id=2),
-- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
-- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
-- u'ed@bettyboop.com'),
-+ Address(user_id=8,email_address='ed@wood.com',id=2),
-+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
-+ Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
-+ 'ed@bettyboop.com'),
- (User(addresses=[
-- Address(user_id=8,email_address=u'ed@wood.com',id=2),
-- Address(user_id=8,email_address=u'ed@bettyboop.com',id=3),
-- Address(user_id=8,email_address=u'ed@lala.com',id=4)],name=u'ed',id=8),
-- u'ed@lala.com'),
-- (User(addresses=[Address(user_id=9,email_address=u'fred@fred.com',id=5)],name=u'fred',id=9),
-- u'fred@fred.com'),
-+ Address(user_id=8,email_address='ed@wood.com',id=2),
-+ Address(user_id=8,email_address='ed@bettyboop.com',id=3),
-+ Address(user_id=8,email_address='ed@lala.com',id=4)],name='ed',id=8),
-+ 'ed@lala.com'),
-+ (User(addresses=[Address(user_id=9,email_address='fred@fred.com',id=5)],name='fred',id=9),
-+ 'fred@fred.com'),
-
-- (User(addresses=[],name=u'chuck',id=10), None)]
-+ (User(addresses=[],name='chuck',id=10), None)]
- )
-
- def test_column_from_limited_joinedload(self):
-@@ -1367,19 +1367,19 @@
- eq_(
- q.all(),
- [
-- (Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
-- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
-- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
-- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1)),
-- (Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5),
-- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3))
-+ (Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3),
-+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
-+ (Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
-+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1)),
-+ (Order(address_id=None,description='order 5',isopen=0,user_id=7,id=5),
-+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3))
- ]
- )
-
-
- # ensure column expressions are taken from inside the subquery, not restated at the top
- q = sess.query(Order.id, Order.description, literal_column("'q'").label('foo')).\
-- filter(Order.description == u'order 3').from_self()
-+ filter(Order.description == 'order 3').from_self()
- self.assert_compile(q,
- "SELECT anon_1.orders_id AS "
- "anon_1_orders_id, anon_1.orders_descriptio"
-@@ -1392,7 +1392,7 @@
- "anon_1")
- eq_(
- q.all(),
-- [(3, u'order 3', 'q')]
-+ [(3, 'order 3', 'q')]
- )
-
-
-@@ -1518,7 +1518,7 @@
-
- eq_(
- sess.query(User.id).add_column(users).all(),
-- [(7, 7, u'jack'), (8, 8, u'ed'), (9, 9, u'fred'), (10, 10, u'chuck')]
-+ [(7, 7, 'jack'), (8, 8, 'ed'), (9, 9, 'fred'), (10, 10, 'chuck')]
- )
-
- def test_multi_columns_2(self):
-@@ -1936,7 +1936,7 @@
- filter(Keyword.name.in_(['red', 'big', 'round'])).\
- all(),
- [
-- User(name=u'jack',id=7)
-+ User(name='jack',id=7)
- ])
-
- eq_(sess.query(User).select_from(sel).\
-@@ -1944,7 +1944,7 @@
- filter(Keyword.name.in_(['red', 'big', 'round'])).\
- all(),
- [
-- User(name=u'jack',id=7)
-+ User(name='jack',id=7)
- ])
-
- def go():
-@@ -1955,39 +1955,39 @@
- filter(Keyword.name.in_(['red', 'big', 'round'])).\
- all(),
- [
-- User(name=u'jack',orders=[
-- Order(description=u'order 1',items=[
-- Item(description=u'item 1',
-+ User(name='jack',orders=[
-+ Order(description='order 1',items=[
-+ Item(description='item 1',
- keywords=[
-- Keyword(name=u'red'),
-- Keyword(name=u'big'),
-- Keyword(name=u'round')
-+ Keyword(name='red'),
-+ Keyword(name='big'),
-+ Keyword(name='round')
- ]),
-- Item(description=u'item 2',
-+ Item(description='item 2',
- keywords=[
-- Keyword(name=u'red',id=2),
-- Keyword(name=u'small',id=5),
-- Keyword(name=u'square')
-+ Keyword(name='red',id=2),
-+ Keyword(name='small',id=5),
-+ Keyword(name='square')
- ]),
-- Item(description=u'item 3',
-+ Item(description='item 3',
- keywords=[
-- Keyword(name=u'green',id=3),
-- Keyword(name=u'big',id=4),
-- Keyword(name=u'round',id=6)])
-+ Keyword(name='green',id=3),
-+ Keyword(name='big',id=4),
-+ Keyword(name='round',id=6)])
- ]),
-- Order(description=u'order 3',items=[
-- Item(description=u'item 3',
-+ Order(description='order 3',items=[
-+ Item(description='item 3',
- keywords=[
-- Keyword(name=u'green',id=3),
-- Keyword(name=u'big',id=4),
-- Keyword(name=u'round',id=6)
-+ Keyword(name='green',id=3),
-+ Keyword(name='big',id=4),
-+ Keyword(name='round',id=6)
- ]),
-- Item(description=u'item 4',keywords=[],id=4),
-- Item(description=u'item 5',keywords=[],id=5)
-+ Item(description='item 4',keywords=[],id=4),
-+ Item(description='item 5',keywords=[],id=5)
- ]),
-- Order(description=u'order 5',
-+ Order(description='order 5',
- items=[
-- Item(description=u'item 5',keywords=[])])
-+ Item(description='item 5',keywords=[])])
- ])
- ])
- self.assert_sql_count(testing.db, go, 1)
-@@ -1998,15 +1998,15 @@
- join('items', 'keywords').\
- filter(Keyword.name == 'red').\
- order_by(Order.id).all(), [
-- Order(description=u'order 1',id=1),
-- Order(description=u'order 2',id=2),
-+ Order(description='order 1',id=1),
-+ Order(description='order 2',id=2),
- ])
- eq_(sess.query(Order).select_from(sel2).\
- join('items', 'keywords', aliased=True).\
- filter(Keyword.name == 'red').\
- order_by(Order.id).all(), [
-- Order(description=u'order 1',id=1),
-- Order(description=u'order 2',id=2),
-+ Order(description='order 1',id=1),
-+ Order(description='order 2',id=2),
- ])
-
-
-diff -r 9d0639b9d3be test/orm/test_generative.py
---- a/test/orm/test_generative.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_generative.py Sat Apr 27 19:42:17 2013 -0400
-@@ -78,13 +78,14 @@
- assert sess.query(func.min(foo.c.bar)).filter(foo.c.bar<30).one() == (0,)
-
- assert sess.query(func.max(foo.c.bar)).filter(foo.c.bar<30).one() == (29,)
-- # Py3K
-- #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
-- #assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
-- # Py2K
-- assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
-- assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
-- # end Py2K
-+# start Py3K
-+ assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
-+ assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).__next__()[0] == 29
-+# end Py3K
-+# start Py2K
-+# assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
-+# assert query.filter(foo.c.bar<30).values(sa.func.max(foo.c.bar)).next()[0] == 29
-+# end Py2K
-
- @testing.fails_if(lambda:testing.against('mysql+mysqldb') and
- testing.db.dialect.dbapi.version_info[:4] == (1, 2, 1, 'gamma'),
-@@ -111,18 +112,20 @@
-
- query = create_session().query(Foo)
-
-- # Py3K
-- #avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
-- # Py2K
-- avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
-- # end Py2K
-+# start Py3K
-+ avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
-+# end Py3K
-+# start Py2K
-+# avg_f = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
-+# end Py2K
- assert float(round(avg_f, 1)) == 14.5
-
-- # Py3K
-- #avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
-- # Py2K
-- avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
-- # end Py2K
-+# start Py3K
-+ avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).__next__()[0]
-+# end Py3K
-+# start Py2K
-+# avg_o = query.filter(foo.c.bar<30).values(sa.func.avg(foo.c.bar)).next()[0]
-+# end Py2K
- assert float(round(avg_o, 1)) == 14.5
-
- def test_filter(self):
-diff -r 9d0639b9d3be test/orm/test_inspect.py
---- a/test/orm/test_inspect.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_inspect.py Sat Apr 27 19:42:17 2013 -0400
-@@ -142,7 +142,7 @@
- [insp.get_property('id'), insp.get_property('name')]
- )
- eq_(
-- insp.column_attrs.keys(),
-+ list(insp.column_attrs.keys()),
- ['id', 'name']
- )
- is_(
-@@ -274,7 +274,7 @@
- insp = inspect(SomeSubClass)
- eq_(
- dict((k, v.extension_type)
-- for k, v in insp.all_orm_descriptors.items()
-+ for k, v in list(insp.all_orm_descriptors.items())
- ),
- {
- 'id': NOT_EXTENSION,
-diff -r 9d0639b9d3be test/orm/test_instrumentation.py
---- a/test/orm/test_instrumentation.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_instrumentation.py Sat Apr 27 19:42:17 2013 -0400
-@@ -522,30 +522,31 @@
- class Py3KFunctionInstTest(fixtures.ORMTest):
- __requires__ = ("python3", )
-
-- # Py3K
-- #def _kw_only_fixture(self):
-- # class A(object):
-- # def __init__(self, a, *, b, c):
-- # self.a = a
-- # self.b = b
-- # self.c = c
-- # return self._instrument(A)
-- #
-- #def _kw_plus_posn_fixture(self):
-- # class A(object):
-- # def __init__(self, a, *args, b, c):
-- # self.a = a
-- # self.b = b
-- # self.c = c
-- # return self._instrument(A)
-- #
-- #def _kw_opt_fixture(self):
-- # class A(object):
-- # def __init__(self, a, *, b, c="c"):
-- # self.a = a
-- # self.b = b
-- # self.c = c
-- # return self._instrument(A)
-+# start Py3K
-+ def _kw_only_fixture(self):
-+ class A(object):
-+ def __init__(self, a, *, b, c):
-+ self.a = a
-+ self.b = b
-+ self.c = c
-+ return self._instrument(A)
-+
-+ def _kw_plus_posn_fixture(self):
-+ class A(object):
-+ def __init__(self, a, *args, b, c):
-+ self.a = a
-+ self.b = b
-+ self.c = c
-+ return self._instrument(A)
-+
-+ def _kw_opt_fixture(self):
-+ class A(object):
-+ def __init__(self, a, *, b, c="c"):
-+ self.a = a
-+ self.b = b
-+ self.c = c
-+ return self._instrument(A)
-+# end Py3K
-
- def _instrument(self, cls):
- manager = instrumentation.register_class(cls)
-diff -r 9d0639b9d3be test/orm/test_joins.py
---- a/test/orm/test_joins.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_joins.py Sat Apr 27 19:42:17 2013 -0400
-@@ -774,14 +774,14 @@
- eq_(
- sess.query(User).join(Address.user).\
- filter(Address.email_address=='ed@wood.com').all(),
-- [User(id=8,name=u'ed')]
-+ [User(id=8,name='ed')]
- )
-
- # its actually not so controversial if you view it in terms
- # of multiple entities.
- eq_(
- sess.query(User, Address).join(Address.user).filter(Address.email_address=='ed@wood.com').all(),
-- [(User(id=8,name=u'ed'), Address(email_address='ed@wood.com'))]
-+ [(User(id=8,name='ed'), Address(email_address='ed@wood.com'))]
- )
-
- # this was the controversial part. now, raise an error if the feature is abused.
-@@ -1162,9 +1162,9 @@
- sess.query(OrderAlias).join('items').filter_by(description='item 3').\
- order_by(OrderAlias.id).all(),
- [
-- Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
-- Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2),
-- Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3)
-+ Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1),
-+ Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2),
-+ Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3)
- ]
- )
-
-@@ -1175,9 +1175,9 @@
- filter_by(description='item 3').\
- order_by(User.id, OrderAlias.id).all(),
- [
-- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1), u'item 3'),
-- (User(name=u'jack',id=7), Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3), u'item 3'),
-- (User(name=u'fred',id=9), Order(address_id=4,description=u'order 2',isopen=0,user_id=9,id=2), u'item 3')
-+ (User(name='jack',id=7), Order(address_id=1,description='order 1',isopen=0,user_id=7,id=1), 'item 3'),
-+ (User(name='jack',id=7), Order(address_id=1,description='order 3',isopen=1,user_id=7,id=3), 'item 3'),
-+ (User(name='fred',id=9), Order(address_id=4,description='order 2',isopen=0,user_id=9,id=2), 'item 3')
- ]
- )
-
-@@ -1334,12 +1334,12 @@
- eq_(
- sess.query(User, ualias).filter(User.id > ualias.id).order_by(desc(ualias.id), User.name).all(),
- [
-- (User(id=10,name=u'chuck'), User(id=9,name=u'fred')),
-- (User(id=10,name=u'chuck'), User(id=8,name=u'ed')),
-- (User(id=9,name=u'fred'), User(id=8,name=u'ed')),
-- (User(id=10,name=u'chuck'), User(id=7,name=u'jack')),
-- (User(id=8,name=u'ed'), User(id=7,name=u'jack')),
-- (User(id=9,name=u'fred'), User(id=7,name=u'jack'))
-+ (User(id=10,name='chuck'), User(id=9,name='fred')),
-+ (User(id=10,name='chuck'), User(id=8,name='ed')),
-+ (User(id=9,name='fred'), User(id=8,name='ed')),
-+ (User(id=10,name='chuck'), User(id=7,name='jack')),
-+ (User(id=8,name='ed'), User(id=7,name='jack')),
-+ (User(id=9,name='fred'), User(id=7,name='jack'))
- ]
- )
-
-@@ -1351,7 +1351,7 @@
-
- eq_(
- sess.query(User.name).join(addresses, User.id==addresses.c.user_id).order_by(User.id).all(),
-- [(u'jack',), (u'ed',), (u'ed',), (u'ed',), (u'fred',)]
-+ [('jack',), ('ed',), ('ed',), ('ed',), ('fred',)]
- )
-
- def test_no_joinpoint_expr(self):
-@@ -2066,13 +2066,13 @@
- # using 'n1.parent' implicitly joins to unaliased Node
- eq_(
- sess.query(n1).join(n1.parent).filter(Node.data=='n1').all(),
-- [Node(parent_id=1,data=u'n11',id=2), Node(parent_id=1,data=u'n12',id=3), Node(parent_id=1,data=u'n13',id=4)]
-+ [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
- )
-
- # explicit (new syntax)
- eq_(
- sess.query(n1).join(Node, n1.parent).filter(Node.data=='n1').all(),
-- [Node(parent_id=1,data=u'n11',id=2), Node(parent_id=1,data=u'n12',id=3), Node(parent_id=1,data=u'n13',id=4)]
-+ [Node(parent_id=1,data='n11',id=2), Node(parent_id=1,data='n12',id=3), Node(parent_id=1,data='n13',id=4)]
- )
-
-
-diff -r 9d0639b9d3be test/orm/test_loading.py
---- a/test/orm/test_loading.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_loading.py Sat Apr 27 19:42:17 2013 -0400
-@@ -73,7 +73,7 @@
- [(x.id, y) for x, y in it],
- [(1, 1), (2, 2), (7, 7), (8, 8)]
- )
-- eq_(it[0].keys(), ['User', 'id'])
-+ eq_(list(it[0].keys()), ['User', 'id'])
-
- def test_entity_col_mix_keyed_tuple(self):
- s, (u1, u2, u3, u4) = self._fixture()
-@@ -91,7 +91,7 @@
- [(x.id, y) for x, y in it],
- [(1, 1), (2, 2), (7, 7), (8, 8)]
- )
-- eq_(it[0].keys(), ['User', 'id'])
-+ eq_(list(it[0].keys()), ['User', 'id'])
-
- def test_none_entity(self):
- s, (u1, u2, u3, u4) = self._fixture()
-diff -r 9d0639b9d3be test/orm/test_mapper.py
---- a/test/orm/test_mapper.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_mapper.py Sat Apr 27 19:42:17 2013 -0400
-@@ -272,13 +272,14 @@
- )
- m = mapper(Foo, foo_t)
- class DontCompareMeToString(int):
-- # Py3K
-- # pass
-- # Py2K
-- def __lt__(self, other):
-- assert not isinstance(other, basestring)
-- return int(self) < other
-- # end Py2K
-+# start Py3K
-+ pass
-+# end Py3K
-+# start Py2K
-+# def __lt__(self, other):
-+# assert not isinstance(other, basestring)
-+# return int(self) < other
-+# end Py2K
- foos = [Foo(id='f%d' % i) for i in range(5)]
- states = [attributes.instance_state(f) for f in foos]
-
-@@ -847,7 +848,7 @@
-
- def test_we_dont_call_bool(self):
- class NoBoolAllowed(object):
-- def __nonzero__(self):
-+ def __bool__(self):
- raise Exception("nope")
- mapper(NoBoolAllowed, self.tables.users)
- u1 = NoBoolAllowed()
-@@ -1057,12 +1058,12 @@
-
- eq_(
- create_session().query(User).all(),
-- [User(id=7, name=u'jack'), User(id=9, name=u'fred'), User(id=8, name=u'ed'), User(id=10, name=u'chuck')]
-+ [User(id=7, name='jack'), User(id=9, name='fred'), User(id=8, name='ed'), User(id=10, name='chuck')]
- )
-
- eq_(
- create_session().query(User).order_by(User.name).all(),
-- [User(id=10, name=u'chuck'), User(id=8, name=u'ed'), User(id=9, name=u'fred'), User(id=7, name=u'jack')]
-+ [User(id=10, name='chuck'), User(id=8, name='ed'), User(id=9, name='fred'), User(id=7, name='jack')]
- )
-
- # 'Raises a "expression evaluation not supported" error at prepare time
-@@ -2123,7 +2124,7 @@
- mapper(Address, addresses)
-
- eq_(
-- dict((k, v[0].__name__) for k, v in u_m.validators.items()),
-+ dict((k, v[0].__name__) for k, v in list(u_m.validators.items())),
- {'name':'validate_name',
- 'addresses':'validate_address'}
- )
-@@ -2992,29 +2993,29 @@
- Column('ht1b_id', Integer, ForeignKey('ht1.id'), primary_key=True),
- Column('value', String(10)))
-
-- # Py2K
-- def test_baseclass(self):
-- ht1 = self.tables.ht1
--
-- class OldStyle:
-- pass
--
-- assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
--
-- assert_raises(sa.exc.ArgumentError, mapper, 123)
--
-- class NoWeakrefSupport(str):
-- pass
--
-- # TODO: is weakref support detectable without an instance?
-- #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
-- # end Py2K
-+# start Py2K
-+# def test_baseclass(self):
-+# ht1 = self.tables.ht1
-+#
-+# class OldStyle:
-+# pass
-+#
-+# assert_raises(sa.exc.ArgumentError, mapper, OldStyle, ht1)
-+#
-+# assert_raises(sa.exc.ArgumentError, mapper, 123)
-+#
-+# class NoWeakrefSupport(str):
-+# pass
-+#
-+# # TODO: is weakref support detectable without an instance?
-+# #self.assertRaises(sa.exc.ArgumentError, mapper, NoWeakrefSupport, t2)
-+# end Py2K
-
- class _ValueBase(object):
- def __init__(self, value='abc', id=None):
- self.id = id
- self.value = value
-- def __nonzero__(self):
-+ def __bool__(self):
- return False
- def __hash__(self):
- return hash(self.value)
-@@ -3173,7 +3174,7 @@
- return self.value
-
- class H2(object):
-- def __nonzero__(self):
-+ def __bool__(self):
- return bool(self.get_value())
-
- def get_value(self):
-@@ -3224,7 +3225,7 @@
- self._test("someprop")
-
- def test_unicode(self):
-- self._test(u"someprop")
-+ self._test("someprop")
-
- def test_int(self):
- self._test(5)
-diff -r 9d0639b9d3be test/orm/test_merge.py
---- a/test/orm/test_merge.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_merge.py Sat Apr 27 19:42:17 2013 -0400
-@@ -346,7 +346,7 @@
- sess = create_session()
- sess.merge(u1)
- sess.flush()
-- assert u1.addresses.keys() == ['foo@bar.com']
-+ assert list(u1.addresses.keys()) == ['foo@bar.com']
-
- def test_attribute_cascade(self):
- """Merge of a persistent entity with two child
-@@ -803,7 +803,7 @@
- try:
- sess2.merge(u, load=False)
- assert False
-- except sa.exc.InvalidRequestError, e:
-+ except sa.exc.InvalidRequestError as e:
- assert "merge() with load=False option does not support "\
- "objects marked as 'dirty'. flush() all changes on "\
- "mapped instances before merging with load=False." \
-@@ -924,7 +924,7 @@
- sess2.expunge_all()
- eq_(sess2.query(User).get(u2.id).addresses[0].email_address,
- 'somenewaddress')
-- except sa.exc.InvalidRequestError, e:
-+ except sa.exc.InvalidRequestError as e:
- assert "load=False option does not support" in str(e)
-
- def test_synonym_comparable(self):
-diff -r 9d0639b9d3be test/orm/test_naturalpks.py
---- a/test/orm/test_naturalpks.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_naturalpks.py Sat Apr 27 19:42:17 2013 -0400
-@@ -2,7 +2,7 @@
- Primary key changing capabilities and passive/non-passive cascading updates.
-
- """
--from __future__ import with_statement
-+
- from sqlalchemy.testing import eq_, ne_, \
- assert_raises, assert_raises_message
- import sqlalchemy as sa
-@@ -519,11 +519,11 @@
-
- session = sa.orm.sessionmaker()()
-
-- a_published = User(1, PUBLISHED, u'a')
-+ a_published = User(1, PUBLISHED, 'a')
- session.add(a_published)
- session.commit()
-
-- a_editable = User(1, EDITABLE, u'a')
-+ a_editable = User(1, EDITABLE, 'a')
-
- session.add(a_editable)
- session.commit()
-diff -r 9d0639b9d3be test/orm/test_pickled.py
---- a/test/orm/test_pickled.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_pickled.py Sat Apr 27 19:42:17 2013 -0400
-@@ -443,21 +443,21 @@
- if pickled is not False:
- row = pickle.loads(pickle.dumps(row, pickled))
-
-- eq_(row.keys(), ['User', 'Address'])
-+ eq_(list(row.keys()), ['User', 'Address'])
- eq_(row.User, row[0])
- eq_(row.Address, row[1])
-
- for row in sess.query(User.name, User.id.label('foobar')):
- if pickled is not False:
- row = pickle.loads(pickle.dumps(row, pickled))
-- eq_(row.keys(), ['name', 'foobar'])
-+ eq_(list(row.keys()), ['name', 'foobar'])
- eq_(row.name, row[0])
- eq_(row.foobar, row[1])
-
- for row in sess.query(User).values(User.name, User.id.label('foobar')):
- if pickled is not False:
- row = pickle.loads(pickle.dumps(row, pickled))
-- eq_(row.keys(), ['name', 'foobar'])
-+ eq_(list(row.keys()), ['name', 'foobar'])
- eq_(row.name, row[0])
- eq_(row.foobar, row[1])
-
-@@ -465,21 +465,21 @@
- for row in sess.query(User, oalias).join(User.orders).all():
- if pickled is not False:
- row = pickle.loads(pickle.dumps(row, pickled))
-- eq_(row.keys(), ['User'])
-+ eq_(list(row.keys()), ['User'])
- eq_(row.User, row[0])
-
- oalias = aliased(Order, name='orders')
- for row in sess.query(User, oalias).join(oalias, User.orders).all():
- if pickled is not False:
- row = pickle.loads(pickle.dumps(row, pickled))
-- eq_(row.keys(), ['User', 'orders'])
-+ eq_(list(row.keys()), ['User', 'orders'])
- eq_(row.User, row[0])
- eq_(row.orders, row[1])
-
- # test here that first col is not labeled, only
- # one name in keys, matches correctly
- for row in sess.query(User.name + 'hoho', User.name):
-- eq_(row.keys(), ['name'])
-+ eq_(list(row.keys()), ['name'])
- eq_(row[0], row.name + 'hoho')
-
- if pickled is not False:
-diff -r 9d0639b9d3be test/orm/test_query.py
---- a/test/orm/test_query.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_query.py Sat Apr 27 19:42:17 2013 -0400
-@@ -407,11 +407,12 @@
- Column('data', Unicode(40)))
- try:
- metadata.create_all()
-- # Py3K
-- #ustring = b'petit voix m\xe2\x80\x99a'.decode('utf-8')
-- # Py2K
-- ustring = 'petit voix m\xe2\x80\x99a'.decode('utf-8')
-- # end Py2K
-+# start Py3K
-+ ustring = b'petit voix m\xe2\x80\x99a'.decode('utf-8')
-+# end Py3K
-+# start Py2K
-+# ustring = 'petit voix m\xe2\x80\x99a'.decode('utf-8')
-+# end Py2K
-
- table.insert().execute(id=ustring, data=ustring)
- class LocalFoo(self.classes.Base):
-@@ -637,9 +638,9 @@
- (operator.sub, '-'),
- # Py3k
- #(operator.truediv, '/'),
-- # Py2K
-- (operator.div, '/'),
-- # end Py2K
-+# start Py2K
-+# (operator.div, '/'),
-+# end Py2K
- ):
- for (lhs, rhs, res) in (
- (5, User.id, ':id_1 %s users.id'),
-@@ -1047,7 +1048,7 @@
- adalias = aliased(Address, q1.subquery())
- eq_(
- s.query(User, adalias).join(adalias, User.id==adalias.user_id).all(),
-- [(User(id=7,name=u'jack'), Address(email_address=u'jack@bean.com',user_id=7,id=1))]
-+ [(User(id=7,name='jack'), Address(email_address='jack@bean.com',user_id=7,id=1))]
- )
-
- # more slice tests are available in test/orm/generative.py
-@@ -1196,7 +1197,7 @@
- "users.name AS users_name FROM users WHERE users.id = :param_1 "
- "UNION SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = :param_2) AS anon_1",
-- checkparams = {u'param_1': 7, u'param_2': 8}
-+ checkparams = {'param_1': 7, 'param_2': 8}
- )
-
- def test_any(self):
-@@ -1319,7 +1320,7 @@
- "FROM users JOIN addresses ON users.id = addresses.user_id "
- "WHERE users.name = :name_1 AND "
- "addresses.email_address = :email_address_1",
-- checkparams={u'email_address_1': 'ed@ed.com', u'name_1': 'ed'}
-+ checkparams={'email_address_1': 'ed@ed.com', 'name_1': 'ed'}
- )
-
- def test_filter_by_no_property(self):
-@@ -1468,14 +1469,14 @@
- for q in (q3.order_by(User.id, "anon_1_param_1"), q6.order_by(User.id, "foo")):
- eq_(q.all(),
- [
-- (User(id=7, name=u'jack'), u'x'),
-- (User(id=7, name=u'jack'), u'y'),
-- (User(id=8, name=u'ed'), u'x'),
-- (User(id=8, name=u'ed'), u'y'),
-- (User(id=9, name=u'fred'), u'x'),
-- (User(id=9, name=u'fred'), u'y'),
-- (User(id=10, name=u'chuck'), u'x'),
-- (User(id=10, name=u'chuck'), u'y')
-+ (User(id=7, name='jack'), 'x'),
-+ (User(id=7, name='jack'), 'y'),
-+ (User(id=8, name='ed'), 'x'),
-+ (User(id=8, name='ed'), 'y'),
-+ (User(id=9, name='fred'), 'x'),
-+ (User(id=9, name='fred'), 'y'),
-+ (User(id=10, name='chuck'), 'x'),
-+ (User(id=10, name='chuck'), 'y')
- ]
- )
-
-@@ -1603,7 +1604,7 @@
-
- sess = create_session()
- orders = sess.query(Order).filter(Order.id.in_([2, 3, 4]))
-- eq_(orders.values(func.sum(Order.user_id * Order.address_id)).next(), (79,))
-+ eq_(next(orders.values(func.sum(Order.user_id * Order.address_id))), (79,))
- eq_(orders.value(func.sum(Order.user_id * Order.address_id)), 79)
-
- def test_apply(self):
-@@ -1616,9 +1617,9 @@
- User, Address = self.classes.User, self.classes.Address
-
- sess = create_session()
-- assert [User(name=u'ed',id=8)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)> 2).all()
--
-- assert [User(name=u'jack',id=7), User(name=u'fred',id=9)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)< 2).all()
-+ assert [User(name='ed',id=8)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)> 2).all()
-+
-+ assert [User(name='jack',id=7), User(name='fred',id=9)] == sess.query(User).order_by(User.id).group_by(User).join('addresses').having(func.count(Address.id)< 2).all()
-
-
- class ExistsTest(QueryTest, AssertsCompiledSQL):
-@@ -1798,14 +1799,14 @@
-
- ret = []
- eq_(len(sess.identity_map), 0)
-- ret.append(q.next())
-- ret.append(q.next())
-+ ret.append(next(q))
-+ ret.append(next(q))
- eq_(len(sess.identity_map), 2)
-- ret.append(q.next())
-- ret.append(q.next())
-+ ret.append(next(q))
-+ ret.append(next(q))
- eq_(len(sess.identity_map), 4)
- try:
-- q.next()
-+ next(q)
- assert False
- except StopIteration:
- pass
-@@ -1915,7 +1916,7 @@
- User.id, text("users.name"))
-
- eq_(s.query(User.id, "name").order_by(User.id).all(),
-- [(7, u'jack'), (8, u'ed'), (9, u'fred'), (10, u'chuck')])
-+ [(7, 'jack'), (8, 'ed'), (9, 'fred'), (10, 'chuck')])
-
- def test_via_select(self):
- User = self.classes.User
-@@ -1973,7 +1974,7 @@
- try:
- q = sess.query(Item).with_parent(u1)
- assert False
-- except sa_exc.InvalidRequestError, e:
-+ except sa_exc.InvalidRequestError as e:
- assert str(e) \
- == "Could not locate a property which relates "\
- "instances of class 'Item' to instances of class 'User'"
-@@ -2058,7 +2059,7 @@
- "addresses.id AS addresses_id, addresses.user_id AS "
- "addresses_user_id, addresses.email_address AS addresses_email_address "
- "FROM addresses WHERE :param_2 = addresses.user_id) AS anon_1",
-- checkparams={u'param_1': 7, u'param_2': 8},
-+ checkparams={'param_1': 7, 'param_2': 8},
- )
-
- def test_unique_binds_or(self):
-@@ -2075,7 +2076,7 @@
- "addresses_user_id, addresses.email_address AS "
- "addresses_email_address FROM addresses WHERE "
- ":param_1 = addresses.user_id OR :param_2 = addresses.user_id",
-- checkparams={u'param_1': 7, u'param_2': 8},
-+ checkparams={'param_1': 7, 'param_2': 8},
- )
-
- class SynonymTest(QueryTest):
-@@ -2117,9 +2118,9 @@
- options(joinedload(User.orders_syn)).all()
- eq_(result, [
- User(id=7, name='jack', orders=[
-- Order(description=u'order 1'),
-- Order(description=u'order 3'),
-- Order(description=u'order 5')
-+ Order(description='order 1'),
-+ Order(description='order 3'),
-+ Order(description='order 5')
- ])
- ])
- self.assert_sql_count(testing.db, go, 1)
-@@ -2133,9 +2134,9 @@
- options(joinedload(User.orders_syn_2)).all()
- eq_(result, [
- User(id=7, name='jack', orders=[
-- Order(description=u'order 1'),
-- Order(description=u'order 3'),
-- Order(description=u'order 5')
-+ Order(description='order 1'),
-+ Order(description='order 3'),
-+ Order(description='order 5')
- ])
- ])
- self.assert_sql_count(testing.db, go, 1)
-@@ -2149,9 +2150,9 @@
- options(joinedload('orders_syn_2')).all()
- eq_(result, [
- User(id=7, name='jack', orders=[
-- Order(description=u'order 1'),
-- Order(description=u'order 3'),
-- Order(description=u'order 5')
-+ Order(description='order 1'),
-+ Order(description='order 3'),
-+ Order(description='order 5')
- ])
- ])
- self.assert_sql_count(testing.db, go, 1)
-@@ -2355,7 +2356,7 @@
- if isinstance(item, type):
- item = class_mapper(item)
- else:
-- if isinstance(item, basestring):
-+ if isinstance(item, str):
- item = inspect(r[-1]).mapper.attrs[item]
- r.append(item)
- return tuple(r)
-diff -r 9d0639b9d3be test/orm/test_relationships.py
---- a/test/orm/test_relationships.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_relationships.py Sat Apr 27 19:42:17 2013 -0400
-@@ -426,13 +426,13 @@
- c1 = Company('c1')
- c2 = Company('c2')
-
-- e1 = Employee(u'emp1', c1, 1)
-- e2 = Employee(u'emp2', c1, 2, e1)
-- e3 = Employee(u'emp3', c1, 3, e1)
-- e4 = Employee(u'emp4', c1, 4, e3)
-- e5 = Employee(u'emp5', c2, 1)
-- e6 = Employee(u'emp6', c2, 2, e5)
-- e7 = Employee(u'emp7', c2, 3, e5)
-+ e1 = Employee('emp1', c1, 1)
-+ e2 = Employee('emp2', c1, 2, e1)
-+ e3 = Employee('emp3', c1, 3, e1)
-+ e4 = Employee('emp4', c1, 4, e3)
-+ e5 = Employee('emp5', c2, 1)
-+ e6 = Employee('emp6', c2, 2, e5)
-+ e7 = Employee('emp7', c2, 3, e5)
-
- sess.add_all((c1, c2))
- sess.commit()
-@@ -642,7 +642,7 @@
- try:
- sess.flush()
- assert False
-- except AssertionError, e:
-+ except AssertionError as e:
- startswith_(str(e),
- "Dependency rule tried to blank-out "
- "primary key column 'tableB.id' on instance ")
-@@ -667,7 +667,7 @@
- try:
- sess.flush()
- assert False
-- except AssertionError, e:
-+ except AssertionError as e:
- startswith_(str(e),
- "Dependency rule tried to blank-out "
- "primary key column 'tableB.id' on instance ")
-@@ -1106,9 +1106,9 @@
- eq_(
- sess.query(Subscriber).order_by(Subscriber.type).all(),
- [
-- Subscriber(id=1, type=u'A'),
-- Subscriber(id=2, type=u'B'),
-- Subscriber(id=2, type=u'C')
-+ Subscriber(id=1, type='A'),
-+ Subscriber(id=2, type='B'),
-+ Subscriber(id=2, type='C')
- ]
- )
-
-@@ -1365,7 +1365,7 @@
- try:
- sess.add(a1)
- assert False
-- except AssertionError, err:
-+ except AssertionError as err:
- eq_(str(err),
- "Attribute 'bs' on class '%s' doesn't handle "
- "objects of type '%s'" % (A, C))
-diff -r 9d0639b9d3be test/orm/test_session.py
---- a/test/orm/test_session.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_session.py Sat Apr 27 19:42:17 2013 -0400
-@@ -232,7 +232,7 @@
- # use :bindparam style
- eq_(sess.execute("select * from users where id=:id",
- {'id': 7}).fetchall(),
-- [(7, u'jack')])
-+ [(7, 'jack')])
-
-
- # use :bindparam style
-@@ -733,7 +733,7 @@
- # withstand a change? should this be
- # more directly attempting to manipulate the identity_map ?
- u1, u2, u3 = sess.query(User).all()
-- for i, (key, value) in enumerate(sess.identity_map.iteritems()):
-+ for i, (key, value) in enumerate(iter(sess.identity_map.items())):
- if i == 2:
- del u3
- gc_collect()
-@@ -747,7 +747,7 @@
- @event.listens_for(m, "after_update")
- def e(mapper, conn, target):
- sess = object_session(target)
-- for entry in sess.identity_map.values():
-+ for entry in list(sess.identity_map.values()):
- entry.name = "5"
-
- a1, a2 = User(name="1"), User(name="2")
-@@ -845,7 +845,7 @@
- u = session.query(User).filter_by(id=7).one()
-
- # get everything to load in both directions
-- print [a.user for a in u.addresses]
-+ print([a.user for a in u.addresses])
-
- # then see if expunge fails
- session.expunge(u)
-@@ -1187,7 +1187,7 @@
- s.flush()
- user = s.query(User).one()
- user = None
-- print s.identity_map
-+ print(s.identity_map)
- gc_collect()
- assert len(s.identity_map) == 1
-
-@@ -1207,7 +1207,7 @@
- s = create_session(weak_identity_map=False)
- mapper(User, users)
-
-- for o in [User(name='u%s' % x) for x in xrange(10)]:
-+ for o in [User(name='u%s' % x) for x in range(10)]:
- s.add(o)
- # o is still live after this loop...
-
-diff -r 9d0639b9d3be test/orm/test_subquery_relations.py
---- a/test/orm/test_subquery_relations.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_subquery_relations.py Sat Apr 27 19:42:17 2013 -0400
-@@ -1552,9 +1552,9 @@
- Movie = self.classes.Movie
-
- session = Session(testing.db)
-- rscott = Director(name=u"Ridley Scott")
-- alien = Movie(title=u"Alien")
-- brunner = Movie(title=u"Blade Runner")
-+ rscott = Director(name="Ridley Scott")
-+ alien = Movie(title="Alien")
-+ brunner = Movie(title="Blade Runner")
- rscott.movies.append(brunner)
- rscott.movies.append(alien)
- session.add_all([rscott, alien, brunner])
-diff -r 9d0639b9d3be test/orm/test_unitofwork.py
---- a/test/orm/test_unitofwork.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_unitofwork.py Sat Apr 27 19:42:17 2013 -0400
-@@ -87,7 +87,7 @@
-
- mapper(Test, uni_t1)
-
-- txt = u"\u0160\u0110\u0106\u010c\u017d"
-+ txt = "\u0160\u0110\u0106\u010c\u017d"
- t1 = Test(id=1, txt=txt)
- self.assert_(t1.txt == txt)
-
-@@ -107,7 +107,7 @@
- 't2s': relationship(Test2)})
- mapper(Test2, uni_t2)
-
-- txt = u"\u0160\u0110\u0106\u010c\u017d"
-+ txt = "\u0160\u0110\u0106\u010c\u017d"
- t1 = Test(txt=txt)
- t1.t2s.append(Test2())
- t1.t2s.append(Test2())
-@@ -132,16 +132,16 @@
- @classmethod
- def define_tables(cls, metadata):
- t1 = Table('unitable1', metadata,
-- Column(u'méil', Integer, primary_key=True, key='a', test_needs_autoincrement=True),
-- Column(u'\u6e2c\u8a66', Integer, key='b'),
-+ Column('méil', Integer, primary_key=True, key='a', test_needs_autoincrement=True),
-+ Column('\u6e2c\u8a66', Integer, key='b'),
- Column('type', String(20)),
- test_needs_fk=True,
- test_needs_autoincrement=True)
-- t2 = Table(u'Unitéble2', metadata,
-- Column(u'méil', Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
-- Column(u'\u6e2c\u8a66', Integer,
-- ForeignKey(u'unitable1.a'), key="d"),
-- Column(u'\u6e2c\u8a66_2', Integer, key="e"),
-+ t2 = Table('Unitéble2', metadata,
-+ Column('méil', Integer, primary_key=True, key="cc", test_needs_autoincrement=True),
-+ Column('\u6e2c\u8a66', Integer,
-+ ForeignKey('unitable1.a'), key="d"),
-+ Column('\u6e2c\u8a66_2', Integer, key="e"),
- test_needs_fk=True,
- test_needs_autoincrement=True)
-
-@@ -238,11 +238,12 @@
- Foo, t1 = self.classes.Foo, self.tables.t1
-
-
-- # Py3K
-- #data = b"this is some data"
-- # Py2K
-- data = "this is some data"
-- # end Py2K
-+# start Py3K
-+ data = b"this is some data"
-+# end Py3K
-+# start Py2K
-+# data = "this is some data"
-+# end Py2K
-
- mapper(Foo, t1)
-
-@@ -1054,13 +1055,13 @@
- session.flush()
-
- user_rows = users.select(users.c.id.in_([u.id])).execute().fetchall()
-- eq_(user_rows[0].values(), [u.id, 'one2manytester'])
-+ eq_(list(user_rows[0].values()), [u.id, 'one2manytester'])
-
- address_rows = addresses.select(
- addresses.c.id.in_([a.id, a2.id]),
- order_by=[addresses.c.email_address]).execute().fetchall()
-- eq_(address_rows[0].values(), [a2.id, u.id, 'lala@test.org'])
-- eq_(address_rows[1].values(), [a.id, u.id, 'one2many@test.org'])
-+ eq_(list(address_rows[0].values()), [a2.id, u.id, 'lala@test.org'])
-+ eq_(list(address_rows[1].values()), [a.id, u.id, 'one2many@test.org'])
-
- userid = u.id
- addressid = a2.id
-@@ -1071,7 +1072,7 @@
-
- address_rows = addresses.select(
- addresses.c.id == addressid).execute().fetchall()
-- eq_(address_rows[0].values(),
-+ eq_(list(address_rows[0].values()),
- [addressid, userid, 'somethingnew@foo.com'])
- self.assert_(u.id == userid and a2.id == addressid)
-
-@@ -1501,18 +1502,18 @@
- assert u.name == 'multitester'
-
- user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
-- eq_(user_rows[0].values(), [u.foo_id, 'multitester'])
-+ eq_(list(user_rows[0].values()), [u.foo_id, 'multitester'])
- address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
-- eq_(address_rows[0].values(), [u.id, u.foo_id, 'multi@test.org'])
-+ eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'multi@test.org'])
-
- u.email = 'lala@hey.com'
- u.name = 'imnew'
- session.flush()
-
- user_rows = users.select(users.c.id.in_([u.foo_id])).execute().fetchall()
-- eq_(user_rows[0].values(), [u.foo_id, 'imnew'])
-+ eq_(list(user_rows[0].values()), [u.foo_id, 'imnew'])
- address_rows = addresses.select(addresses.c.id.in_([u.id])).execute().fetchall()
-- eq_(address_rows[0].values(), [u.id, u.foo_id, 'lala@hey.com'])
-+ eq_(list(address_rows[0].values()), [u.id, u.foo_id, 'lala@hey.com'])
-
- session.expunge_all()
- u = session.query(User).get(id)
-@@ -1650,7 +1651,7 @@
- l = sa.select([users, addresses],
- sa.and_(users.c.id==addresses.c.user_id,
- addresses.c.id==a.id)).execute()
-- eq_(l.first().values(),
-+ eq_(list(l.first().values()),
- [a.user.id, 'asdf8d', a.id, a.user_id, 'theater@foo.com'])
-
- def test_many_to_one_1(self):
-@@ -2127,7 +2128,7 @@
-
- assert assoc.count().scalar() == 2
- i.keywords = []
-- print i.keywords
-+ print(i.keywords)
- session.flush()
- assert assoc.count().scalar() == 0
-
-diff -r 9d0639b9d3be test/orm/test_unitofworkv2.py
---- a/test/orm/test_unitofworkv2.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_unitofworkv2.py Sat Apr 27 19:42:17 2013 -0400
-@@ -26,7 +26,7 @@
- def _assert_uow_size(self, session, expected ):
- uow = self._get_test_uow(session)
- postsort_actions = uow._generate_actions()
-- print postsort_actions
-+ print(postsort_actions)
- eq_(len(postsort_actions), expected, postsort_actions)
-
- class UOWTest(_fixtures.FixtureTest,
-@@ -125,12 +125,12 @@
- CompiledSQL(
- "UPDATE addresses SET user_id=:user_id WHERE "
- "addresses.id = :addresses_id",
-- lambda ctx: [{u'addresses_id': a1.id, 'user_id': None}]
-+ lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
- ),
- CompiledSQL(
- "UPDATE addresses SET user_id=:user_id WHERE "
- "addresses.id = :addresses_id",
-- lambda ctx: [{u'addresses_id': a2.id, 'user_id': None}]
-+ lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
- ),
- CompiledSQL(
- "DELETE FROM users WHERE users.id = :id",
-@@ -235,12 +235,12 @@
- CompiledSQL(
- "UPDATE addresses SET user_id=:user_id WHERE "
- "addresses.id = :addresses_id",
-- lambda ctx: [{u'addresses_id': a1.id, 'user_id': None}]
-+ lambda ctx: [{'addresses_id': a1.id, 'user_id': None}]
- ),
- CompiledSQL(
- "UPDATE addresses SET user_id=:user_id WHERE "
- "addresses.id = :addresses_id",
-- lambda ctx: [{u'addresses_id': a2.id, 'user_id': None}]
-+ lambda ctx: [{'addresses_id': a2.id, 'user_id': None}]
- ),
- CompiledSQL(
- "DELETE FROM users WHERE users.id = :id",
-@@ -1149,7 +1149,7 @@
- "nodes, node_to_nodes WHERE :param_1 = "
- "node_to_nodes.right_node_id AND nodes.id = "
- "node_to_nodes.left_node_id" ,
-- lambda ctx:{u'param_1': n1.id},
-+ lambda ctx:{'param_1': n1.id},
- ),
- CompiledSQL(
- "DELETE FROM node_to_nodes WHERE "
-diff -r 9d0639b9d3be test/orm/test_update_delete.py
---- a/test/orm/test_update_delete.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_update_delete.py Sat Apr 27 19:42:17 2013 -0400
-@@ -188,22 +188,22 @@
- update({'age': User.age - 10}, synchronize_session='evaluate')
-
- eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
-
- sess.query(User).filter(User.age > 29).\
- update({User.age: User.age - 10}, synchronize_session='evaluate')
- eq_([john.age, jack.age, jill.age, jane.age], [25,27,29,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,27,29,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,27,29,27])))
-
- sess.query(User).filter(User.age > 27).\
- update({users.c.age: User.age - 10}, synchronize_session='evaluate')
- eq_([john.age, jack.age, jill.age, jane.age], [25,27,19,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,27,19,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,27,19,27])))
-
- sess.query(User).filter(User.age == 25).\
- update({User.age: User.age - 10}, synchronize_session='fetch')
- eq_([john.age, jack.age, jill.age, jane.age], [15,27,19,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([15,27,19,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([15,27,19,27])))
-
- def test_update_against_metadata(self):
- User, users = self.classes.User, self.tables.users
-@@ -211,7 +211,7 @@
- sess = Session()
-
- sess.query(users).update({users.c.age: 29}, synchronize_session=False)
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([29,29,29,29]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([29,29,29,29])))
-
- def test_update_with_bindparams(self):
- User = self.classes.User
-@@ -224,7 +224,7 @@
- update({'age': User.age - 10}, synchronize_session='fetch')
-
- eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
-
- def test_update_without_load(self):
- User = self.classes.User
-@@ -233,7 +233,7 @@
-
- sess.query(User).filter(User.id == 3).\
- update({'age': 44}, synchronize_session='fetch')
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,47,44,37]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,47,44,37])))
-
- def test_update_changes_resets_dirty(self):
- User = self.classes.User
-@@ -300,7 +300,7 @@
- update({'age': User.age - 10}, synchronize_session='fetch')
-
- eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
-
- @testing.fails_if(lambda: not testing.db.dialect.supports_sane_rowcount)
- def test_update_returns_rowcount(self):
-@@ -334,7 +334,7 @@
- sess.query(User).update({'age': 42}, synchronize_session='evaluate')
-
- eq_([john.age, jack.age, jill.age, jane.age], [42,42,42,42])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([42,42,42,42]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([42,42,42,42])))
-
- def test_delete_all(self):
- User = self.classes.User
-@@ -516,7 +516,7 @@
-
- eq_([foo.title, bar.title, baz.title], ['foofoo','barbar', 'baz'])
- eq_(sess.query(Document.title).order_by(Document.id).all(),
-- zip(['foofoo','barbar', 'baz']))
-+ list(zip(['foofoo','barbar', 'baz'])))
-
- def test_update_with_explicit_joinedload(self):
- User = self.classes.User
-@@ -528,7 +528,7 @@
- update({'age': User.age - 10}, synchronize_session='fetch')
-
- eq_([john.age, jack.age, jill.age, jane.age], [25,37,29,27])
-- eq_(sess.query(User.age).order_by(User.id).all(), zip([25,37,29,27]))
-+ eq_(sess.query(User.age).order_by(User.id).all(), list(zip([25,37,29,27])))
-
- def test_delete_with_eager_relationships(self):
- Document = self.classes.Document
-@@ -538,7 +538,7 @@
- sess.query(Document).filter(Document.user_id == 1).\
- delete(synchronize_session=False)
-
-- eq_(sess.query(Document.title).all(), zip(['baz']))
-+ eq_(sess.query(Document.title).all(), list(zip(['baz'])))
-
- class UpdateDeleteFromTest(fixtures.MappedTest):
- @classmethod
-diff -r 9d0639b9d3be test/orm/test_utils.py
---- a/test/orm/test_utils.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/orm/test_utils.py Sat Apr 27 19:42:17 2013 -0400
-@@ -52,10 +52,10 @@
- alias = aliased(Point)
-
- assert Point.zero
-- # Py2K
-- # TODO: what is this testing ??
-- assert not getattr(alias, 'zero')
-- # end Py2K
-+# start Py2K
-+# # TODO: what is this testing ??
-+# assert not getattr(alias, 'zero')
-+# end Py2K
-
- def test_classmethods(self):
- class Point(object):
-@@ -123,17 +123,19 @@
- self.func = func
- def __get__(self, instance, owner):
- if instance is None:
-- # Py3K
-- #args = (self.func, owner)
-- # Py2K
-- args = (self.func, owner, owner.__class__)
-- # end Py2K
-+# start Py3K
-+ args = (self.func, owner)
-+# end Py3K
-+# start Py2K
-+# args = (self.func, owner, owner.__class__)
-+# end Py2K
- else:
-- # Py3K
-- #args = (self.func, instance)
-- # Py2K
-- args = (self.func, instance, owner)
-- # end Py2K
-+# start Py3K
-+ args = (self.func, instance)
-+# end Py3K
-+# start Py2K
-+# args = (self.func, instance, owner)
-+# end Py2K
- return types.MethodType(*args)
-
- class PropertyDescriptor(object):
-diff -r 9d0639b9d3be test/perf/insertspeed.py
---- a/test/perf/insertspeed.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/insertspeed.py Sat Apr 27 19:42:17 2013 -0400
-@@ -13,18 +13,18 @@
-
- def sa_unprofiled_insertmany(n):
- i = Person_table.insert()
-- i.execute([{'name':'John Doe','sex':1,'age':35} for j in xrange(n)])
-+ i.execute([{'name':'John Doe','sex':1,'age':35} for j in range(n)])
-
- def sqlite_unprofiled_insertmany(n):
- conn = db.connect().connection
- c = conn.cursor()
-- persons = [('john doe', 1, 35) for i in xrange(n)]
-+ persons = [('john doe', 1, 35) for i in range(n)]
- c.executemany("insert into Person(name, sex, age) values (?,?,?)", persons)
-
- @profiling.profiled('sa_profiled_insert_many', always=True)
- def sa_profiled_insert_many(n):
- i = Person_table.insert()
-- i.execute([{'name':'John Doe','sex':1,'age':35} for j in xrange(n)])
-+ i.execute([{'name':'John Doe','sex':1,'age':35} for j in range(n)])
- s = Person_table.select()
- r = s.execute()
- res = [[value for value in row] for row in r.fetchall()]
-@@ -32,7 +32,7 @@
- def sqlite_unprofiled_insert(n):
- conn = db.connect().connection
- c = conn.cursor()
-- for j in xrange(n):
-+ for j in range(n):
- c.execute("insert into Person(name, sex, age) values (?,?,?)",
- ('john doe', 1, 35))
-
-@@ -40,13 +40,13 @@
- # Another option is to build Person_table.insert() outside of the
- # loop. But it doesn't make much of a difference, so might as well
- # use the worst-case/naive version here.
-- for j in xrange(n):
-+ for j in range(n):
- Person_table.insert().execute({'name':'John Doe','sex':1,'age':35})
-
- @profiling.profiled('sa_profiled_insert', always=True)
- def sa_profiled_insert(n):
- i = Person_table.insert()
-- for j in xrange(n):
-+ for j in range(n):
- i.execute({'name':'John Doe','sex':1,'age':35})
- s = Person_table.select()
- r = s.execute()
-@@ -69,12 +69,12 @@
- metadata.drop_all()
- metadata.create_all()
-
-- print "%s (%s)" % (label, ', '.join([str(a) for a in args]))
-+ print("%s (%s)" % (label, ', '.join([str(a) for a in args])))
- fn(*args, **kw)
-
- def all():
- try:
-- print "Bulk INSERTS via executemany():\n"
-+ print("Bulk INSERTS via executemany():\n")
-
- run_timed(sqlite_unprofiled_insertmany,
- 'pysqlite bulk insert',
-@@ -88,7 +88,7 @@
- 'SQLAlchemy bulk insert/select, profiled',
- 50000)
-
-- print "\nIndividual INSERTS via execute():\n"
-+ print("\nIndividual INSERTS via execute():\n")
-
- run_timed(sqlite_unprofiled_insert,
- "pysqlite individual insert",
-diff -r 9d0639b9d3be test/perf/objselectspeed.py
---- a/test/perf/objselectspeed.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/objselectspeed.py Sat Apr 27 19:42:17 2013 -0400
-@@ -36,7 +36,7 @@
- metadata.create_all()
- i = Person_table.insert()
- data = [{'name':'John Doe','sex':1,'age':35, 'type':'employee'}] * 100
-- for j in xrange(500):
-+ for j in range(500):
- i.execute(data)
-
- # note we arent fetching from employee_table,
-@@ -46,7 +46,7 @@
- #for j in xrange(500):
- # i.execute(data)
-
-- print "Inserted 50,000 rows"
-+ print("Inserted 50,000 rows")
-
- def sqlite_select(entity_cls):
- conn = db.connect().connection
-@@ -89,10 +89,10 @@
- t, t2 = 0, 0
- def usage(label):
- now = resource.getrusage(resource.RUSAGE_SELF)
-- print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
-+ print("%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
- label, t2 - t,
- now.ru_utime - usage.last.ru_utime,
-- now.ru_stime - usage.last.ru_stime)
-+ now.ru_stime - usage.last.ru_stime))
- usage.snap(now)
- usage.snap = lambda stats=None: setattr(
- usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF))
-diff -r 9d0639b9d3be test/perf/objupdatespeed.py
---- a/test/perf/objupdatespeed.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/objupdatespeed.py Sat Apr 27 19:42:17 2013 -0400
-@@ -39,12 +39,12 @@
- i.execute(data)
-
- i = Email_table.insert()
-- for j in xrange(1, NUM + 1):
-+ for j in range(1, NUM + 1):
- i.execute(address='foo@bar', person_id=j)
- if j % 2:
- i.execute(address='baz@quux', person_id=j)
-
-- print "Inserted %d rows." % (NUM + NUM + (NUM // 2))
-+ print("Inserted %d rows." % (NUM + NUM + (NUM // 2)))
-
- def orm_select(session):
- return session.query(Person).all()
-@@ -63,10 +63,10 @@
- t, t2 = 0, 0
- def usage(label):
- now = resource.getrusage(resource.RUSAGE_SELF)
-- print "%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
-+ print("%s: %0.3fs real, %0.3fs user, %0.3fs sys" % (
- label, t2 - t,
- now.ru_utime - usage.last.ru_utime,
-- now.ru_stime - usage.last.ru_stime)
-+ now.ru_stime - usage.last.ru_stime))
- usage.snap(now)
- usage.snap = lambda stats=None: setattr(
- usage, 'last', stats or resource.getrusage(resource.RUSAGE_SELF))
-diff -r 9d0639b9d3be test/perf/orm2010.py
---- a/test/perf/orm2010.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/orm2010.py Sat Apr 27 19:42:17 2013 -0400
-@@ -95,7 +95,7 @@
- name="Boss %d" % i,
- golf_average=Decimal(random.randint(40, 150))
- )
-- for i in xrange(1000)
-+ for i in range(1000)
- ]
-
- sess.add_all(bosses)
-@@ -107,7 +107,7 @@
- name="Grunt %d" % i,
- savings=Decimal(random.randint(5000000, 15000000) / 100)
- )
-- for i in xrange(10000)
-+ for i in range(10000)
- ]
-
- # Assign each Grunt a Boss. Look them up in the DB
-@@ -149,15 +149,15 @@
-
- counts_by_methname = dict((key[2], stats.stats[key][0]) for key in stats.stats)
-
--print "SQLA Version: %s" % __version__
--print "Total calls %d" % stats.total_calls
--print "Total cpu seconds: %.2f" % stats.total_tt
--print 'Total execute calls: %d' \
-+print("SQLA Version: %s" % __version__)
-+print("Total calls %d" % stats.total_calls)
-+print("Total cpu seconds: %.2f" % stats.total_tt)
-+print('Total execute calls: %d' \
- % counts_by_methname["<method 'execute' of 'sqlite3.Cursor' "
-- "objects>"]
--print 'Total executemany calls: %d' \
-+ "objects>"])
-+print('Total executemany calls: %d' \
- % counts_by_methname.get("<method 'executemany' of 'sqlite3.Cursor' "
-- "objects>", 0)
-+ "objects>", 0))
-
- #stats.sort_stats('time', 'calls')
- #stats.print_stats()
-diff -r 9d0639b9d3be test/perf/ormsession.py
---- a/test/perf/ormsession.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/ormsession.py Sat Apr 27 19:42:17 2013 -0400
-@@ -81,9 +81,9 @@
-
- transaction = con.begin()
- data, subdata = [], []
-- for item_id in xrange(1, q_items + 1):
-+ for item_id in range(1, q_items + 1):
- data.append({'name': "item number %s" % item_id})
-- for subitem_id in xrange(1, (item_id % q_sub_per_item) + 1):
-+ for subitem_id in range(1, (item_id % q_sub_per_item) + 1):
- subdata.append({'item_id': item_id,
- 'name': "subitem number %s" % subitem_id})
- if item_id % 100 == 0:
-@@ -99,7 +99,7 @@
-
- transaction = con.begin()
- data = []
-- for customer_id in xrange(1, q_customers):
-+ for customer_id in range(1, q_customers):
- data.append({'name': "customer number %s" % customer_id})
- if customer_id % 100 == 0:
- customers.insert().execute(*data)
-@@ -111,7 +111,7 @@
- transaction = con.begin()
- data, subdata = [], []
- order_t = int(time.time()) - (5000 * 5 * 60)
-- current = xrange(1, q_customers)
-+ current = range(1, q_customers)
- step, purchase_id = 1, 0
- while current:
- next = []
-diff -r 9d0639b9d3be test/perf/stress_all.py
---- a/test/perf/stress_all.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/stress_all.py Sat Apr 27 19:42:17 2013 -0400
-@@ -2,7 +2,7 @@
- from datetime import *
- import decimal
- #from fastdec import mpd as Decimal
--from cPickle import dumps, loads
-+from pickle import dumps, loads
-
- #from sqlalchemy.dialects.postgresql.base import ARRAY
-
-@@ -42,9 +42,9 @@
-
- def getitem_long_results(raw_results):
- return [
-- (r[0L],
-- r[1L], r[2L], r[3L], r[4L], r[5L],
-- r[6L], r[7L], r[8L], r[9L], r[10L])
-+ (r[0],
-+ r[1], r[2], r[3], r[4], r[5],
-+ r[6], r[7], r[8], r[9], r[10])
- for r in raw_results]
-
- def getitem_obj_results(raw_results):
-@@ -128,7 +128,7 @@
-
- # Unicode
- def genunicodevalue(rnum, fnum):
-- return (rnum % 4) and (u"value%d" % fnum) or None
-+ return (rnum % 4) and ("value%d" % fnum) or None
- unicodetest = (Unicode(20, ), genunicodevalue,
- dict(num_records=100000))
- # dict(engineurl='mysql:///test', freshdata=False))
-@@ -139,10 +139,10 @@
- pickletypetest, typedecoratortest, unicodetest]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
- 'sqlite://', 'mysql://scott:tiger@localhost/test'):
-- print "\n%s\n" % engineurl
-+ print("\n%s\n" % engineurl)
- for datatype, genvalue, kwargs in tests:
-- print "%s:" % getattr(datatype, '__name__',
-- datatype.__class__.__name__),
-+ print("%s:" % getattr(datatype, '__name__',
-+ datatype.__class__.__name__), end=' ')
- profile_and_time_dbfunc(iter_results, datatype, genvalue,
- profile=False, engineurl=engineurl,
- verbose=verbose, **kwargs)
-@@ -158,13 +158,13 @@
- slice_results]
- for engineurl in ('postgresql://scott:tiger@localhost/test',
- 'sqlite://', 'mysql://scott:tiger@localhost/test'):
-- print "\n%s\n" % engineurl
-+ print("\n%s\n" % engineurl)
- test_table = prepare(Unicode(20,),
- genunicodevalue,
- num_fields=10, num_records=100000,
- verbose=verbose, engineurl=engineurl)
- for method in methods:
-- print "%s:" % method.__name__,
-+ print("%s:" % method.__name__, end=' ')
- time_dbfunc(test_table, method, genunicodevalue,
- num_fields=10, num_records=100000, profile=False,
- verbose=verbose)
-@@ -174,9 +174,9 @@
- # --------------------------------
-
- def pickletofile_results(raw_results):
-- from cPickle import dump, load
-+ from pickle import dump, load
- for protocol in (0, 1, 2):
-- print "dumping protocol %d..." % protocol
-+ print("dumping protocol %d..." % protocol)
- f = file('noext.pickle%d' % protocol, 'wb')
- dump(raw_results, f, protocol)
- f.close()
-@@ -198,7 +198,7 @@
- num_fields=10, num_records=10000)
- funcs = [pickle_rows, pickle_results]
- for func in funcs:
-- print "%s:" % func.__name__,
-+ print("%s:" % func.__name__, end=' ')
- time_dbfunc(test_table, func, genunicodevalue,
- num_records=10000, profile=False, verbose=verbose)
-
-@@ -217,9 +217,9 @@
-
- def get_results():
- return session.query(Test).all()
-- print "ORM:",
-+ print("ORM:", end=' ')
- for engineurl in ('postgresql:///test', 'sqlite://', 'mysql:///test'):
-- print "\n%s\n" % engineurl
-+ print("\n%s\n" % engineurl)
- profile_and_time_dbfunc(getattr_results, Unicode(20), genunicodevalue,
- class_=Test, getresults_func=get_results,
- engineurl=engineurl, #freshdata=False,
-diff -r 9d0639b9d3be test/perf/stresstest.py
---- a/test/perf/stresstest.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/stresstest.py Sat Apr 27 19:42:17 2013 -0400
-@@ -17,13 +17,13 @@
-
- def insert(test_table, num_fields, num_records, genvalue, verbose=True):
- if verbose:
-- print "building insert values...",
-+ print("building insert values...", end=' ')
- sys.stdout.flush()
- values = [dict(("field%d" % fnum, genvalue(rnum, fnum))
- for fnum in range(num_fields))
- for rnum in range(num_records)]
- if verbose:
-- print "inserting...",
-+ print("inserting...", end=' ')
- sys.stdout.flush()
- def db_insert():
- test_table.insert().execute(values)
-@@ -32,11 +32,11 @@
- "from __main__ import db_insert",
- number=1)
- if verbose:
-- print "%s" % round(timing, 3)
-+ print("%s" % round(timing, 3))
-
- def check_result(results, num_fields, genvalue, verbose=True):
- if verbose:
-- print "checking...",
-+ print("checking...", end=' ')
- sys.stdout.flush()
- for rnum, row in enumerate(results):
- expected = tuple([rnum + 1] +
-@@ -49,28 +49,28 @@
-
- def nicer_res(values, printvalues=False):
- if printvalues:
-- print values
-+ print(values)
- min_time = min(values)
- return round(min_time, 3), round(avgdev(values, min_time), 2)
-
- def profile_func(func_name, verbose=True):
- if verbose:
-- print "profiling...",
-+ print("profiling...", end=' ')
- sys.stdout.flush()
- cProfile.run('%s()' % func_name, 'prof')
-
- def time_func(func_name, num_tests=1, verbose=True):
- if verbose:
-- print "timing...",
-+ print("timing...", end=' ')
- sys.stdout.flush()
- timings = timeit.repeat('%s()' % func_name,
- "from __main__ import %s" % func_name,
- number=num_tests, repeat=5)
- avg, dev = nicer_res(timings)
- if verbose:
-- print "%s (%s)" % (avg, dev)
-+ print("%s (%s)" % (avg, dev))
- else:
-- print avg
-+ print(avg)
-
- def profile_and_time(func_name, num_tests=1):
- profile_func(func_name)
-@@ -121,7 +121,7 @@
- check_results=check_result, profile=True,
- check_leaks=True, print_leaks=False, verbose=True):
- if verbose:
-- print "testing '%s'..." % test_func.__name__,
-+ print("testing '%s'..." % test_func.__name__, end=' ')
- sys.stdout.flush()
- if class_ is not None:
- clear_mappers()
-@@ -148,12 +148,12 @@
- diff = hashable_objects_after - hashable_objects_before
- ldiff = len(diff)
- if print_leaks and ldiff < num_records:
-- print "\n*** hashable objects leaked (%d) ***" % ldiff
-- print '\n'.join(map(str, diff))
-- print "***\n"
-+ print("\n*** hashable objects leaked (%d) ***" % ldiff)
-+ print('\n'.join(map(str, diff)))
-+ print("***\n")
-
- if num_leaks > num_records:
-- print "(leaked: %d !)" % num_leaks,
-+ print("(leaked: %d !)" % num_leaks, end=' ')
- if profile:
- profile_func('test', verbose)
- time_func('test', num_tests, verbose)
-diff -r 9d0639b9d3be test/perf/threaded_compile.py
---- a/test/perf/threaded_compile.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/perf/threaded_compile.py Sat Apr 27 19:42:17 2013 -0400
-@@ -4,7 +4,7 @@
-
- from sqlalchemy import *
- from sqlalchemy.orm import *
--import thread, time
-+import _thread, time
- from sqlalchemy.orm import mapperlib
-
-
-@@ -42,20 +42,20 @@
-
- def run1():
- for i in range(50):
-- print "T1", thread.get_ident()
-+ print("T1", _thread.get_ident())
- class_mapper(T1)
- time.sleep(.05)
-
- def run2():
- for i in range(50):
-- print "T2", thread.get_ident()
-+ print("T2", _thread.get_ident())
- class_mapper(T2)
- time.sleep(.057)
-
- def run3():
- for i in range(50):
- def foo():
-- print "FOO", thread.get_ident()
-+ print("FOO", _thread.get_ident())
- class Foo(object):pass
- mapper(Foo, t3)
- class_mapper(Foo).compile()
-@@ -64,12 +64,12 @@
-
- mapper(T1, t1, properties={'t2':relationship(T2, backref="t1")})
- mapper(T2, t2)
--print "START"
-+print("START")
- for j in range(0, 5):
-- thread.start_new_thread(run1, ())
-- thread.start_new_thread(run2, ())
-- thread.start_new_thread(run3, ())
-- thread.start_new_thread(run3, ())
-- thread.start_new_thread(run3, ())
--print "WAIT"
-+ _thread.start_new_thread(run1, ())
-+ _thread.start_new_thread(run2, ())
-+ _thread.start_new_thread(run3, ())
-+ _thread.start_new_thread(run3, ())
-+ _thread.start_new_thread(run3, ())
-+print("WAIT")
- time.sleep(5)
-diff -r 9d0639b9d3be test/sql/test_compiler.py
---- a/test/sql/test_compiler.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_compiler.py Sat Apr 27 19:42:17 2013 -0400
-@@ -661,13 +661,13 @@
- s = select([table1.c.myid]).as_scalar()
- try:
- s.c.foo
-- except exc.InvalidRequestError, err:
-+ except exc.InvalidRequestError as err:
- assert str(err) \
- == 'Scalar Select expression has no columns; use this '\
- 'object directly within a column-level expression.'
- try:
- s.columns.foo
-- except exc.InvalidRequestError, err:
-+ except exc.InvalidRequestError as err:
- assert str(err) \
- == 'Scalar Select expression has no columns; use this '\
- 'object directly within a column-level expression.'
-@@ -1116,9 +1116,9 @@
-
- # test unicode
- self.assert_compile(select(
-- [u"foobar(a)", u"pk_foo_bar(syslaal)"],
-- u"a = 12",
-- from_obj=[u"foobar left outer join lala on foobar.foo = lala.foo"]
-+ ["foobar(a)", "pk_foo_bar(syslaal)"],
-+ "a = 12",
-+ from_obj=["foobar left outer join lala on foobar.foo = lala.foo"]
- ),
- "SELECT foobar(a), pk_foo_bar(syslaal) FROM foobar "
- "left outer join lala on foobar.foo = lala.foo WHERE a = 12"
-@@ -2245,7 +2245,7 @@
- func.lala(table1.c.name).label('gg')])
-
- eq_(
-- s1.c.keys(),
-+ list(s1.c.keys()),
- ['myid', 'foobar', str(f1), 'gg']
- )
-
-@@ -2273,7 +2273,7 @@
- t = table1
-
- s1 = select([col], from_obj=t)
-- assert s1.c.keys() == [key], s1.c.keys()
-+ assert list(s1.c.keys()) == [key], list(s1.c.keys())
-
- if label:
- self.assert_compile(s1,
-@@ -2679,11 +2679,11 @@
- def test_reraise_of_column_spec_issue_unicode(self):
- MyType = self._illegal_type_fixture()
- t1 = Table('t', MetaData(),
-- Column(u'méil', MyType())
-+ Column('méil', MyType())
- )
- assert_raises_message(
- exc.CompileError,
-- ur"\(in table 't', column 'méil'\): Couldn't compile type",
-+ r"\(in table 't', column 'méil'\): Couldn't compile type",
- schema.CreateTable(t1).compile
- )
-
-diff -r 9d0639b9d3be test/sql/test_defaults.py
---- a/test/sql/test_defaults.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_defaults.py Sat Apr 27 19:42:17 2013 -0400
-@@ -600,7 +600,7 @@
- nonai.insert().execute(data='row 1')
- nonai.insert().execute(data='row 2')
- assert False
-- except sa.exc.DBAPIError, e:
-+ except sa.exc.DBAPIError as e:
- assert True
-
- nonai.insert().execute(id=1, data='row 1')
-@@ -649,7 +649,7 @@
- def _assert_seq_result(self, ret):
- """asserts return of next_value is an int"""
-
-- assert isinstance(ret, (int, long))
-+ assert isinstance(ret, int)
- assert ret > 0
-
- def test_implicit_connectionless(self):
-@@ -781,7 +781,7 @@
- ]
- start = seq.start or 1
- inc = seq.increment or 1
-- assert values == list(xrange(start, start + inc * 3, inc))
-+ assert values == list(range(start, start + inc * 3, inc))
-
- finally:
- seq.drop(testing.db)
-@@ -1156,20 +1156,22 @@
- c = Column(Unicode(32))
-
- def test_unicode_default(self):
-- # Py3K
-- #default = 'foo'
-- # Py2K
-- default = u'foo'
-- # end Py2K
-+# start Py3K
-+ default = 'foo'
-+# end Py3K
-+# start Py2K
-+# default = u'foo'
-+# end Py2K
- c = Column(Unicode(32), default=default)
-
-
- def test_nonunicode_default(self):
-- # Py3K
-- #default = b'foo'
-- # Py2K
-- default = 'foo'
-- # end Py2K
-+# start Py3K
-+ default = b'foo'
-+# end Py3K
-+# start Py2K
-+# default = 'foo'
-+# end Py2K
- assert_raises_message(
- sa.exc.SAWarning,
- "Unicode column received non-unicode default value.",
-diff -r 9d0639b9d3be test/sql/test_functions.py
---- a/test/sql/test_functions.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_functions.py Sat Apr 27 19:42:17 2013 -0400
-@@ -381,7 +381,7 @@
- assert t.select(t.c.id == id).execute().first()['value'] == 9
- t.update(values={t.c.value: func.length("asdf")}).execute()
- assert t.select().execute().first()['value'] == 4
-- print "--------------------------"
-+ print("--------------------------")
- t2.insert().execute()
- t2.insert(values=dict(value=func.length("one"))).execute()
- t2.insert(values=dict(value=func.length("asfda") + -19)).\
-@@ -409,7 +409,7 @@
-
- t2.update(values={t2.c.value: func.length("asfdaasdf"),
- t2.c.stuff: "foo"}).execute()
-- print "HI", select([t2.c.value, t2.c.stuff]).execute().first()
-+ print("HI", select([t2.c.value, t2.c.stuff]).execute().first())
- eq_(select([t2.c.value, t2.c.stuff]).execute().first(),
- (9, "foo")
- )
-diff -r 9d0639b9d3be test/sql/test_generative.py
---- a/test/sql/test_generative.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_generative.py Sat Apr 27 19:42:17 2013 -0400
-@@ -176,7 +176,7 @@
- canary = []
- def visit(binary, l, r):
- canary.append((binary.operator, l, r))
-- print binary.operator, l, r
-+ print(binary.operator, l, r)
- sql_util.visit_binary_product(visit, expr)
- eq_(
- canary, expected
-@@ -433,7 +433,7 @@
- clause2 = Vis().traverse(clause)
- assert c1 == str(clause)
- assert str(clause2) == c1 + " SOME MODIFIER=:lala"
-- assert clause.bindparams.keys() == ['bar']
-+ assert list(clause.bindparams.keys()) == ['bar']
- assert set(clause2.bindparams.keys()) == set(['bar', 'lala'])
-
- def test_select(self):
-@@ -446,8 +446,8 @@
- s3 = Vis().traverse(s2)
- assert str(s3) == s3_assert
- assert str(s2) == s2_assert
-- print str(s2)
-- print str(s3)
-+ print(str(s2))
-+ print(str(s3))
- class Vis(ClauseVisitor):
- def visit_select(self, select):
- select.append_whereclause(t1.c.col2 == 7)
-@@ -459,8 +459,8 @@
- def visit_select(self, select):
- select.append_whereclause(t1.c.col3 == 9)
- s4 = Vis().traverse(s3)
-- print str(s3)
-- print str(s4)
-+ print(str(s3))
-+ print(str(s4))
- assert str(s4) == s4_assert
- assert str(s3) == s3_assert
-
-@@ -471,8 +471,8 @@
- binary.left = t1.c.col1
- binary.right = bindparam("col1", unique=True)
- s5 = Vis().traverse(s4)
-- print str(s4)
-- print str(s5)
-+ print(str(s4))
-+ print(str(s5))
- assert str(s5) == s5_assert
- assert str(s4) == s4_assert
-
-diff -r 9d0639b9d3be test/sql/test_metadata.py
---- a/test/sql/test_metadata.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_metadata.py Sat Apr 27 19:42:17 2013 -0400
-@@ -89,7 +89,7 @@
- msgs.append("attach %s.%s" % (t.name, c.name))
- c1 = Column('foo', String())
- m = MetaData()
-- for i in xrange(3):
-+ for i in range(3):
- cx = c1.copy()
- # as of 0.7, these events no longer copy. its expected
- # that listeners will be re-established from the
-@@ -511,7 +511,7 @@
- def _get_key(i):
- return [i.name, i.unique] + \
- sorted(i.kwargs.items()) + \
-- i.columns.keys()
-+ list(i.columns.keys())
-
- eq_(
- sorted([_get_key(i) for i in table.indexes]),
-diff -r 9d0639b9d3be test/sql/test_query.py
---- a/test/sql/test_query.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_query.py Sat Apr 27 19:42:17 2013 -0400
-@@ -367,10 +367,10 @@
- )
- if use_labels:
- eq_(result[0]['query_users_user_id'], 7)
-- eq_(result[0].keys(), ["query_users_user_id", "query_users_user_name"])
-+ eq_(list(result[0].keys()), ["query_users_user_id", "query_users_user_name"])
- else:
- eq_(result[0]['user_id'], 7)
-- eq_(result[0].keys(), ["user_id", "user_name"])
-+ eq_(list(result[0].keys()), ["user_id", "user_name"])
-
- eq_(result[0][0], 7)
- eq_(result[0][users.c.user_id], 7)
-@@ -523,13 +523,13 @@
-
- def a_eq(got, wanted):
- if got != wanted:
-- print "Wanted %s" % wanted
-- print "Received %s" % got
-+ print("Wanted %s" % wanted)
-+ print("Received %s" % got)
- self.assert_(got == wanted, got)
-
- a_eq(prep('select foo'), 'select foo')
- a_eq(prep("time='12:30:00'"), "time='12:30:00'")
-- a_eq(prep(u"time='12:30:00'"), u"time='12:30:00'")
-+ a_eq(prep("time='12:30:00'"), "time='12:30:00'")
- a_eq(prep(":this:that"), ":this:that")
- a_eq(prep(":this :that"), "? ?")
- a_eq(prep("(:this),(:that :other)"), "(?),(? ?)")
-@@ -769,7 +769,7 @@
- ).first()
- eq_(r['user_id'], 1)
- eq_(r['user_name'], "john")
-- eq_(r.keys(), ["user_id", "user_name"])
-+ eq_(list(r.keys()), ["user_id", "user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_raw(self):
-@@ -784,7 +784,7 @@
- assert 'user_name' not in r
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
-- eq_(r.keys(), ["query_users.user_id", "query_users.user_name"])
-+ eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- @testing.only_on("sqlite", "sqlite specific feature")
- def test_column_accessor_sqlite_translated(self):
-@@ -799,7 +799,7 @@
- eq_(r['user_name'], "john")
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
-- eq_(r.keys(), ["user_id", "user_name"])
-+ eq_(list(r.keys()), ["user_id", "user_name"])
-
- def test_column_accessor_labels_w_dots(self):
- users.insert().execute(
-@@ -812,7 +812,7 @@
- eq_(r['query_users.user_id'], 1)
- eq_(r['query_users.user_name'], "john")
- assert "user_name" not in r
-- eq_(r.keys(), ["query_users.user_id", "query_users.user_name"])
-+ eq_(list(r.keys()), ["query_users.user_id", "query_users.user_name"])
-
- def test_column_accessor_unary(self):
- users.insert().execute(
-@@ -889,7 +889,7 @@
- ])
- ).first()
-
-- eq_(row.keys(), ["case_insensitive", "CaseSensitive"])
-+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
-
-@@ -911,7 +911,7 @@
- ])
- ).first()
-
-- eq_(row.keys(), ["case_insensitive", "CaseSensitive"])
-+ eq_(list(row.keys()), ["case_insensitive", "CaseSensitive"])
- eq_(row["case_insensitive"], 1)
- eq_(row["CaseSensitive"], 2)
- eq_(row["Case_insensitive"],1)
-@@ -1072,14 +1072,14 @@
- def test_keys(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute()
-- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
-+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
- r = r.first()
-- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
-+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
-
- def test_items(self):
- users.insert().execute(user_id=1, user_name='foo')
- r = users.select().execute().first()
-- eq_([(x[0].lower(), x[1]) for x in r.items()], [('user_id', 1), ('user_name', 'foo')])
-+ eq_([(x[0].lower(), x[1]) for x in list(r.items())], [('user_id', 1), ('user_name', 'foo')])
-
- def test_len(self):
- users.insert().execute(user_id=1, user_name='foo')
-@@ -1098,8 +1098,8 @@
- r = users.select(users.c.user_id==1).execute().first()
- eq_(r[0], 1)
- eq_(r[1], 'foo')
-- eq_([x.lower() for x in r.keys()], ['user_id', 'user_name'])
-- eq_(r.values(), [1, 'foo'])
-+ eq_([x.lower() for x in list(r.keys())], ['user_id', 'user_name'])
-+ eq_(list(r.values()), [1, 'foo'])
-
- def test_column_order_with_text_query(self):
- # should return values in query order
-@@ -1107,8 +1107,8 @@
- r = testing.db.execute('select user_name, user_id from query_users').first()
- eq_(r[0], 'foo')
- eq_(r[1], 1)
-- eq_([x.lower() for x in r.keys()], ['user_name', 'user_id'])
-- eq_(r.values(), ['foo', 1])
-+ eq_([x.lower() for x in list(r.keys())], ['user_name', 'user_id'])
-+ eq_(list(r.values()), ['foo', 1])
-
- @testing.crashes('oracle', 'FIXME: unknown, varify not fails_on()')
- @testing.crashes('firebird', 'An identifier must begin with a letter')
-@@ -1137,7 +1137,7 @@
- self.assert_(r['_parent'] == 'Hidden parent')
- self.assert_(r['_row'] == 'Hidden row')
- try:
-- print r._parent, r._row
-+ print(r._parent, r._row)
- self.fail('Should not allow access to private attributes')
- except AttributeError:
- pass # expected
-@@ -2334,7 +2334,7 @@
- expr = select(
- [t1.c.t1_id, t2.c.t2_id, t3.c.t3_id],
- from_obj=[(t1.join(t2).outerjoin(t3, criteria))])
-- print expr
-+ print(expr)
- self.assertRows(expr, [(10, 20, 30), (11, 21, None)])
-
- def test_mixed_where(self):
-@@ -2416,7 +2416,7 @@
- select([
- flds.c.intcol, func.row_number().over(order_by=flds.c.strcol)
- ]).execute().fetchall(),
-- [(13, 1L), (5, 2L)]
-+ [(13, 1), (5, 2)]
- )
-
-
-diff -r 9d0639b9d3be test/sql/test_quote.py
---- a/test/sql/test_quote.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_quote.py Sat Apr 27 19:42:17 2013 -0400
-@@ -552,8 +552,8 @@
-
- def a_eq(have, want):
- if have != want:
-- print "Wanted %s" % want
-- print "Received %s" % have
-+ print("Wanted %s" % want)
-+ print("Received %s" % have)
- self.assert_(have == want)
-
- a_eq(unformat('foo'), ['foo'])
-@@ -584,13 +584,13 @@
-
- def a_eq(have, want):
- if have != want:
-- print "Wanted %s" % want
-- print "Received %s" % have
-+ print("Wanted %s" % want)
-+ print("Received %s" % have)
- self.assert_(have == want)
-
- a_eq(unformat('foo'), ['foo'])
- a_eq(unformat('`foo`'), ['foo'])
-- a_eq(unformat(`'foo'`), ["'foo'"])
-+ a_eq(unformat(repr('foo')), ["'foo'"])
- a_eq(unformat('foo.bar'), ['foo', 'bar'])
- a_eq(unformat('`foo`.`bar`'), ['foo', 'bar'])
- a_eq(unformat('foo.`bar`'), ['foo', 'bar'])
-diff -r 9d0639b9d3be test/sql/test_rowcount.py
---- a/test/sql/test_rowcount.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_rowcount.py Sat Apr 27 19:42:17 2013 -0400
-@@ -53,20 +53,20 @@
- # WHERE matches 3, 3 rows changed
- department = employees_table.c.department
- r = employees_table.update(department=='C').execute(department='Z')
-- print "expecting 3, dialect reports %s" % r.rowcount
-+ print("expecting 3, dialect reports %s" % r.rowcount)
- assert r.rowcount == 3
-
- def test_update_rowcount2(self):
- # WHERE matches 3, 0 rows changed
- department = employees_table.c.department
- r = employees_table.update(department=='C').execute(department='C')
-- print "expecting 3, dialect reports %s" % r.rowcount
-+ print("expecting 3, dialect reports %s" % r.rowcount)
- assert r.rowcount == 3
-
- def test_delete_rowcount(self):
- # WHERE matches 3, 3 rows deleted
- department = employees_table.c.department
- r = employees_table.delete(department=='C').execute()
-- print "expecting 3, dialect reports %s" % r.rowcount
-+ print("expecting 3, dialect reports %s" % r.rowcount)
- assert r.rowcount == 3
-
-diff -r 9d0639b9d3be test/sql/test_selectable.py
---- a/test/sql/test_selectable.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_selectable.py Sat Apr 27 19:42:17 2013 -0400
-@@ -174,11 +174,11 @@
-
- def test_clone_append_column(self):
- sel = select([literal_column('1').label('a')])
-- eq_(sel.c.keys(), ['a'])
-+ eq_(list(sel.c.keys()), ['a'])
- cloned = visitors.ReplacingCloningVisitor().traverse(sel)
- cloned.append_column(literal_column('2').label('b'))
- cloned.append_column(func.foo())
-- eq_(cloned.c.keys(), ['a', 'b', 'foo()'])
-+ eq_(list(cloned.c.keys()), ['a', 'b', 'foo()'])
-
- def test_append_column_after_replace_selectable(self):
- basesel = select([literal_column('1').label('a')])
-@@ -362,10 +362,10 @@
-
- def test_join(self):
- a = join(table1, table2)
-- print str(a.select(use_labels=True))
-+ print(str(a.select(use_labels=True)))
- b = table2.alias('b')
- j = join(a, b)
-- print str(j)
-+ print(str(j))
- criterion = a.c.table1_col1 == b.c.col2
- self.assert_(criterion.compare(j.onclause))
-
-@@ -949,7 +949,7 @@
- primary_key=True), Column('x', Integer))
- d = Table('d', meta, Column('id', Integer, ForeignKey('c.id'),
- primary_key=True), Column('x', Integer))
-- print list(a.join(b, a.c.x == b.c.id).primary_key)
-+ print(list(a.join(b, a.c.x == b.c.id).primary_key))
- assert list(a.join(b, a.c.x == b.c.id).primary_key) == [a.c.id]
- assert list(b.join(c, b.c.x == c.c.id).primary_key) == [b.c.id]
- assert list(a.join(b).join(c, c.c.id == b.c.x).primary_key) \
-@@ -1618,7 +1618,7 @@
- def test_names_overlap_label(self):
- sel = self._names_overlap().apply_labels()
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['t1_x', 't2_x']
- )
- self._assert_result_keys(sel, ['t1_x', 't2_x'])
-@@ -1632,7 +1632,7 @@
- def test_names_overlap_keys_dont_nolabel(self):
- sel = self._names_overlap_keys_dont()
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['a', 'b']
- )
- self._assert_result_keys(sel, ['x'])
-@@ -1640,7 +1640,7 @@
- def test_names_overlap_keys_dont_label(self):
- sel = self._names_overlap_keys_dont().apply_labels()
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['t1_a', 't2_b']
- )
- self._assert_result_keys(sel, ['t1_x', 't2_x'])
-@@ -1654,7 +1654,7 @@
- def test_labels_overlap_nolabel(self):
- sel = self._labels_overlap()
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['x_id', 'id']
- )
- self._assert_result_keys(sel, ['x_id', 'id'])
-@@ -1663,7 +1663,7 @@
- sel = self._labels_overlap().apply_labels()
- t2 = sel.froms[1]
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['t_x_id', t2.c.id.anon_label]
- )
- self._assert_result_keys(sel, ['t_x_id', 'id_1'])
-@@ -1677,12 +1677,12 @@
-
- def test_labels_overlap_keylabels_dont_nolabel(self):
- sel = self._labels_overlap_keylabels_dont()
-- eq_(sel.c.keys(), ['a', 'b'])
-+ eq_(list(sel.c.keys()), ['a', 'b'])
- self._assert_result_keys(sel, ['x_id', 'id'])
-
- def test_labels_overlap_keylabels_dont_label(self):
- sel = self._labels_overlap_keylabels_dont().apply_labels()
-- eq_(sel.c.keys(), ['t_a', 't_x_b'])
-+ eq_(list(sel.c.keys()), ['t_a', 't_x_b'])
- self._assert_result_keys(sel, ['t_x_id', 'id_1'])
-
- def _keylabels_overlap_labels_dont(self):
-@@ -1693,13 +1693,13 @@
-
- def test_keylabels_overlap_labels_dont_nolabel(self):
- sel = self._keylabels_overlap_labels_dont()
-- eq_(sel.c.keys(), ['x_id', 'id'])
-+ eq_(list(sel.c.keys()), ['x_id', 'id'])
- self._assert_result_keys(sel, ['a', 'b'])
-
- def test_keylabels_overlap_labels_dont_label(self):
- sel = self._keylabels_overlap_labels_dont().apply_labels()
- t2 = sel.froms[1]
-- eq_(sel.c.keys(), ['t_x_id', t2.c.id.anon_label])
-+ eq_(list(sel.c.keys()), ['t_x_id', t2.c.id.anon_label])
- self._assert_result_keys(sel, ['t_a', 't_x_b'])
- self._assert_subq_result_keys(sel, ['t_a', 't_x_b'])
-
-@@ -1711,14 +1711,14 @@
-
- def test_keylabels_overlap_labels_overlap_nolabel(self):
- sel = self._keylabels_overlap_labels_overlap()
-- eq_(sel.c.keys(), ['x_a', 'a'])
-+ eq_(list(sel.c.keys()), ['x_a', 'a'])
- self._assert_result_keys(sel, ['x_id', 'id'])
- self._assert_subq_result_keys(sel, ['x_id', 'id'])
-
- def test_keylabels_overlap_labels_overlap_label(self):
- sel = self._keylabels_overlap_labels_overlap().apply_labels()
- t2 = sel.froms[1]
-- eq_(sel.c.keys(), ['t_x_a', t2.c.a.anon_label])
-+ eq_(list(sel.c.keys()), ['t_x_a', t2.c.a.anon_label])
- self._assert_result_keys(sel, ['t_x_id', 'id_1'])
- self._assert_subq_result_keys(sel, ['t_x_id', 'id_1'])
-
-@@ -1736,7 +1736,7 @@
- def test_keys_overlap_names_dont_label(self):
- sel = self._keys_overlap_names_dont().apply_labels()
- eq_(
-- sel.c.keys(),
-+ list(sel.c.keys()),
- ['t1_x', 't2_x']
- )
- self._assert_result_keys(sel, ['t1_a', 't2_b'])
-diff -r 9d0639b9d3be test/sql/test_types.py
---- a/test/sql/test_types.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_types.py Sat Apr 27 19:42:17 2013 -0400
-@@ -141,22 +141,24 @@
- eq_(types.Integer().python_type, int)
- eq_(types.Numeric().python_type, decimal.Decimal)
- eq_(types.Numeric(asdecimal=False).python_type, float)
-- # Py3K
-- #eq_(types.LargeBinary().python_type, bytes)
-- # Py2K
-- eq_(types.LargeBinary().python_type, str)
-- # end Py2K
-+# start Py3K
-+ eq_(types.LargeBinary().python_type, bytes)
-+# end Py3K
-+# start Py2K
-+# eq_(types.LargeBinary().python_type, str)
-+# end Py2K
- eq_(types.Float().python_type, float)
- eq_(types.Interval().python_type, datetime.timedelta)
- eq_(types.Date().python_type, datetime.date)
- eq_(types.DateTime().python_type, datetime.datetime)
-- # Py3K
-- #eq_(types.String().python_type, unicode)
-- # Py2K
-+# start Py3K
- eq_(types.String().python_type, str)
-- # end Py2K
-- eq_(types.Unicode().python_type, unicode)
-- eq_(types.String(convert_unicode=True).python_type, unicode)
-+# end Py3K
-+# start Py2K
-+# eq_(types.String().python_type, str)
-+# end Py2K
-+ eq_(types.Unicode().python_type, str)
-+ eq_(types.String(convert_unicode=True).python_type, str)
-
- assert_raises(
- NotImplementedError,
-@@ -257,14 +259,14 @@
- def test_processing(self):
- users = self.tables.users
- users.insert().execute(
-- user_id=2, goofy='jack', goofy2='jack', goofy4=u'jack',
-- goofy7=u'jack', goofy8=12, goofy9=12)
-+ user_id=2, goofy='jack', goofy2='jack', goofy4='jack',
-+ goofy7='jack', goofy8=12, goofy9=12)
- users.insert().execute(
-- user_id=3, goofy='lala', goofy2='lala', goofy4=u'lala',
-- goofy7=u'lala', goofy8=15, goofy9=15)
-+ user_id=3, goofy='lala', goofy2='lala', goofy4='lala',
-+ goofy7='lala', goofy8=15, goofy9=15)
- users.insert().execute(
-- user_id=4, goofy='fred', goofy2='fred', goofy4=u'fred',
-- goofy7=u'fred', goofy8=9, goofy9=9)
-+ user_id=4, goofy='fred', goofy2='fred', goofy4='fred',
-+ goofy7='fred', goofy8=9, goofy9=9)
-
- l = users.select().order_by(users.c.user_id).execute().fetchall()
- for assertstr, assertint, assertint2, row in zip(
-@@ -278,7 +280,7 @@
- eq_(row[5], assertint)
- eq_(row[6], assertint2)
- for col in row[3], row[4]:
-- assert isinstance(col, unicode)
-+ assert isinstance(col, str)
-
- def test_typedecorator_impl(self):
- for impl_, exp, kw in [
-@@ -715,9 +717,9 @@
- expected
- )
-
-- data = u"Alors vous imaginez ma surprise, au lever du jour, quand "\
-- u"une drôle de petite voix m’a réveillé. "\
-- u"Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
-+ data = "Alors vous imaginez ma surprise, au lever du jour, quand "\
-+ "une drôle de petite voix m’a réveillé. "\
-+ "Elle disait: « S’il vous plaît… dessine-moi un mouton! »"
-
- def test_unicode_warnings_typelevel_native_unicode(self):
-
-@@ -726,13 +728,14 @@
- dialect = default.DefaultDialect()
- dialect.supports_unicode_binds = True
- uni = u.dialect_impl(dialect).bind_processor(dialect)
-- # Py3K
-- #assert_raises(exc.SAWarning, uni, b'x')
-- #assert isinstance(uni(unicodedata), str)
-- # Py2K
-- assert_raises(exc.SAWarning, uni, 'x')
-- assert isinstance(uni(unicodedata), unicode)
-- # end Py2K
-+# start Py3K
-+ assert_raises(exc.SAWarning, uni, b'x')
-+ assert isinstance(uni(unicodedata), str)
-+# end Py3K
-+# start Py2K
-+# assert_raises(exc.SAWarning, uni, 'x')
-+# assert isinstance(uni(unicodedata), unicode)
-+# end Py2K
-
- def test_unicode_warnings_typelevel_sqla_unicode(self):
- unicodedata = self.data
-@@ -740,13 +743,14 @@
- dialect = default.DefaultDialect()
- dialect.supports_unicode_binds = False
- uni = u.dialect_impl(dialect).bind_processor(dialect)
-- # Py3K
-- #assert_raises(exc.SAWarning, uni, b'x')
-- #assert isinstance(uni(unicodedata), bytes)
-- # Py2K
-- assert_raises(exc.SAWarning, uni, 'x')
-- assert isinstance(uni(unicodedata), str)
-- # end Py2K
-+# start Py3K
-+ assert_raises(exc.SAWarning, uni, b'x')
-+ assert isinstance(uni(unicodedata), bytes)
-+# end Py3K
-+# start Py2K
-+# assert_raises(exc.SAWarning, uni, 'x')
-+# assert isinstance(uni(unicodedata), str)
-+# end Py2K
-
- eq_(uni(unicodedata), unicodedata.encode('utf-8'))
-
-@@ -760,13 +764,14 @@
- s = String()
- uni = s.dialect_impl(dialect).bind_processor(dialect)
- # this is not the unicode type - no warning
-- # Py3K
-- #uni(b'x')
-- #assert isinstance(uni(unicodedata), bytes)
-- # Py2K
-- uni('x')
-- assert isinstance(uni(unicodedata), str)
-- # end Py2K
-+# start Py3K
-+ uni(b'x')
-+ assert isinstance(uni(unicodedata), bytes)
-+# end Py3K
-+# start Py2K
-+# uni('x')
-+# assert isinstance(uni(unicodedata), str)
-+# end Py2K
-
- eq_(uni(unicodedata), unicodedata.encode('utf-8'))
-
-@@ -1366,7 +1371,7 @@
- metadata = self.metadata
- t = self._fixture(metadata, Integer, 45)
- val = testing.db.execute("select val from t").scalar()
-- assert isinstance(val, (int, long))
-+ assert isinstance(val, int)
- eq_(val, 45)
-
- @testing.provide_metadata
-diff -r 9d0639b9d3be test/sql/test_unicode.py
---- a/test/sql/test_unicode.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_unicode.py Sat Apr 27 19:42:17 2013 -0400
-@@ -19,13 +19,13 @@
-
- metadata = MetaData(unicode_bind)
- t1 = Table('unitable1', metadata,
-- Column(u'méil', Integer, primary_key=True),
-- Column(u'\u6e2c\u8a66', Integer),
-+ Column('méil', Integer, primary_key=True),
-+ Column('\u6e2c\u8a66', Integer),
- test_needs_fk=True,
- )
-- t2 = Table(u'Unitéble2', metadata,
-- Column(u'méil', Integer, primary_key=True, key="a"),
-- Column(u'\u6e2c\u8a66', Integer, ForeignKey(u'unitable1.méil'),
-+ t2 = Table('Unitéble2', metadata,
-+ Column('méil', Integer, primary_key=True, key="a"),
-+ Column('\u6e2c\u8a66', Integer, ForeignKey('unitable1.méil'),
- key="b"
- ),
- test_needs_fk=True,
-@@ -33,27 +33,27 @@
-
- # Few DBs support Unicode foreign keys
- if testing.against('sqlite'):
-- t3 = Table(u'\u6e2c\u8a66', metadata,
-- Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
-+ t3 = Table('\u6e2c\u8a66', metadata,
-+ Column('\u6e2c\u8a66_id', Integer, primary_key=True,
- autoincrement=False),
-- Column(u'unitable1_\u6e2c\u8a66', Integer,
-- ForeignKey(u'unitable1.\u6e2c\u8a66')
-+ Column('unitable1_\u6e2c\u8a66', Integer,
-+ ForeignKey('unitable1.\u6e2c\u8a66')
- ),
-- Column(u'Unitéble2_b', Integer,
-- ForeignKey(u'Unitéble2.b')
-+ Column('Unitéble2_b', Integer,
-+ ForeignKey('Unitéble2.b')
- ),
-- Column(u'\u6e2c\u8a66_self', Integer,
-- ForeignKey(u'\u6e2c\u8a66.\u6e2c\u8a66_id')
-+ Column('\u6e2c\u8a66_self', Integer,
-+ ForeignKey('\u6e2c\u8a66.\u6e2c\u8a66_id')
- ),
- test_needs_fk=True,
- )
- else:
-- t3 = Table(u'\u6e2c\u8a66', metadata,
-- Column(u'\u6e2c\u8a66_id', Integer, primary_key=True,
-+ t3 = Table('\u6e2c\u8a66', metadata,
-+ Column('\u6e2c\u8a66_id', Integer, primary_key=True,
- autoincrement=False),
-- Column(u'unitable1_\u6e2c\u8a66', Integer),
-- Column(u'Unitéble2_b', Integer),
-- Column(u'\u6e2c\u8a66_self', Integer),
-+ Column('unitable1_\u6e2c\u8a66', Integer),
-+ Column('Unitéble2_b', Integer),
-+ Column('\u6e2c\u8a66_self', Integer),
- test_needs_fk=True,
- )
- metadata.create_all()
-@@ -72,42 +72,42 @@
- del unicode_bind
-
- def test_insert(self):
-- t1.insert().execute({u'méil':1, u'\u6e2c\u8a66':5})
-+ t1.insert().execute({'méil':1, '\u6e2c\u8a66':5})
- t2.insert().execute({'a':1, 'b':1})
-- t3.insert().execute({u'\u6e2c\u8a66_id': 1,
-- u'unitable1_\u6e2c\u8a66': 5,
-- u'Unitéble2_b': 1,
-- u'\u6e2c\u8a66_self': 1})
-+ t3.insert().execute({'\u6e2c\u8a66_id': 1,
-+ 'unitable1_\u6e2c\u8a66': 5,
-+ 'Unitéble2_b': 1,
-+ '\u6e2c\u8a66_self': 1})
-
- assert t1.select().execute().fetchall() == [(1, 5)]
- assert t2.select().execute().fetchall() == [(1, 1)]
- assert t3.select().execute().fetchall() == [(1, 5, 1, 1)]
-
- def test_reflect(self):
-- t1.insert().execute({u'méil':2, u'\u6e2c\u8a66':7})
-+ t1.insert().execute({'méil':2, '\u6e2c\u8a66':7})
- t2.insert().execute({'a':2, 'b':2})
-- t3.insert().execute({u'\u6e2c\u8a66_id': 2,
-- u'unitable1_\u6e2c\u8a66': 7,
-- u'Unitéble2_b': 2,
-- u'\u6e2c\u8a66_self': 2})
-+ t3.insert().execute({'\u6e2c\u8a66_id': 2,
-+ 'unitable1_\u6e2c\u8a66': 7,
-+ 'Unitéble2_b': 2,
-+ '\u6e2c\u8a66_self': 2})
-
- meta = MetaData(unicode_bind)
- tt1 = Table(t1.name, meta, autoload=True)
- tt2 = Table(t2.name, meta, autoload=True)
- tt3 = Table(t3.name, meta, autoload=True)
-
-- tt1.insert().execute({u'méil':1, u'\u6e2c\u8a66':5})
-- tt2.insert().execute({u'méil':1, u'\u6e2c\u8a66':1})
-- tt3.insert().execute({u'\u6e2c\u8a66_id': 1,
-- u'unitable1_\u6e2c\u8a66': 5,
-- u'Unitéble2_b': 1,
-- u'\u6e2c\u8a66_self': 1})
-+ tt1.insert().execute({'méil':1, '\u6e2c\u8a66':5})
-+ tt2.insert().execute({'méil':1, '\u6e2c\u8a66':1})
-+ tt3.insert().execute({'\u6e2c\u8a66_id': 1,
-+ 'unitable1_\u6e2c\u8a66': 5,
-+ 'Unitéble2_b': 1,
-+ '\u6e2c\u8a66_self': 1})
-
-- self.assert_(tt1.select(order_by=desc(u'méil')).execute().fetchall() ==
-+ self.assert_(tt1.select(order_by=desc('méil')).execute().fetchall() ==
- [(2, 7), (1, 5)])
-- self.assert_(tt2.select(order_by=desc(u'méil')).execute().fetchall() ==
-+ self.assert_(tt2.select(order_by=desc('méil')).execute().fetchall() ==
- [(2, 2), (1, 1)])
-- self.assert_(tt3.select(order_by=desc(u'\u6e2c\u8a66_id')).
-+ self.assert_(tt3.select(order_by=desc('\u6e2c\u8a66_id')).
- execute().fetchall() ==
- [(2, 7, 2, 2), (1, 5, 1, 1)])
- meta.drop_all()
-@@ -117,7 +117,7 @@
- def test_default_exec(self):
- metadata = MetaData(testing.db)
- t1 = Table('t1', metadata,
-- Column(u'special_col', Integer, Sequence('special_col'), primary_key=True),
-+ Column('special_col', Integer, Sequence('special_col'), primary_key=True),
- Column('data', String(50)) # to appease SQLite without DEFAULT VALUES
- )
- metadata.create_all()
-@@ -128,8 +128,8 @@
- # reset the identifier preparer, so that we can force it to cache
- # a unicode identifier
- engine.dialect.identifier_preparer = engine.dialect.preparer(engine.dialect)
-- select([column(u'special_col')]).select_from(t1).execute().close()
-- assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), unicode)
-+ select([column('special_col')]).select_from(t1).execute().close()
-+ assert isinstance(engine.dialect.identifier_preparer.format_sequence(Sequence('special_col')), str)
-
- # now execute, run the sequence. it should run in u"Special_col.nextid" or similar as
- # a unicode object; cx_oracle asserts that this is None or a String (postgresql lets it pass thru).
-diff -r 9d0639b9d3be test/sql/test_update.py
---- a/test/sql/test_update.py Sat Apr 27 17:51:52 2013 -0400
-+++ b/test/sql/test_update.py Sat Apr 27 19:42:17 2013 -0400
-@@ -242,7 +242,7 @@
- 'WHERE '
- 'users.id = addresses.user_id AND '
- 'addresses.email_address = :email_address_1',
-- checkparams={u'email_address_1': 'e1', 'name': 'newname'})
-+ checkparams={'email_address_1': 'e1', 'name': 'newname'})
-
- def test_render_multi_table(self):
- users = self.tables.users
-@@ -250,8 +250,8 @@
- dingalings = self.tables.dingalings
-
- checkparams = {
-- u'email_address_1': 'e1',
-- u'id_1': 2,
-+ 'email_address_1': 'e1',
-+ 'id_1': 2,
- 'name': 'newname'
- }
-
-@@ -285,15 +285,15 @@
- 'WHERE '
- 'users.id = addresses.user_id AND '
- 'addresses.email_address = %s',
-- checkparams={u'email_address_1': 'e1', 'name': 'newname'},
-+ checkparams={'email_address_1': 'e1', 'name': 'newname'},
- dialect=mysql.dialect())
-
- def test_render_subquery(self):
- users, addresses = self.tables.users, self.tables.addresses
-
- checkparams = {
-- u'email_address_1': 'e1',
-- u'id_1': 7,
-+ 'email_address_1': 'e1',
-+ 'id_1': 7,
- 'name': 'newname'
- }
-