Remove legacy calling style of select()
Resolve the following SADeprecationWarning warning:

    The legacy calling style of select() is deprecated and will be removed
    in SQLAlchemy 2.0. Please use the new calling style described at
    select().

Change-Id: Ic5f7240e790425d2689c6870483748650a49bc3d
Signed-off-by: Stephen Finucane <stephenfin@redhat.com>
commit 8d7607266c
parent 4c1eb966c0
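For reference, the two calling styles differ only in how the column list is passed to select(). A minimal sketch, assuming SQLAlchemy 1.4 or newer and a made-up `resource` table (none of these names come from oslo.db itself):

    import sqlalchemy as sa

    metadata = sa.MetaData()
    # Hypothetical table, purely for illustration.
    resource = sa.Table(
        'resource', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(255)),
    )

    # Legacy 1.x calling style; emits SADeprecationWarning on 1.4 and is
    # rejected outright on 2.0:
    #     sel = sa.select([resource.c.id, resource.c.name])

    # New calling style, accepted on both SQLAlchemy 1.4 and 2.0: the
    # columns are passed positionally rather than as a single list.
    sel = sa.select(resource.c.id, resource.c.name)
    print(sel)  # renders the SELECT statement

The hunks below apply exactly this transformation throughout the library and its tests.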
@@ -84,7 +84,7 @@ is preferred:
 
     @enginefacade.reader.connection
     def _refresh_from_db(context, cache):
-        sel = sa.select([table.c.id, table.c.name])
+        sel = sa.select(table.c.id, table.c.name)
         res = context.connection.execute(sel).fetchall()
         cache.id_cache = {r[1]: r[0] for r in res}
         cache.str_cache = {r[0]: r[1] for r in res}
@@ -70,7 +70,7 @@ def _connect_ping_listener(connection, branch):
     try:
         # run a SELECT 1. use a core select() so that
         # any details like that needed by the backend are handled.
-        connection.scalar(select([1]))
+        connection.scalar(select(1))
     except exception.DBConnectionError:
         # catch DBConnectionError, which is raised by the filter
         # system.
@@ -80,7 +80,7 @@ def _connect_ping_listener(connection, branch):
         # run the select again to re-validate the Connection.
         LOG.exception(
             'Database connection was found disconnected; reconnecting')
-        connection.scalar(select([1]))
+        connection.scalar(select(1))
     finally:
         connection.should_close_with_result = save_should_close_with_result
 
@@ -362,7 +362,7 @@ def _init_events(engine, sqlite_synchronous=True, sqlite_fk=False, **kw):
             # emit our own BEGIN, checking for existing
             # transactional state
             if 'in_transaction' not in conn.info:
-                conn.execute("BEGIN")
+                conn.execute(sqlalchemy.text("BEGIN"))
                 conn.info['in_transaction'] = True
 
         @sqlalchemy.event.listens_for(engine, "rollback")
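The hunk above also swaps a raw SQL string for sqlalchemy.text(): passing a plain string to Connection.execute() is a separate SQLAlchemy 1.4 deprecation (removed in 2.0), so textual SQL has to be wrapped explicitly. A minimal sketch under that assumption, using a throwaway in-memory SQLite engine and a side-effect-free SELECT 1 in place of the BEGIN used above:

    import sqlalchemy

    # In-memory SQLite engine, purely for illustration.
    engine = sqlalchemy.create_engine('sqlite://')

    with engine.connect() as conn:
        # Deprecated on 1.4, removed on 2.0:
        #     conn.execute("SELECT 1")
        # Wrap textual SQL in text() (or use Connection.exec_driver_sql()
        # for driver-level strings):
        result = conn.execute(sqlalchemy.text("SELECT 1"))
        print(result.scalar())  # 1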
@@ -485,8 +485,12 @@ def drop_old_duplicate_entries_from_table(engine, table_name,
     columns_for_select.extend(columns_for_group_by)
 
     duplicated_rows_select = sqlalchemy.sql.select(
-        columns_for_select, group_by=columns_for_group_by,
-        having=func.count(table.c.id) > 1)
+        *columns_for_select,
+    ).group_by(
+        *columns_for_group_by
+    ).having(
+        func.count(table.c.id) > 1
+    )
 
     for row in engine.execute(duplicated_rows_select).fetchall():
         # NOTE(boris-42): Do not remove row that has the biggest ID.
@@ -497,7 +501,8 @@ def drop_old_duplicate_entries_from_table(engine, table_name,
             delete_condition &= table.c[name] == row[name]
 
         rows_to_delete_select = sqlalchemy.sql.select(
-            [table.c.id]).where(delete_condition)
+            table.c.id,
+        ).where(delete_condition)
         for row in engine.execute(rows_to_delete_select).fetchall():
             LOG.info("Deleting duplicated row with id: %(id)s from table: "
                      "%(table)s", dict(id=row[0], table=table_name))
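The two drop_old_duplicate_entries_from_table() hunks above are the only spots where keyword arguments have to become generative method calls as well. A minimal sketch of the same transformation, assuming a made-up `dummy` table rather than anything from oslo.db:

    import sqlalchemy as sa
    from sqlalchemy import func

    metadata = sa.MetaData()
    # Hypothetical table standing in for the one the helper operates on.
    table = sa.Table(
        'dummy', metadata,
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(255)),
    )
    columns_for_group_by = [table.c.name]
    columns_for_select = [func.max(table.c.id)]
    columns_for_select.extend(columns_for_group_by)

    # Legacy: select(columns_for_select, group_by=..., having=...).
    # New: unpack the columns positionally and chain the generative
    # Select.group_by() / Select.having() methods instead.
    duplicated_rows_select = sa.select(
        *columns_for_select,
    ).group_by(
        *columns_for_group_by
    ).having(
        func.count(table.c.id) > 1
    )
    print(duplicated_rows_select)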
@@ -37,11 +37,6 @@ class WarningsFixture(fixtures.Fixture):
         # ...but filter everything out until we get around to fixing them
         # FIXME(stephenfin): Remove all of these
 
-        warnings.filterwarnings(
-            'once',
-            message=r'The legacy calling style of select\(\) is deprecated .*',
-            category=sqla_exc.SADeprecationWarning)
-
         warnings.filterwarnings(
             'once',
             message=r'The MetaData.bind argument is deprecated .*',
@@ -2004,7 +2004,7 @@ class LiveFacadeTest(db_test_base._DbTestCase):
     def test_external_writer_in_reader(self):
         context = oslo_context.RequestContext()
         with enginefacade.reader.using(context) as session:
-            ping = session.scalar(select([1]))
+            ping = session.scalar(select(1))
             self.assertEqual(1, ping)
 
             # we're definitely a reader
@@ -1165,7 +1165,7 @@ class TestDBDisconnected(TestsExceptionFilter):
         with self._fixture(dialect_name, exc_obj, 1, is_disconnect):
             conn = self.engine.connect()
             with conn.begin():
-                self.assertEqual(1, conn.scalar(sqla.select([1])))
+                self.assertEqual(1, conn.scalar(sqla.select(1)))
                 self.assertFalse(conn.closed)
                 self.assertFalse(conn.invalidated)
                 self.assertTrue(conn.in_transaction())
@@ -1178,7 +1178,7 @@ class TestDBDisconnected(TestsExceptionFilter):
 
         # test implicit execution
         with self._fixture(dialect_name, exc_obj, 1):
-            self.assertEqual(1, self.engine.scalar(sqla.select([1])))
+            self.assertEqual(1, self.engine.scalar(sqla.select(1)))
 
     def test_mariadb_error_1927(self):
         for code in [1927]:
@@ -1286,7 +1286,7 @@ class TestDBConnectRetry(TestsExceptionFilter):
             2, -1
         )
         # conn is good
-        self.assertEqual(1, conn.scalar(sqla.select([1])))
+        self.assertEqual(1, conn.scalar(sqla.select(1)))
 
     def test_connect_retry_past_failure(self):
         conn = self._run_test(
@@ -1295,7 +1295,7 @@ class TestDBConnectRetry(TestsExceptionFilter):
             2, 3
         )
         # conn is good
-        self.assertEqual(1, conn.scalar(sqla.select([1])))
+        self.assertEqual(1, conn.scalar(sqla.select(1)))
 
     def test_connect_retry_not_candidate_exception(self):
         self.assertRaises(
@@ -1362,7 +1362,7 @@ class TestDBConnectPingWrapping(TestsExceptionFilter):
             self, dialect_name, exc_obj, is_disconnect=True):
         with self._fixture(dialect_name, exc_obj, 3, is_disconnect):
             conn = self.engine.connect()
-            self.assertEqual(1, conn.scalar(sqla.select([1])))
+            self.assertEqual(1, conn.scalar(sqla.select(1)))
             conn.close()
 
         with self._fixture(dialect_name, exc_obj, 1, is_disconnect):
@@ -121,7 +121,7 @@ class SQLiteSavepointTest(db_test_base._DbTestCase):
             0,
             conn.scalar(
                 sqlalchemy.select(
-                    [sqlalchemy.func.count(self.test_table.c.id)],
+                    sqlalchemy.func.count(self.test_table.c.id),
                 ).select_from(self.test_table)
             )
         )
@@ -120,7 +120,7 @@ class UpdateMatchTest(db_test_base._DbTestCase):
 
     def _assert_row(self, pk, values):
         row = self.session.execute(
-            sql.select([MyModel.__table__]).where(MyModel.__table__.c.id == pk)
+            sql.select(MyModel.__table__).where(MyModel.__table__.c.id == pk)
         ).first()
         values['id'] = pk
         self.assertEqual(values, dict(row))
@@ -629,7 +629,7 @@ class TestPaginateQueryActualSQL(test_base.BaseTestCase):
             marker=FakeTable(user_id='hello',
                              enabled=False))
         expected_core_sql = (
-            select([FakeTable]).
+            select(FakeTable).
             order_by(sqlalchemy.asc(FakeTable.enabled)).
             where(cast(FakeTable.enabled, Integer) > 0).
             limit(5)
@@ -648,7 +648,7 @@ class TestPaginateQueryActualSQL(test_base.BaseTestCase):
             ['user_id', 'some_hybrid'],
             sort_dirs=['asc', 'desc'])
         expected_core_sql = (
-            select([FakeTable]).
+            select(FakeTable).
             order_by(sqlalchemy.asc(FakeTable.user_id)).
             order_by(sqlalchemy.desc(FakeTable.some_hybrid)).
             limit(5)
@@ -716,8 +716,10 @@ class TestMigrationUtils(db_test_base._DbTestCase):
             uniq_values.add(uniq_value)
             expected_ids.append(value['id'])
 
-        real_ids = [row[0] for row in
-                    self.engine.execute(select([test_table.c.id])).fetchall()]
+        real_ids = [
+            row[0] for row in
+            self.engine.execute(select(test_table.c.id)).fetchall()
+        ]
 
         self.assertEqual(len(expected_ids), len(real_ids))
         for id_ in expected_ids: