@ichard26
Last active December 2, 2021 20:01
[14:59:37] Loaded first analysis: /home/ichard26/programming/oss/black/more-consistent-spacing-compiled.json (cached)
[14:59:38] Loaded second analysis: /home/ichard26/programming/oss/black/jephron-conditional-06a02f32f4be1c.json (cached)
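The diffs that follow compare the two analyses across several open-source codebases. The second run parenthesizes multi-line conditional (ternary) expressions, so every `x if cond else y` that spans lines gains an explicit pair of wrapping parentheses. A minimal before/after sketch of the core change, using hypothetical names rather than code from any of the projects below:

Before (first analysis):
    values = [
        transform(item)
        if is_valid(item)
        else item
        for item in items
    ]

After (second analysis):
    values = [
        (
            transform(item)
            if is_valid(item)
            else item
        )
        for item in items
    ]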
--- a/attrs:src/attr/_funcs.py
+++ b/attrs:src/attr/_funcs.py
@@ -218,42 +218,48 @@
elif isinstance(v, (tuple, list, set, frozenset)):
cf = v.__class__ if retain is True else list
rv.append(
cf(
[
- astuple(
- j,
- recurse=True,
- filter=filter,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
+ (
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
)
- if has(j.__class__)
- else j
for j in v
]
)
)
elif isinstance(v, dict):
df = v.__class__ if retain is True else dict
rv.append(
df(
(
- astuple(
- kk,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(kk.__class__)
- else kk,
- astuple(
- vv,
- tuple_factory=tuple_factory,
- retain_collection_types=retain,
- )
- if has(vv.__class__)
- else vv,
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk
+ ),
+ (
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv
+ ),
)
for kk, vv in iteritems(v)
)
)
else:
--- a/attrs:src/attr/_make.py
+++ b/attrs:src/attr/_make.py
@@ -2754,13 +2754,15 @@
if name != "metadata":
bound_setattr(name, value)
else:
bound_setattr(
name,
- metadata_proxy(value)
- if value
- else _empty_metadata_singleton,
+ (
+ metadata_proxy(value)
+ if value
+ else _empty_metadata_singleton
+ ),
)
_a = [
Attribute(
--- a/bandersnatch:src/bandersnatch/tests/test_main.py
+++ b/bandersnatch:src/bandersnatch/tests/test_main.py
@@ -61,13 +61,15 @@
def test_main_reads_config_values(mirror_mock: mock.MagicMock, tmpdir: Path) -> None:
base_config_path = Path(bandersnatch.__file__).parent / "unittest.conf"
diff_file = Path(tempfile.gettempdir()) / "srv/pypi/mirrored-files"
config_lines = [
- f"diff-file = {diff_file.as_posix()}\n"
- if line.startswith("diff-file")
- else line
+ (
+ f"diff-file = {diff_file.as_posix()}\n"
+ if line.startswith("diff-file")
+ else line
+ )
for line in base_config_path.read_text().splitlines()
]
config_path = tmpdir / "unittest.conf"
config_path.write_text("\n".join(config_lines), encoding="utf-8")
sys.argv = ["bandersnatch", "-c", str(config_path), "mirror"]
--- a/django:django/contrib/admin/options.py
+++ b/django:django/contrib/admin/options.py
@@ -1825,14 +1825,16 @@
else:
readonly_fields = self.get_readonly_fields(request, obj)
adminForm = helpers.AdminForm(
form,
list(fieldsets),
- # Clear prepopulated fields on a view-only form to avoid a crash.
- self.get_prepopulated_fields(request, obj)
- if add or self.has_change_permission(request, obj)
- else {},
+ (
+ # Clear prepopulated fields on a view-only form to avoid a crash.
+ self.get_prepopulated_fields(request, obj)
+ if add or self.has_change_permission(request, obj)
+ else {}
+ ),
readonly_fields,
model_admin=self,
)
media = self.media + adminForm.media
--- a/django:django/contrib/admin/templatetags/admin_list.py
+++ b/django:django/contrib/admin/templatetags/admin_list.py
@@ -165,13 +165,13 @@
"ascending": order_type == "asc",
"sort_priority": sort_priority,
"url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
"url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
"url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
- "class_attrib": format_html(' class="{}"', ' '.join(th_classes))
- if th_classes
- else '',
+ "class_attrib": (
+ format_html(' class="{}"', ' '.join(th_classes)) if th_classes else ''
+ ),
}
def _boolean_icon(field_val):
icon_url = static(
@@ -261,13 +261,15 @@
attr = pk
value = result.serializable_value(attr)
link_or_text = format_html(
'<a href="{}"{}>{}</a>',
url,
- format_html(' data-popup-opener="{}"', value)
- if cl.is_popup
- else '',
+ (
+ format_html(' data-popup-opener="{}"', value)
+ if cl.is_popup
+ else ''
+ ),
result_repr,
)
yield format_html(
'<{}{}>{}</{}>', table_tag, row_class, link_or_text, table_tag
--- a/django:django/contrib/auth/management/commands/createsuperuser.py
+++ b/django:django/contrib/auth/management/commands/createsuperuser.py
@@ -264,19 +264,23 @@
def _get_input_message(self, field, default=None):
return '%s%s%s: ' % (
capfirst(field.verbose_name),
" (leave blank to use '%s')" % default if default else '',
- ' (%s.%s)'
- % (
- field.remote_field.model._meta.object_name,
- field.m2m_target_field_name()
- if field.many_to_many
- else field.remote_field.field_name,
- )
- if field.remote_field
- else '',
+ (
+ ' (%s.%s)'
+ % (
+ field.remote_field.model._meta.object_name,
+ (
+ field.m2m_target_field_name()
+ if field.many_to_many
+ else field.remote_field.field_name
+ ),
+ )
+ if field.remote_field
+ else ''
+ ),
)
def _validate_username(self, username, verbose_field_name, database):
"""Validate username. If invalid, return a string error message."""
if self.username_field.unique:
--- a/django:django/contrib/gis/db/models/functions.py
+++ b/django:django/contrib/gis/db/models/functions.py
@@ -123,13 +123,15 @@
def as_sqlite(self, compiler, connection, **extra_context):
copy = self.copy()
copy.set_source_expressions(
[
- Value(float(expr.value))
- if hasattr(expr, 'value') and isinstance(expr.value, Decimal)
- else expr
+ (
+ Value(float(expr.value))
+ if hasattr(expr, 'value') and isinstance(expr.value, Decimal)
+ else expr
+ )
for expr in copy.get_source_expressions()
]
)
return copy.as_sql(compiler, connection, **extra_context)
--- a/django:django/contrib/postgres/search.py
+++ b/django:django/contrib/postgres/search.py
@@ -107,13 +107,15 @@
def as_sql(self, compiler, connection, function=None, template=None):
clone = self.copy()
clone.set_source_expressions(
[
Coalesce(
- expression
- if isinstance(expression.output_field, (CharField, TextField))
- else Cast(expression, TextField()),
+ (
+ expression
+ if isinstance(expression.output_field, (CharField, TextField))
+ else Cast(expression, TextField())
+ ),
Value(''),
)
for expression in clone.get_source_expressions()
]
)
--- a/django:django/core/management/base.py
+++ b/django:django/core/management/base.py
@@ -516,13 +516,15 @@
for issues, group_name in sorted_issues:
if issues:
visible_issue_count += len(issues)
formatted = (
- self.style.ERROR(str(e))
- if e.is_serious()
- else self.style.WARNING(str(e))
+ (
+ self.style.ERROR(str(e))
+ if e.is_serious()
+ else self.style.WARNING(str(e))
+ )
for e in issues
)
formatted = "\n".join(sorted(formatted))
body += '\n%s:\n%s\n' % (group_name, formatted)
@@ -531,15 +533,19 @@
if display_num_errors:
if visible_issue_count:
footer += '\n'
footer += "System check identified %s (%s silenced)." % (
- "no issues"
- if visible_issue_count == 0
- else "1 issue"
- if visible_issue_count == 1
- else "%s issues" % visible_issue_count,
+ (
+ "no issues"
+ if visible_issue_count == 0
+ else (
+ "1 issue"
+ if visible_issue_count == 1
+ else "%s issues" % visible_issue_count
+ )
+ ),
len(all_issues) - visible_issue_count,
)
if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues):
msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
--- a/django:django/core/management/commands/inspectdb.py
+++ b/django:django/core/management/commands/inspectdb.py
@@ -198,12 +198,16 @@
extra_params['blank'] = True
extra_params['null'] = True
field_desc = '%s = %s%s' % (
att_name,
- # Custom fields will have a dotted path
- '' if '.' in field_type else 'models.',
+ (
+ # Custom fields will have a dotted path
+ ''
+ if '.' in field_type
+ else 'models.'
+ ),
field_type,
)
if field_type.startswith(('ForeignKey(', 'OneToOneField(')):
field_desc += ', models.DO_NOTHING'
--- a/django:django/db/backends/base/creation.py
+++ b/django:django/db/backends/base/creation.py
@@ -167,11 +167,11 @@
"""
Return display string for a database for use in various actions.
"""
return "'%s'%s" % (
self.connection.alias,
- " ('%s')" % database_name if verbosity >= 2 else '',
+ (" ('%s')" % database_name) if verbosity >= 2 else '',
)
def _get_test_db_name(self):
"""
Internal implementation - return the name of the test DB that will be
--- a/django:django/db/backends/base/operations.py
+++ b/django:django/db/backends/base/operations.py
@@ -243,12 +243,12 @@
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return ' '.join(
sql
for sql in (
- 'LIMIT %d' % limit if limit else None,
- 'OFFSET %d' % offset if offset else None,
+ ('LIMIT %d' % limit) if limit else None,
+ ('OFFSET %d' % offset) if offset else None,
)
if sql
)
def last_executed_query(self, cursor, sql, params):
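A related one-line case shows up in the two hunks above: when the conditional already fits on a single line, the new style instead parenthesizes its `%`-formatting operand, making it explicit that the formatting binds tighter than the `if`/`else`. A minimal sketch with hypothetical names:

    suffix = (" ('%s')" % name) if verbose else ''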
--- a/django:django/db/backends/base/schema.py
+++ b/django:django/db/backends/base/schema.py
@@ -610,13 +610,13 @@
field.remote_field.field_name
).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
'name': self._fk_constraint_name(model, field, constraint_suffix),
- 'namespace': '%s.' % self.quote_name(namespace)
- if namespace
- else '',
+ 'namespace': (
+ '%s.' % self.quote_name(namespace) if namespace else ''
+ ),
'column': self.quote_name(field.column),
'to_table': self.quote_name(to_table),
'to_column': self.quote_name(to_column),
'deferrable': self.connection.ops.deferrable_sql(),
}
@@ -1170,13 +1170,13 @@
return (
self.sql_alter_column_collate
% {
'column': self.quote_name(new_field.column),
'type': new_type,
- 'collation': ' ' + self._collate_sql(new_collation)
- if new_collation
- else '',
+ 'collation': (
+ ' ' + self._collate_sql(new_collation) if new_collation else ''
+ ),
},
[],
)
def _alter_many_to_many(self, model, old_field, new_field, strict):
--- a/django:django/db/backends/oracle/operations.py
+++ b/django:django/db/backends/oracle/operations.py
@@ -295,12 +295,12 @@
def limit_offset_sql(self, low_mark, high_mark):
fetch, offset = self._get_limit_offset_params(low_mark, high_mark)
return ' '.join(
sql
for sql in (
- 'OFFSET %d ROWS' % offset if offset else None,
- 'FETCH FIRST %d ROWS ONLY' % fetch if fetch else None,
+ ('OFFSET %d ROWS' % offset) if offset else None,
+ ('FETCH FIRST %d ROWS ONLY' % fetch) if fetch else None,
)
if sql
)
def last_executed_query(self, cursor, sql, params):
--- a/django:django/db/migrations/operations/models.py
+++ b/django:django/db/migrations/operations/models.py
@@ -56,15 +56,15 @@
# manager names
_check_for_duplicates('fields', (name for name, _ in self.fields))
_check_for_duplicates(
'bases',
(
- base._meta.label_lower
- if hasattr(base, '_meta')
- else base.lower()
- if isinstance(base, str)
- else base
+ (
+ base._meta.label_lower
+ if hasattr(base, '_meta')
+ else base.lower() if isinstance(base, str) else base
+ )
for base in self.bases
),
)
_check_for_duplicates('managers', (name for name, _ in self.managers))
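The hunk above also shows how chained conditionals collapse under the new style: once the outer expression is parenthesized, a trailing `x if p else y` that fits on one line is kept inline rather than stacked branch by branch. A sketch mirroring that hunk, with hypothetical names:

    label = (
        obj._meta.label_lower
        if hasattr(obj, '_meta')
        else obj.lower() if isinstance(obj, str) else obj
    )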
--- a/django:django/db/models/base.py
+++ b/django:django/db/models/base.py
@@ -2035,13 +2035,15 @@
# Any field name that is not present in field_names does not exist.
# Also, ordering by m2m fields is not allowed.
opts = cls._meta
valid_fields = set(
chain.from_iterable(
- (f.name, f.attname)
- if not (f.auto_created and not f.concrete)
- else (f.field.related_query_name(),)
+ (
+ (f.name, f.attname)
+ if not (f.auto_created and not f.concrete)
+ else (f.field.related_query_name(),)
+ )
for f in chain(opts.fields, opts.related_objects)
)
)
invalid_fields.extend(fields - valid_fields)
--- a/django:django/db/models/expressions.py
+++ b/django:django/db/models/expressions.py
@@ -182,13 +182,15 @@
def set_source_expressions(self, exprs):
assert not exprs
def _parse_expressions(self, *expressions):
return [
- arg
- if hasattr(arg, 'resolve_expression')
- else (F(arg) if isinstance(arg, str) else Value(arg))
+ (
+ arg
+ if hasattr(arg, 'resolve_expression')
+ else (F(arg) if isinstance(arg, str) else Value(arg))
+ )
for arg in expressions
]
def as_sql(self, compiler, connection):
"""
@@ -256,13 +258,15 @@
"""
c = self.copy()
c.is_summary = summarize
c.set_source_expressions(
[
- expr.resolve_expression(query, allow_joins, reuse, summarize)
- if expr
- else None
+ (
+ expr.resolve_expression(query, allow_joins, reuse, summarize)
+ if expr
+ else None
+ )
for expr in c.get_source_expressions()
]
)
return c
@@ -337,26 +341,20 @@
from the one the database returns.
"""
field = self.output_field
internal_type = field.get_internal_type()
if internal_type == 'FloatField':
- return (
- lambda value, expression, connection: None
- if value is None
- else float(value)
+ return lambda value, expression, connection: (
+ None if value is None else float(value)
)
elif internal_type.endswith('IntegerField'):
- return (
- lambda value, expression, connection: None
- if value is None
- else int(value)
+ return lambda value, expression, connection: (
+ None if value is None else int(value)
)
elif internal_type == 'DecimalField':
- return (
- lambda value, expression, connection: None
- if value is None
- else Decimal(value)
+ return lambda value, expression, connection: (
+ None if value is None else Decimal(value)
)
return self._convert_value_noop
def get_lookup(self, lookup):
return self.output_field.get_lookup(lookup)
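In the lambda hunks above the parentheses move inside the lambda: rather than wrapping the whole `lambda ...: x if p else y`, the new style keeps the lambda header on one line and parenthesizes only its conditional body. A before/after sketch with hypothetical names:

Before:
    to_float = (
        lambda value: None
        if value is None
        else float(value)
    )

After:
    to_float = lambda value: (
        None if value is None else float(value)
    )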
--- a/django:django/db/models/functions/datetime.py
+++ b/django:django/db/models/functions/datetime.py
@@ -288,26 +288,30 @@
):
raise ValueError(
"Cannot truncate DateField '%s' to %s."
% (
field.name,
- output_field.__class__.__name__
- if has_explicit_output_field
- else 'DateTimeField',
+ (
+ output_field.__class__.__name__
+ if has_explicit_output_field
+ else 'DateTimeField'
+ ),
)
)
elif isinstance(field, TimeField) and (
isinstance(output_field, DateTimeField)
or copy.kind in ('year', 'quarter', 'month', 'week', 'day', 'date')
):
raise ValueError(
"Cannot truncate TimeField '%s' to %s."
% (
field.name,
- output_field.__class__.__name__
- if has_explicit_output_field
- else 'DateTimeField',
+ (
+ output_field.__class__.__name__
+ if has_explicit_output_field
+ else 'DateTimeField'
+ ),
)
)
return copy
def convert_value(self, value, expression, connection):
--- a/django:django/db/models/functions/math.py
+++ b/django:django/db/models/functions/math.py
@@ -45,13 +45,15 @@
# arguments are mixed between integer and float or decimal.
# https://www.gaia-gis.it/fossil/libspatialite/tktview?name=0f72cca3a2
clone = self.copy()
clone.set_source_expressions(
[
- Cast(expression, FloatField())
- if isinstance(expression.output_field, IntegerField)
- else expression
+ (
+ Cast(expression, FloatField())
+ if isinstance(expression.output_field, IntegerField)
+ else expression
+ )
for expression in self.get_source_expressions()[::-1]
]
)
return clone.as_sql(compiler, connection, **extra_context)
--- a/django:django/db/models/functions/mixins.py
+++ b/django:django/db/models/functions/mixins.py
@@ -12,13 +12,15 @@
# - MOD(double, double)
output_field = DecimalField(decimal_places=sys.float_info.dig, max_digits=1000)
clone = self.copy()
clone.set_source_expressions(
[
- Cast(expression, output_field)
- if isinstance(expression.output_field, FloatField)
- else expression
+ (
+ Cast(expression, output_field)
+ if isinstance(expression.output_field, FloatField)
+ else expression
+ )
for expression in self.get_source_expressions()
]
)
return clone.as_sql(compiler, connection, **extra_context)
--- a/django:django/db/models/indexes.py
+++ b/django:django/db/models/indexes.py
@@ -189,13 +189,15 @@
return '<%s:%s%s%s%s%s%s%s>' % (
self.__class__.__qualname__,
'' if not self.fields else ' fields=%s' % repr(self.fields),
'' if not self.expressions else ' expressions=%s' % repr(self.expressions),
'' if not self.name else ' name=%s' % repr(self.name),
- ''
- if self.db_tablespace is None
- else ' db_tablespace=%s' % repr(self.db_tablespace),
+ (
+ ''
+ if self.db_tablespace is None
+ else ' db_tablespace=%s' % repr(self.db_tablespace)
+ ),
'' if self.condition is None else ' condition=%s' % self.condition,
'' if not self.include else ' include=%s' % repr(self.include),
'' if not self.opclasses else ' opclasses=%s' % repr(self.opclasses),
)
--- a/django:django/db/models/lookups.py
+++ b/django:django/db/models/lookups.py
@@ -248,13 +248,15 @@
getattr(field, 'get_db_prep_value', None)
or self.lhs.output_field.get_db_prep_value
)
return (
'%s',
- [get_db_prep_value(v, connection, prepared=True) for v in value]
- if self.get_db_prep_lookup_value_is_iterable
- else [get_db_prep_value(value, connection, prepared=True)],
+ (
+ [get_db_prep_value(v, connection, prepared=True) for v in value]
+ if self.get_db_prep_lookup_value_is_iterable
+ else [get_db_prep_value(value, connection, prepared=True)]
+ ),
)
class FieldGetDbPrepValueIterableMixin(FieldGetDbPrepValueMixin):
"""
--- a/django:django/db/models/query.py
+++ b/django:django/db/models/query.py
@@ -73,13 +73,15 @@
(
field,
related_objs,
operator.attrgetter(
*[
- field.attname
- if from_field == 'self'
- else queryset.model._meta.get_field(from_field).attname
+ (
+ field.attname
+ if from_field == 'self'
+ else queryset.model._meta.get_field(from_field).attname
+ )
for from_field in field.from_fields
]
),
)
for field, related_objs in queryset._known_related_objects.items()
@@ -980,13 +982,11 @@
clone = self._values(*_fields, **expressions)
clone._iterable_class = (
NamedValuesListIterable
if named
- else FlatValuesListIterable
- if flat
- else ValuesListIterable
+ else FlatValuesListIterable if flat else ValuesListIterable
)
return clone
def dates(self, field_name, kind, order='ASC'):
"""
@@ -1254,13 +1254,15 @@
clone = self._chain()
names = self._fields
if names is None:
names = set(
chain.from_iterable(
- (field.name, field.attname)
- if hasattr(field, 'attname')
- else (field.name,)
+ (
+ (field.name, field.attname)
+ if hasattr(field, 'attname')
+ else (field.name,)
+ )
for field in self.model._meta.get_fields()
)
)
for alias, annotation in annotations.items():
--- a/django:django/db/models/sql/compiler.py
+++ b/django:django/db/models/sql/compiler.py
@@ -1045,13 +1045,13 @@
klass_info = {
'model': f.remote_field.model,
'field': f,
'reverse': False,
'local_setter': f.set_cached_value,
- 'remote_setter': f.remote_field.set_cached_value
- if f.unique
- else lambda x, y: None,
+ 'remote_setter': (
+ f.remote_field.set_cached_value if f.unique else lambda x, y: None
+ ),
'from_parent': False,
}
related_klass_infos.append(klass_info)
select_fields = []
_, _, _, joins, _, _ = self.query.setup_joins([f.name], opts, root_alias)
--- a/django:django/http/request.py
+++ b/django:django/http/request.py
@@ -166,13 +166,15 @@
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s%s' % (
escape_uri_path(path),
'/' if force_append_slash and not path.endswith('/') else '',
- ('?' + iri_to_uri(self.META.get('QUERY_STRING', '')))
- if self.META.get('QUERY_STRING', '')
- else '',
+ (
+ ('?' + iri_to_uri(self.META.get('QUERY_STRING', '')))
+ if self.META.get('QUERY_STRING', '')
+ else ''
+ ),
)
def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
"""
Attempt to return a signed cookie. If the signature fails or the
--- a/django:django/template/engine.py
+++ b/django:django/template/engine.py
@@ -69,13 +69,15 @@
'file_charset=%s%s%s autoescape=%s>'
% (
self.__class__.__qualname__,
'' if not self.dirs else ' dirs=%s' % repr(self.dirs),
self.app_dirs,
- ''
- if not self.context_processors
- else ' context_processors=%s' % repr(self.context_processors),
+ (
+ ''
+ if not self.context_processors
+ else ' context_processors=%s' % repr(self.context_processors)
+ ),
self.debug,
repr(self.loaders),
repr(self.string_if_invalid),
repr(self.file_charset),
'' if not self.libraries else ' libraries=%s' % repr(self.libraries),
--- a/django:tests/contenttypes_tests/test_views.py
+++ b/django:tests/contenttypes_tests/test_views.py
@@ -148,14 +148,12 @@
@mock.patch('django.apps.apps.get_model')
def test_shortcut_view_with_null_site_fk(self, get_model):
"""
The shortcut view works if a model's ForeignKey to site is None.
"""
- get_model.side_effect = (
- lambda *args, **kwargs: MockSite
- if args[0] == 'sites.Site'
- else ModelWithNullFKToSite
+ get_model.side_effect = lambda *args, **kwargs: (
+ MockSite if args[0] == 'sites.Site' else ModelWithNullFKToSite
)
obj = ModelWithNullFKToSite.objects.create(title='title')
url = '/shortcut/%s/%s/' % (
ContentType.objects.get_for_model(ModelWithNullFKToSite).id,
@@ -170,14 +168,12 @@
"""
When the object has a ManyToManyField to Site, redirect to the current
site if it's attached to the object or to the domain of the first site
found in the m2m relationship.
"""
- get_model.side_effect = (
- lambda *args, **kwargs: MockSite
- if args[0] == 'sites.Site'
- else ModelWithM2MToSite
+ get_model.side_effect = lambda *args, **kwargs: (
+ MockSite if args[0] == 'sites.Site' else ModelWithM2MToSite
)
# get_current_site() will lookup a Site object, so these must match the
# domains in the MockSite model.
MockSite.objects.bulk_create(
--- a/django:tests/db_functions/comparison/test_nullif.py
+++ b/django:tests/db_functions/comparison/test_nullif.py
@@ -21,13 +21,15 @@
self.assertSequenceEqual(
authors,
[
('smithj',),
(
- ''
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ ''
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
),
],
)
def test_null_argument(self):
--- a/django:tests/db_functions/datetime/test_extract_trunc.py
+++ b/django:tests/db_functions/datetime/test_extract_trunc.py
@@ -134,13 +134,15 @@
end_datetime=end_datetime,
start_date=start_datetime.date() if start_datetime else None,
end_date=end_datetime.date() if end_datetime else None,
start_time=start_datetime.time() if start_datetime else None,
end_time=end_datetime.time() if end_datetime else None,
- duration=(end_datetime - start_datetime)
- if start_datetime and end_datetime
- else None,
+ duration=(
+ (end_datetime - start_datetime)
+ if start_datetime and end_datetime
+ else None
+ ),
)
def test_extract_year_exact_lookup(self):
"""
Extract year uses a BETWEEN filter to compare the year to allow indexes
--- a/django:tests/db_functions/text/test_md5.py
+++ b/django:tests/db_functions/text/test_md5.py
@@ -33,13 +33,15 @@
[
'6117323d2cabbc17d44c2b44587f682c',
'ca6d48f6772000141e66591aee49d56c',
'bf2c13bc1154e3d2e7df848cbc8be73d',
'd41d8cd98f00b204e9800998ecf8427e',
- 'd41d8cd98f00b204e9800998ecf8427e'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ 'd41d8cd98f00b204e9800998ecf8427e'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, MD5):
--- a/django:tests/db_functions/text/test_sha1.py
+++ b/django:tests/db_functions/text/test_sha1.py
@@ -33,13 +33,15 @@
[
'e61a3587b3f7a142b8c7b9263c82f8119398ecb7',
'0781e0745a2503e6ded05ed5bc554c421d781b0c',
'198d15ea139de04060caf95bc3e0ec5883cba881',
'da39a3ee5e6b4b0d3255bfef95601890afd80709',
- 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, SHA1):
--- a/django:tests/db_functions/text/test_sha224.py
+++ b/django:tests/db_functions/text/test_sha224.py
@@ -35,13 +35,15 @@
[
'a61303c220731168452cb6acf3759438b1523e768f464e3704e12f70',
'2297904883e78183cb118fc3dc21a610d60daada7b6ebdbc85139f4d',
'eba942746e5855121d9d8f79e27dfdebed81adc85b6bf41591203080',
'd14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f',
- 'd14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ 'd14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, SHA224):
--- a/django:tests/db_functions/text/test_sha256.py
+++ b/django:tests/db_functions/text/test_sha256.py
@@ -33,13 +33,15 @@
[
'ef61a579c907bbed674c0dbcbcf7f7af8f851538eef7b8e58c5bee0b8cfdac4a',
'6e4cce20cd83fc7c202f21a8b2452a68509cf24d1c272a045b5e0cfc43f0d94e',
'3ad2039e3ec0c88973ae1c0fce5a3dbafdd5a1627da0a92312c54ebfcf43988e',
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
- 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, SHA256):
--- a/django:tests/db_functions/text/test_sha384.py
+++ b/django:tests/db_functions/text/test_sha384.py
@@ -33,13 +33,15 @@
[
'9df976bfbcf96c66fbe5cba866cd4deaa8248806f15b69c4010a404112906e4ca7b57e53b9967b80d77d4f5c2982cbc8',
'72202c8005492016cc670219cce82d47d6d2d4273464c742ab5811d691b1e82a7489549e3a73ffa119694f90678ba2e3',
'eda87fae41e59692c36c49e43279c8111a00d79122a282a944e8ba9a403218f049a48326676a43c7ba378621175853b0',
'38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b',
- '38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ '38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, SHA384):
--- a/django:tests/db_functions/text/test_sha512.py
+++ b/django:tests/db_functions/text/test_sha512.py
@@ -37,14 +37,16 @@
'2b719514b5e48cb6ce54687e843a4b3e69a04cdb2a9dc99c3b99bdee419fa7d0',
'b554d182e25fb487a3f2b4285bb8672f98956b5369138e681b467d1f079af116'
'172d88798345a3a7666faf5f35a144c60812d3234dcd35f444624f2faee16857',
'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce'
'47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e',
- 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce'
- '47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
- if connection.features.interprets_empty_strings_as_nulls
- else None,
+ (
+ 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce'
+ '47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e'
+ if connection.features.interprets_empty_strings_as_nulls
+ else None
+ ),
],
)
def test_transform(self):
with register_lookup(CharField, SHA512):
--- a/django:tests/distinct_on_fields/tests.py
+++ b/django:tests/distinct_on_fields/tests.py
@@ -74,13 +74,15 @@
[self.celeb1, self.celeb2],
),
(StaffTag.objects.distinct('staff', 'tag'), [self.st1]),
(
Tag.objects.order_by('parent__pk', 'pk').distinct('parent'),
- [self.t2, self.t4, self.t1]
- if connection.features.nulls_order_largest
- else [self.t1, self.t2, self.t4],
+ (
+ [self.t2, self.t4, self.t1]
+ if connection.features.nulls_order_largest
+ else [self.t1, self.t2, self.t4]
+ ),
),
(
StaffTag.objects.select_related('staff')
.distinct('staff__name')
.order_by('staff__name'),
--- a/django:tests/invalid_models_tests/test_models.py
+++ b/django:tests/invalid_models_tests/test_models.py
@@ -1970,19 +1970,22 @@
),
]
self.assertEqual(
Model.check(databases=self.databases),
- [
- Error(
- "'constraints' refers to the nonexistent field 'missing_field'.",
- obj=Model,
- id='models.E012',
- ),
- ]
- if connection.features.supports_table_check_constraints
- else [],
+ (
+ [
+ Error(
+ "'constraints' refers to the nonexistent field "
+ "'missing_field'.",
+ obj=Model,
+ id='models.E012',
+ ),
+ ]
+ if connection.features.supports_table_check_constraints
+ else []
+ ),
)
@skipUnlessDBFeature('supports_table_check_constraints')
def test_check_constraint_pointing_to_reverse_fk(self):
class Model(models.Model):
@@ -2264,19 +2267,22 @@
),
]
self.assertEqual(
Model.check(databases=self.databases),
- [
- Error(
- "'constraints' refers to the nonexistent field 'missing_field'.",
- obj=Model,
- id='models.E012',
- ),
- ]
- if connection.features.supports_partial_indexes
- else [],
+ (
+ [
+ Error(
+ "'constraints' refers to the nonexistent field "
+ "'missing_field'.",
+ obj=Model,
+ id='models.E012',
+ ),
+ ]
+ if connection.features.supports_partial_indexes
+ else []
+ ),
)
def test_unique_constraint_condition_pointing_to_joined_fields(self):
class Model(models.Model):
age = models.SmallIntegerField()
@@ -2292,19 +2298,21 @@
),
]
self.assertEqual(
Model.check(databases=self.databases),
- [
- Error(
- "'constraints' refers to the joined field 'parent__age__lt'.",
- obj=Model,
- id='models.E041',
- )
- ]
- if connection.features.supports_partial_indexes
- else [],
+ (
+ [
+ Error(
+ "'constraints' refers to the joined field 'parent__age__lt'.",
+ obj=Model,
+ id='models.E041',
+ )
+ ]
+ if connection.features.supports_partial_indexes
+ else []
+ ),
)
def test_unique_constraint_pointing_to_reverse_o2o(self):
class Model(models.Model):
parent = models.OneToOneField('self', models.CASCADE)
@@ -2319,19 +2327,21 @@
),
]
self.assertEqual(
Model.check(databases=self.databases),
- [
- Error(
- "'constraints' refers to the nonexistent field 'model'.",
- obj=Model,
- id='models.E012',
- ),
- ]
- if connection.features.supports_partial_indexes
- else [],
+ (
+ [
+ Error(
+ "'constraints' refers to the nonexistent field 'model'.",
+ obj=Model,
+ id='models.E012',
+ ),
+ ]
+ if connection.features.supports_partial_indexes
+ else []
+ ),
)
def test_deferrable_unique_constraint(self):
class Model(models.Model):
age = models.IntegerField()
--- a/django:tests/postgres_tests/migrations/0001_setup_extensions.py
+++ b/django:tests/postgres_tests/migrations/0001_setup_extensions.py
@@ -38,11 +38,15 @@
BtreeGistExtension(),
CITextExtension(),
# Ensure CreateExtension quotes extension names by creating one with a
# dash in its name.
CreateExtension('uuid-ossp'),
- # CryptoExtension is required for RandomUUID() on PostgreSQL < 13.
- CryptoExtension() if needs_crypto_extension else mock.Mock(),
+ (
+ # CryptoExtension is required for RandomUUID() on PostgreSQL < 13.
+ CryptoExtension()
+ if needs_crypto_extension
+ else mock.Mock()
+ ),
HStoreExtension(),
TrigramExtension(),
UnaccentExtension(),
]
--- a/django:tests/schema/fields.py
+++ b/django:tests/schema/fields.py
@@ -39,13 +39,15 @@
self,
to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
- symmetrical=symmetrical
- if symmetrical is not None
- else (to == RECURSIVE_RELATIONSHIP_CONSTANT),
+ symmetrical=(
+ symmetrical
+ if symmetrical is not None
+ else (to == RECURSIVE_RELATIONSHIP_CONSTANT)
+ ),
through=through,
through_fields=through_fields,
db_constraint=db_constraint,
)
self.swappable = swappable
--- a/hypothesis:hypothesis-python/src/hypothesis/extra/codemods.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/extra/codemods.py
@@ -205,11 +205,13 @@
assign_nospace = cst.AssignEqual(
whitespace_before=cst.SimpleWhitespace(""),
whitespace_after=cst.SimpleWhitespace(""),
)
newargs = [
- arg
- if arg.keyword or arg.star or p.kind is not Parameter.KEYWORD_ONLY
- else arg.with_changes(keyword=cst.Name(p.name), equal=assign_nospace)
+ (
+ arg
+ if arg.keyword or arg.star or p.kind is not Parameter.KEYWORD_ONLY
+ else arg.with_changes(keyword=cst.Name(p.name), equal=assign_nospace)
+ )
for p, arg in zip(signature(func).parameters.values(), updated_node.args)
]
return updated_node.with_changes(args=newargs)
--- a/hypothesis:hypothesis-python/src/hypothesis/extra/ghostwriter.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/extra/ghostwriter.py
@@ -589,13 +589,15 @@
The fancy part is that we'll check the docstring for any known exceptions
which `func` might raise, and catch-and-reject on them... *unless* they're
subtypes of `except_`, which will be handled in an outer try-except block.
"""
args = ", ".join(
- (v or p.name)
- if p.kind is inspect.Parameter.POSITIONAL_ONLY
- else f"{p.name}={v or p.name}"
+ (
+ (v or p.name)
+ if p.kind is inspect.Parameter.POSITIONAL_ONLY
+ else f"{p.name}={v or p.name}"
+ )
for v, p in zip_longest(pass_variables, _get_params(func).values())
)
call = f"{_get_qualname(func, include_module=True)}({args})"
if assign:
call = f"{assign} = {call}"
@@ -654,13 +656,15 @@
exceptions.append(_get_qualname(ex, include_module=True))
# And finally indent the existing test body into a try-except block
# which catches these exceptions and calls `hypothesis.reject()`.
test_body = SUPPRESS_BLOCK.format(
test_body=indent(test_body, prefix=" "),
- exceptions="(" + ", ".join(exceptions) + ")"
- if len(exceptions) > 1
- else exceptions[0],
+ exceptions=(
+ "(" + ", ".join(exceptions) + ")"
+ if len(exceptions) > 1
+ else exceptions[0]
+ ),
)
if assertions:
test_body = f"{test_body}\n{assertions}"
--- a/hypothesis:hypothesis-python/src/hypothesis/extra/pandas/impl.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/extra/pandas/impl.py
@@ -293,13 +293,15 @@
return pandas.Series(result_data, index=index, dtype=dtype)
else:
return pandas.Series(
(),
index=index,
- dtype=dtype
- if dtype is not None
- else draw(dtype_for_elements_strategy(elements)),
+ dtype=(
+ dtype
+ if dtype is not None
+ else draw(dtype_for_elements_strategy(elements))
+ ),
)
return result()
--- a/hypothesis:hypothesis-python/src/hypothesis/internal/conjecture/data.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/internal/conjecture/data.py
@@ -825,13 +825,15 @@
interesting_origin=self.interesting_origin,
buffer=self.buffer,
examples=self.examples,
blocks=self.blocks,
output=self.output,
- extra_information=self.extra_information
- if self.extra_information.has_information()
- else None,
+ extra_information=(
+ self.extra_information
+ if self.extra_information.has_information()
+ else None
+ ),
has_discards=self.has_discards,
target_observations=self.target_observations,
tags=frozenset(self.tags),
forced_indices=self.forced_indices,
)
--- a/hypothesis:hypothesis-python/src/hypothesis/internal/escalation.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/internal/escalation.py
@@ -116,9 +116,13 @@
filename, lineno, *_ = traceback.extract_tb(tb)[-1]
return (
type(exception),
filename,
lineno,
- # Note that if __cause__ is set it is always equal to __context__, explicitly
- # to support introspection when debugging, so we can use that unconditionally.
- get_interesting_origin(exception.__context__) if exception.__context__ else (),
+ (
+ # Note that if __cause__ is set it is always equal to __context__, explicitly
+ # to support introspection when debugging, so we can use that unconditionally.
+ get_interesting_origin(exception.__context__)
+ if exception.__context__
+ else ()
+ ),
)
--- a/hypothesis:hypothesis-python/src/hypothesis/provisional.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/provisional.py
@@ -144,13 +144,15 @@
# It has been extracted to top-level so that we can test it independently
# of `urls()`, which helps with getting non-flaky coverage of the lambda.
_url_fragments_strategy = (
st.lists(
st.builds(
- lambda char, encode: f"%{ord(char):02X}"
- if (encode or char not in FRAGMENT_SAFE_CHARACTERS)
- else char,
+ lambda char, encode: (
+ f"%{ord(char):02X}"
+ if (encode or char not in FRAGMENT_SAFE_CHARACTERS)
+ else char
+ ),
st.characters(min_codepoint=0, max_codepoint=255),
st.booleans(),
),
min_size=1,
)
--- a/hypothesis:hypothesis-python/src/hypothesis/strategies/_internal/types.py
+++ b/hypothesis:hypothesis-python/src/hypothesis/strategies/_internal/types.py
@@ -766,13 +766,15 @@
if not thing.__args__: # pragma: no cover # varies by minor version
return st.functions()
# Note that a list can only appear in __args__ under Python 3.9 with the
# collections.abc version; see https://bugs.python.org/issue42195
return st.functions(
- like=(lambda: None)
- if len(thing.__args__) == 1 or thing.__args__[0] == []
- else (lambda *a, **k: None),
+ like=(
+ (lambda: None)
+ if len(thing.__args__) == 1 or thing.__args__[0] == []
+ else (lambda *a, **k: None)
+ ),
returns=st.from_type(thing.__args__[-1]),
)
@register(typing.TypeVar)
--- a/hypothesis:hypothesis-python/tests/cover/test_lookup.py
+++ b/hypothesis:hypothesis-python/tests/cover/test_lookup.py
@@ -102,12 +102,16 @@
@pytest.mark.parametrize(
"typ",
[
collections.abc.ByteString,
- # These are nonexistent or exist-but-are-not-types on Python 3.6
- typing.Match if sys.version_info[:2] >= (3, 7) else int,
+ (
+ # These are nonexistent or exist-but-are-not-types on Python 3.6
+ typing.Match
+ if sys.version_info[:2] >= (3, 7)
+ else int
+ ),
typing.Pattern if sys.version_info[:2] >= (3, 7) else int,
getattr(re, "Match", int),
getattr(re, "Pattern", int),
],
ids=repr,
--- a/pandas:pandas/core/apply.py
+++ b/pandas:pandas/core/apply.py
@@ -1360,13 +1360,15 @@
>>> kwarg_list = [('a', '<lambda>'), ('a', '<lambda>'), ('b', '<lambda>')]
>>> _make_unique_kwarg_list(kwarg_list)
[('a', '<lambda>_0'), ('a', '<lambda>_1'), ('b', '<lambda>')]
"""
return [
- (pair[0], "_".join([pair[1], str(seq[:i].count(pair))]))
- if seq.count(pair) > 1
- else pair
+ (
+ (pair[0], "_".join([pair[1], str(seq[:i].count(pair))]))
+ if seq.count(pair) > 1
+ else pair
+ )
for i, pair in enumerate(seq)
]
def relabel_result(
--- a/pandas:pandas/core/array_algos/replace.py
+++ b/pandas:pandas/core/array_algos/replace.py
@@ -82,13 +82,15 @@
if not regex:
op = lambda x: operator.eq(x, b)
else:
op = np.vectorize(
- lambda x: bool(re.search(b, x))
- if isinstance(x, str) and isinstance(b, (str, Pattern))
- else False
+ lambda x: (
+ bool(re.search(b, x))
+ if isinstance(x, str) and isinstance(b, (str, Pattern))
+ else False
+ )
)
# GH#32621 use mask to avoid comparing to NAs
if isinstance(a, np.ndarray):
a = a[mask]
--- a/pandas:pandas/core/arrays/integer.py
+++ b/pandas:pandas/core/arrays/integer.py
@@ -101,13 +101,15 @@
# error: List comprehension has incompatible type List[Union[Any,
# dtype, ExtensionDtype]]; expected List[Union[dtype, None, type,
# _SupportsDtype, str, Tuple[Any, Union[int, Sequence[int]]],
# List[Any], _DtypeDict, Tuple[Any, Any]]]
[
- t.numpy_dtype # type: ignore[misc]
- if isinstance(t, BaseMaskedDtype)
- else t
+ (
+ t.numpy_dtype # type: ignore[misc]
+ if isinstance(t, BaseMaskedDtype)
+ else t
+ )
for t in dtypes
],
[],
)
if np.issubdtype(np_dtype, np.integer):
--- a/pandas:pandas/core/arrays/period.py
+++ b/pandas:pandas/core/arrays/period.py
@@ -1201,10 +1201,12 @@
# error: Argument 2 to "repeat" has incompatible type "Optional[int]"; expected
# "Union[Union[int, integer[Any]], Union[bool, bool_], ndarray, Sequence[Union[int,
# integer[Any]]], Sequence[Union[bool, bool_]], Sequence[Sequence[Any]]]"
return [
- np.asarray(x)
- if isinstance(x, (np.ndarray, list, ABCSeries))
- else np.repeat(x, length) # type: ignore[arg-type]
+ (
+ np.asarray(x)
+ if isinstance(x, (np.ndarray, list, ABCSeries))
+ else np.repeat(x, length)
+ ) # type: ignore[arg-type]
for x in fields
]
--- a/pandas:pandas/core/internals/concat.py
+++ b/pandas:pandas/core/internals/concat.py
@@ -142,13 +142,15 @@
for arr in to_concat
]
return type(to_concat_no_proxy[0])._concat_same_type(to_concat, axis=0)
to_concat = [
- arr.to_array(target_dtype)
- if isinstance(arr, NullArrayProxy)
- else cast_to_common_type(arr, target_dtype)
+ (
+ arr.to_array(target_dtype)
+ if isinstance(arr, NullArrayProxy)
+ else cast_to_common_type(arr, target_dtype)
+ )
for arr in to_concat
]
if isinstance(to_concat[0], ExtensionArray):
cls = type(to_concat[0])
--- a/pandas:pandas/core/internals/construction.py
+++ b/pandas:pandas/core/internals/construction.py
@@ -468,13 +468,15 @@
if copy:
# arrays_to_mgr (via form_blocks) won't make copies for EAs
# dtype attr check to exclude EADtype-castable strs
arrays = [
- x
- if not hasattr(x, "dtype") or not isinstance(x.dtype, ExtensionDtype)
- else x.copy()
+ (
+ x
+ if not hasattr(x, "dtype") or not isinstance(x.dtype, ExtensionDtype)
+ else x.copy()
+ )
for x in arrays
]
# TODO: can we get rid of the dt64tz special case above?
return arrays_to_mgr(arrays, columns, index, dtype=dtype, typ=typ, consolidate=copy)
--- a/pandas:pandas/core/reshape/merge.py
+++ b/pandas:pandas/core/reshape/merge.py
@@ -920,13 +920,15 @@
)
elif result._is_level_reference(name):
if isinstance(result.index, MultiIndex):
key_col.name = name
idx_list = [
- result.index.get_level_values(level_name)
- if level_name != name
- else key_col
+ (
+ result.index.get_level_values(level_name)
+ if level_name != name
+ else key_col
+ )
for level_name in result.index.names
]
result.set_index(idx_list, inplace=True)
else:
@@ -1902,13 +1904,15 @@
def flip(xs) -> np.ndarray:
"""unlike np.transpose, this returns an array of tuples"""
# error: Item "ndarray" of "Union[Any, Union[ExtensionArray, ndarray]]" has
# no attribute "_values_for_argsort"
xs = [
- x
- if not is_extension_array_dtype(x)
- else extract_array(x)._values_for_argsort() # type: ignore[union-attr]
+ (
+ x
+ if not is_extension_array_dtype(x)
+ else extract_array(x)._values_for_argsort()
+ ) # type: ignore[union-attr]
for x in xs
]
labels = list(string.ascii_lowercase[: len(xs)])
dtypes = [x.dtype for x in xs]
labeled_dtypes = list(zip(labels, dtypes))
--- a/pandas:pandas/core/reshape/reshape.py
+++ b/pandas:pandas/core/reshape/reshape.py
@@ -661,12 +661,16 @@
# The dtype of each level must be explicitly set to avoid inferring the wrong type.
# See GH-36991.
return MultiIndex.from_arrays(
[
- # Not all indices can accept None values.
- Index(new_lev, dtype=lev.dtype) if None not in new_lev else new_lev
+ (
+ # Not all indices can accept None values.
+ Index(new_lev, dtype=lev.dtype)
+ if None not in new_lev
+ else new_lev
+ )
for new_lev, lev in zip(new_levs, columns.levels)
],
names=columns.names[:-1],
)
--- a/pandas:pandas/core/sorting.py
+++ b/pandas:pandas/core/sorting.py
@@ -502,13 +502,15 @@
sort_levels = [index._get_level_number(lev) for lev in sort_levels]
else:
sort_levels = list(range(index.nlevels)) # satisfies mypy
mapped = [
- ensure_key_mapped(index._get_level_values(level), key)
- if level in sort_levels
- else index._get_level_values(level)
+ (
+ ensure_key_mapped(index._get_level_values(level), key)
+ if level in sort_levels
+ else index._get_level_values(level)
+ )
for level in range(index.nlevels)
]
return type(index).from_arrays(mapped)
--- a/pandas:pandas/io/formats/style_render.py
+++ b/pandas:pandas/io/formats/style_render.py
@@ -339,13 +339,15 @@
(
f"{self.css['blank']} {self.css['level']}{r}"
if name is None
else f"{self.css['index_name']} {self.css['level']}{r}"
),
- name
- if (name is not None and not self.hide_column_names)
- else self.css["blank_value"],
+ (
+ name
+ if (name is not None and not self.hide_column_names)
+ else self.css["blank_value"]
+ ),
not all(self.hide_index_),
)
]
if clabels:
@@ -618,13 +620,13 @@
row_body_headers = []
else:
row_body_headers = [
{
**col,
- "display_value": col["display_value"]
- if col["is_visible"]
- else "",
+ "display_value": (
+ col["display_value"] if col["is_visible"] else ""
+ ),
"cellstyle": self.ctx_index[r, c] if col["is_visible"] else [],
}
for c, col in enumerate(row)
if col["type"] == "th"
]
--- a/pandas:pandas/io/json/_normalize.py
+++ b/pandas:pandas/io/json/_normalize.py
@@ -147,13 +147,15 @@
new_key = f"{key_string}{separator}{key}"
_normalise_json(
data=value,
# to avoid adding the separator to the start of every key
# GH#43831 avoid adding key if key_string blank
- key_string=new_key
- if new_key[: len(separator)] != separator
- else new_key[len(separator) :],
+ key_string=(
+ new_key
+ if new_key[: len(separator)] != separator
+ else new_key[len(separator) :]
+ ),
normalized_dict=normalized_dict,
separator=separator,
)
else:
normalized_dict[key_string] = data
@@ -492,13 +494,15 @@
_recursive_extract(obj[path[0]], path[1:], seen_meta, level=level + 1)
else:
for obj in data:
recs = _pull_records(obj, path[0])
recs = [
- nested_to_record(r, sep=sep, max_level=max_level)
- if isinstance(r, dict)
- else r
+ (
+ nested_to_record(r, sep=sep, max_level=max_level)
+ if isinstance(r, dict)
+ else r
+ )
for r in recs
]
# For repeating the metadata later
lengths.append(len(recs))
--- a/pandas:pandas/io/parsers/arrow_parser_wrapper.py
+++ b/pandas:pandas/io/parsers/arrow_parser_wrapper.py
@@ -69,13 +69,13 @@
and option_name
in ("include_columns", "null_values", "true_values", "false_values")
}
self.read_options = {
"autogenerate_column_names": self.header is None,
- "skip_rows": self.header
- if self.header is not None
- else self.kwds["skiprows"],
+ "skip_rows": (
+ self.header if self.header is not None else self.kwds["skiprows"]
+ ),
}
def _finalize_output(self, frame: DataFrame) -> DataFrame:
"""
Processes data read in based on kwargs.
--- a/pandas:pandas/tests/strings/test_cat.py
+++ b/pandas:pandas/tests/strings/test_cat.py
@@ -278,13 +278,15 @@
expected_outer = Series(["aaA", "bbB", "c-C", "ddD", "-e-"])
# joint index of rhs [t, u]; u will be forced have index of s
rhs_idx = (
t.index.intersection(s.index)
if join == "inner"
- else t.index.union(s.index)
- if join == "outer"
- else t.index.append(s.index.difference(t.index))
+ else (
+ t.index.union(s.index)
+ if join == "outer"
+ else t.index.append(s.index.difference(t.index))
+ )
)
expected = expected_outer.loc[s.index.join(rhs_idx, how=join)]
result = s.str.cat([t, u], join=join, na_rep="-")
tm.assert_series_equal(result, expected)
--- a/pandas:pandas/util/_decorators.py
+++ b/pandas:pandas/util/_decorators.py
@@ -384,13 +384,15 @@
docstring_components.append(docstring)
# formatting templates and concatenating docstring
decorated.__doc__ = "".join(
[
- component.format(**params)
- if isinstance(component, str)
- else dedent(component.__doc__ or "")
+ (
+ component.format(**params)
+ if isinstance(component, str)
+ else dedent(component.__doc__ or "")
+ )
for component in docstring_components
]
)
# error: "F" has no attribute "_docstring_components"
--- a/pillow:Tests/test_imagecms.py
+++ b/pillow:Tests/test_imagecms.py
@@ -305,13 +305,15 @@
# recursively and then check equality.
power = 10 ** digits
def truncate_tuple(tuple_or_float):
return tuple(
- truncate_tuple(val)
- if isinstance(val, tuple)
- else int(val * power) / power
+ (
+ truncate_tuple(val)
+ if isinstance(val, tuple)
+ else int(val * power) / power
+ )
for val in tuple_or_float
)
assert truncate_tuple(tup1) == truncate_tuple(tup2)
--- a/pillow:src/PIL/IcoImagePlugin.py
+++ b/pillow:src/PIL/IcoImagePlugin.py
@@ -44,13 +44,15 @@
"sizes",
[(16, 16), (24, 24), (32, 32), (48, 48), (64, 64), (128, 128), (256, 256)],
)
width, height = im.size
sizes = filter(
- lambda x: False
- if (x[0] > width or x[1] > height or x[0] > 256 or x[1] > 256)
- else True,
+ lambda x: (
+ False
+ if (x[0] > width or x[1] > height or x[0] > 256 or x[1] > 256)
+ else True
+ ),
sizes,
)
sizes = list(sizes)
fp.write(struct.pack("<H", len(sizes))) # idCount(2)
offset = fp.tell() + len(sizes) * 16
--- a/pillow:src/PIL/PdfImagePlugin.py
+++ b/pillow:src/PIL/PdfImagePlugin.py
@@ -53,13 +53,13 @@
existing_pdf = PdfParser.PdfParser(f=fp, filename=filename, mode="w+b")
resolution = im.encoderinfo.get("resolution", 72.0)
info = {
- "title": None
- if is_appending
- else os.path.splitext(os.path.basename(filename))[0],
+ "title": (
+ None if is_appending else os.path.splitext(os.path.basename(filename))[0]
+ ),
"author": None,
"subject": None,
"keywords": None,
"creator": None,
"producer": None,
--- a/poetry:poetry/console/commands/init.py
+++ b/poetry:poetry/console/commands/init.py
@@ -461,13 +461,15 @@
dict(
[
("name", package.name),
(
"path",
- path.relative_to(cwd).as_posix()
- if not is_absolute
- else path.as_posix(),
+ (
+ path.relative_to(cwd).as_posix()
+ if not is_absolute
+ else path.as_posix()
+ ),
),
]
+ ([("extras", extras)] if extras else [])
)
)
--- a/poetry:poetry/console/commands/self/update.py
+++ b/poetry:poetry/console/commands/self/update.py
@@ -100,13 +100,13 @@
self.line("No release found for the specified version")
return 1
packages.sort(
key=cmp_to_key(
- lambda x, y: 0
- if x.version == y.version
- else int(x.version < y.version or -1)
+ lambda x, y: (
+ 0 if x.version == y.version else int(x.version < y.version or -1)
+ )
)
)
release = None
for package in packages:
--- a/poetry:poetry/installation/installer.py
+++ b/poetry:poetry/installation/installer.py
@@ -395,13 +395,15 @@
"" if installs == 1 else "s",
updates,
"" if updates == 1 else "s",
uninstalls,
"" if uninstalls == 1 else "s",
- f", <info>{skipped}</> skipped"
- if skipped and self.is_verbose()
- else "",
+ (
+ f", <info>{skipped}</> skipped"
+ if skipped and self.is_verbose()
+ else ""
+ ),
)
)
self._io.write_line("")
--- a/pyanalyze:pyanalyze/format_strings.py
+++ b/pyanalyze:pyanalyze/format_strings.py
@@ -94,13 +94,13 @@
precision = match.group("precision")
if precision is not None:
precision = cls._parse_int_field(precision[1:])
return cls(
conversion_type=cls._maybe_decode(conversion_type),
- mapping_key=cls._maybe_decode(mapping_key)
- if mapping_key is not None
- else None,
+ mapping_key=(
+ cls._maybe_decode(mapping_key) if mapping_key is not None else None
+ ),
conversion_flags=cls._maybe_decode(match.group("conversion_flags")),
field_width=field_width,
precision=precision,
length_modifier=cls._maybe_decode(match.group("length_modifier")),
is_bytes=is_bytes,
--- a/pyanalyze:pyanalyze/name_check_visitor.py
+++ b/pyanalyze:pyanalyze/name_check_visitor.py
@@ -4193,13 +4193,15 @@
print_output=False,
)
with inner_attribute_checker_obj as inner_attribute_checker, unused_finder as inner_unused_finder:
all_failures = super(NameCheckVisitor, cls)._run_on_files(
files,
- attribute_checker=attribute_checker
- if attribute_checker is not None
- else inner_attribute_checker,
+ attribute_checker=(
+ attribute_checker
+ if attribute_checker is not None
+ else inner_attribute_checker
+ ),
unused_finder=inner_unused_finder,
settings=settings,
**kwargs,
)
if unused_finder is not None:
--- a/pyanalyze:pyanalyze/stacked_scopes.py
+++ b/pyanalyze:pyanalyze/stacked_scopes.py
@@ -952,13 +952,15 @@
or (ctx.varname not in self.name_to_all_definition_nodes)
or isinstance(self.referencing_value_vars[ctx.varname], ReferencingValue)
)
values = [
- UNINITIALIZED_VALUE
- if node is _UNINITIALIZED and not should_use_unconstrained
- else self._resolve_value(self.definition_node_to_value[node], ctx)
+ (
+ UNINITIALIZED_VALUE
+ if node is _UNINITIALIZED and not should_use_unconstrained
+ else self._resolve_value(self.definition_node_to_value[node], ctx)
+ )
for node in nodes
]
return _constrain_value(values, constraints, fallback_value=ctx.fallback_value)
def _add_composite(self, varname: Varname) -> None:
--- a/pyanalyze:pyanalyze/test_format_strings.py
+++ b/pyanalyze:pyanalyze/test_format_strings.py
@@ -72,13 +72,15 @@
is_bytes = isinstance(pattern, bytes)
assert_eq(
PercentFormatString(
pattern, is_bytes, specifiers=specifiers, raw_pieces=raw_pieces
),
- PercentFormatString.from_bytes_pattern(pattern)
- if is_bytes
- else PercentFormatString.from_pattern(pattern),
+ (
+ PercentFormatString.from_bytes_pattern(pattern)
+ if is_bytes
+ else PercentFormatString.from_pattern(pattern)
+ ),
)
DOT_FORMAT_TESTCASES = [
("", []),
--- a/pyanalyze:pyanalyze/test_stacked_scopes.py
+++ b/pyanalyze:pyanalyze/test_stacked_scopes.py
@@ -831,12 +831,14 @@
assert_is_value(y, MultiValuedValue([KnownValue(True), KnownValue(False)]))
if y:
assert_is_value(y, KnownValue(True))
else:
assert_is_value(y, KnownValue(False))
- assert_is_value(y, KnownValue(True)) if y else assert_is_value(
- y, KnownValue(False)
+ (
+ assert_is_value(y, KnownValue(True))
+ if y
+ else assert_is_value(y, KnownValue(False))
)
@assert_passes()
def test_isinstance(self):
class A(object):
--- a/pyanalyze:pyanalyze/typeshed.py
+++ b/pyanalyze:pyanalyze/typeshed.py
@@ -448,13 +448,13 @@
arg = arg.replace(kind=SigParameter.POSITIONAL_OR_KEYWORD)
cleaned_arguments.append(arg)
return Signature.make(
cleaned_arguments,
callable=obj,
- return_annotation=GenericValue(Awaitable, [return_value])
- if is_async_fn
- else return_value,
+ return_annotation=(
+ GenericValue(Awaitable, [return_value]) if is_async_fn else return_value
+ ),
)
def _parse_param_list(
self,
args: Iterable[ast3.arg],
--- a/ptr:ptr.py
+++ b/ptr:ptr.py
@@ -383,13 +383,15 @@
print_non_configured: bool,
) -> Dict[Path, Dict]:
get_tests_start_time = time()
all_setup_pys = find_setup_pys(
base_path,
- set(CONFIG["ptr"]["exclude_patterns"].split())
- if CONFIG["ptr"]["exclude_patterns"]
- else set(),
+ (
+ set(CONFIG["ptr"]["exclude_patterns"].split())
+ if CONFIG["ptr"]["exclude_patterns"]
+ else set()
+ ),
)
stats["total.setup_pys"] = len(all_setup_pys)
non_configured_modules = [] # type: List[Path]
test_modules = {} # type: Dict[Path, Dict]
--- a/scikit-lego:sklego/meta/zero_inflated_regressor.py
+++ b/scikit-lego:sklego/meta/zero_inflated_regressor.py
@@ -109,13 +109,15 @@
except NotFittedError:
self.regressor_ = clone(self.regressor)
self.regressor_.fit(
X[non_zero_indices],
y[non_zero_indices],
- sample_weight=sample_weight[non_zero_indices]
- if sample_weight is not None
- else None,
+ sample_weight=(
+ sample_weight[non_zero_indices]
+ if sample_weight is not None
+ else None
+ ),
)
else:
raise ValueError(
"The predicted training labels are all zero, making the regressor"
" obsolete. Change the classifier or use a plain regressor instead."
--- a/sqlalchemy:lib/sqlalchemy/connectors/pyodbc.py
+++ b/sqlalchemy:lib/sqlalchemy/connectors/pyodbc.py
@@ -170,13 +170,15 @@
# NOTE: as of #6058, this won't be called if the use_setinputsizes flag
# is False, or if no types were specified in list_of_tuples
cursor.setinputsizes(
[
- (dbtype, None, None)
- if not isinstance(dbtype, tuple)
- else dbtype
+ (
+ (dbtype, None, None)
+ if not isinstance(dbtype, tuple)
+ else dbtype
+ )
for key, dbtype, sqltype in list_of_tuples
]
)
def set_isolation_level(self, connection, level):
--- a/sqlalchemy:lib/sqlalchemy/dialects/mssql/base.py
+++ b/sqlalchemy:lib/sqlalchemy/dialects/mssql/base.py
@@ -2146,14 +2146,16 @@
)
elif binary.type._type_affinity is sqltypes.Numeric:
type_expression = "ELSE CAST(JSON_VALUE(%s, %s) AS %s)" % (
self.process(binary.left, **kw),
self.process(binary.right, **kw),
- "FLOAT"
- if isinstance(binary.type, sqltypes.Float)
- else "NUMERIC(%s, %s)"
- % (binary.type.precision, binary.type.scale),
+ (
+ "FLOAT"
+ if isinstance(binary.type, sqltypes.Float)
+ else "NUMERIC(%s, %s)"
+ % (binary.type.precision, binary.type.scale)
+ ),
)
elif binary.type._type_affinity is sqltypes.Boolean:
# the NULL handling is particularly weird with boolean, so
# explicitly return numeric (BIT) constants
type_expression = (
@@ -2324,13 +2326,15 @@
)
# handle other included columns
if index.dialect_options["mssql"]["include"]:
inclusions = [
- index.table.c[col]
- if isinstance(col, util.string_types)
- else col
+ (
+ index.table.c[col]
+ if isinstance(col, util.string_types)
+ else col
+ )
for col in index.dialect_options["mssql"]["include"]
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
--- a/sqlalchemy:lib/sqlalchemy/dialects/mysql/base.py
+++ b/sqlalchemy:lib/sqlalchemy/dialects/mysql/base.py
@@ -2136,21 +2136,23 @@
preparer = self.preparer
table = preparer.format_table(index.table)
columns = [
self.sql_compiler.process(
- elements.Grouping(expr)
- if (
- isinstance(expr, elements.BinaryExpression)
- or (
- isinstance(expr, elements.UnaryExpression)
- and expr.modifier
- not in (operators.desc_op, operators.asc_op)
+ (
+ elements.Grouping(expr)
+ if (
+ isinstance(expr, elements.BinaryExpression)
+ or (
+ isinstance(expr, elements.UnaryExpression)
+ and expr.modifier
+ not in (operators.desc_op, operators.asc_op)
+ )
+ or isinstance(expr, functions.FunctionElement)
)
- or isinstance(expr, functions.FunctionElement)
- )
- else expr,
+ else expr
+ ),
include_table=False,
literal_binds=True,
)
for expr in index.expressions
]
@@ -2176,16 +2178,18 @@
if isinstance(length, dict):
# length value can be a (column_name --> integer value)
# mapping specifying the prefix length for each column of the
# index
columns = ", ".join(
- "%s(%d)" % (expr, length[col.name])
- if col.name in length
- else (
- "%s(%d)" % (expr, length[expr])
- if expr in length
- else "%s" % expr
+ (
+ "%s(%d)" % (expr, length[col.name])
+ if col.name in length
+ else (
+ "%s(%d)" % (expr, length[expr])
+ if expr in length
+ else "%s" % expr
+ )
)
for col, expr in zip(index.expressions, columns)
)
else:
# or can be an integer value specifying the same
--- a/sqlalchemy:lib/sqlalchemy/dialects/postgresql/array.py
+++ b/sqlalchemy:lib/sqlalchemy/dialects/postgresql/array.py
@@ -102,13 +102,15 @@
)
if isinstance(main_type, ARRAY):
self.type = ARRAY(
main_type.item_type,
- dimensions=main_type.dimensions + 1
- if main_type.dimensions is not None
- else 2,
+ dimensions=(
+ main_type.dimensions + 1
+ if main_type.dimensions is not None
+ else 2
+ ),
)
else:
self.type = ARRAY(main_type)
@property
--- a/sqlalchemy:lib/sqlalchemy/dialects/postgresql/base.py
+++ b/sqlalchemy:lib/sqlalchemy/dialects/postgresql/base.py
@@ -2503,13 +2503,15 @@
)
if select._fetch_clause is not None:
text += "\n FETCH FIRST (%s)%s ROWS %s" % (
self.process(select._fetch_clause, **kw),
" PERCENT" if select._fetch_clause_options["percent"] else "",
- "WITH TIES"
- if select._fetch_clause_options["with_ties"]
- else "ONLY",
+ (
+ "WITH TIES"
+ if select._fetch_clause_options["with_ties"]
+ else "ONLY"
+ ),
)
return text
class PGDDLCompiler(compiler.DDLCompiler):
@@ -2636,13 +2638,15 @@
ops = index.dialect_options["postgresql"]["ops"]
text += "(%s)" % ", ".join(
[
self.sql_compiler.process(
- expr.self_group()
- if not isinstance(expr, expression.ColumnClause)
- else expr,
+ (
+ expr.self_group()
+ if not isinstance(expr, expression.ColumnClause)
+ else expr
+ ),
include_table=False,
literal_binds=True,
)
+ (
(" " + ops[expr.key])
@@ -2654,13 +2658,15 @@
)
includeclause = index.dialect_options["postgresql"]["include"]
if includeclause:
inclusions = [
- index.table.c[col]
- if isinstance(col, util.string_types)
- else col
+ (
+ index.table.c[col]
+ if isinstance(col, util.string_types)
+ else col
+ )
for col in includeclause
]
text += " INCLUDE (%s)" % ", ".join(
[preparer.quote(c.name) for c in inclusions]
)
@@ -2871,21 +2877,25 @@
identifier_preparer = self.dialect.identifier_preparer
return identifier_preparer.format_type(type_)
def visit_TIMESTAMP(self, type_, **kw):
return "TIMESTAMP%s %s" % (
- "(%d)" % type_.precision
- if getattr(type_, "precision", None) is not None
- else "",
+ (
+ "(%d)" % type_.precision
+ if getattr(type_, "precision", None) is not None
+ else ""
+ ),
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE",
)
def visit_TIME(self, type_, **kw):
return "TIME%s %s" % (
- "(%d)" % type_.precision
- if getattr(type_, "precision", None) is not None
- else "",
+ (
+ "(%d)" % type_.precision
+ if getattr(type_, "precision", None) is not None
+ else ""
+ ),
(type_.timezone and "WITH" or "WITHOUT") + " TIME ZONE",
)
def visit_INTERVAL(self, type_, **kw):
text = "INTERVAL"
@@ -3531,13 +3541,13 @@
"SELECT c.relname FROM pg_class c "
"JOIN pg_namespace n ON n.oid = c.relnamespace "
"WHERE n.nspname = :schema AND c.relkind in ('r', 'p')"
).columns(relname=sqltypes.Unicode),
dict(
- schema=schema
- if schema is not None
- else self.default_schema_name
+ schema=(
+ schema if schema is not None else self.default_schema_name
+ )
),
)
return [name for name, in result]
@reflection.cache
@@ -3547,13 +3557,13 @@
"SELECT c.relname FROM pg_class c "
"JOIN pg_namespace n ON n.oid = c.relnamespace "
"WHERE n.nspname = :schema AND c.relkind = 'f'"
).columns(relname=sqltypes.Unicode),
dict(
- schema=schema
- if schema is not None
- else self.default_schema_name
+ schema=(
+ schema if schema is not None else self.default_schema_name
+ )
),
)
return [name for name, in result]
@reflection.cache
@@ -3581,13 +3591,13 @@
"JOIN pg_namespace n ON n.oid = c.relnamespace "
"WHERE n.nspname = :schema AND c.relkind IN (%s)"
% (", ".join("'%s'" % elem for elem in kinds))
).columns(relname=sqltypes.Unicode),
dict(
- schema=schema
- if schema is not None
- else self.default_schema_name
+ schema=(
+ schema if schema is not None else self.default_schema_name
+ )
),
)
return [name for name, in result]
@reflection.cache
@@ -3617,13 +3627,13 @@
"JOIN pg_namespace n ON n.oid = c.relnamespace "
"WHERE n.nspname = :schema AND c.relname = :view_name "
"AND c.relkind IN ('v', 'm')"
).columns(view_def=sqltypes.Unicode),
dict(
- schema=schema
- if schema is not None
- else self.default_schema_name,
+ schema=(
+ schema if schema is not None else self.default_schema_name
+ ),
view_name=view_name,
),
)
return view_def
@@ -3700,13 +3710,15 @@
domains = self._load_domains(connection)
# dictionary with (name, ) if default search path or (schema, name)
# as keys
enums = dict(
- ((rec["name"],), rec)
- if rec["visible"]
- else ((rec["schema"], rec["name"]), rec)
+ (
+ ((rec["name"],), rec)
+ if rec["visible"]
+ else ((rec["schema"], rec["name"]), rec)
+ )
for rec in self._load_enums(connection, schema="*")
)
# format columns
columns = []
@@ -4119,20 +4131,28 @@
and ix.indisprimary = 'f'
ORDER BY
t.relname,
i.relname
""" % (
- # version 8.3 here was based on observing the
- # cast does not work in PG 8.2.4, does work in 8.3.0.
- # nothing in PG changelogs regarding this.
- "::varchar" if self.server_version_info >= (8, 3) else "",
- "ix.indoption::varchar"
- if self.server_version_info >= (8, 3)
- else "NULL",
- "i.reloptions"
- if self.server_version_info >= (8, 2)
- else "NULL",
+ (
+ # version 8.3 here was based on observing the
+ # cast does not work in PG 8.2.4, does work in 8.3.0.
+ # nothing in PG changelogs regarding this.
+ "::varchar"
+ if self.server_version_info >= (8, 3)
+ else ""
+ ),
+ (
+ "ix.indoption::varchar"
+ if self.server_version_info >= (8, 3)
+ else "NULL"
+ ),
+ (
+ "i.reloptions"
+ if self.server_version_info >= (8, 2)
+ else "NULL"
+ ),
self._pg_index_any("a.attnum", "ix.indkey"),
)
else:
IDX_SQL = """
SELECT
@@ -4163,13 +4183,15 @@
and ix.indisprimary = 'f'
ORDER BY
t.relname,
i.relname
""" % (
- "ix.indnkeyatts"
- if self.server_version_info >= (11, 0)
- else "NULL",
+ (
+ "ix.indnkeyatts"
+ if self.server_version_info >= (11, 0)
+ else "NULL"
+ ),
)
t = sql.text(IDX_SQL).columns(
relname=sqltypes.Unicode, attname=sqltypes.Unicode
)
--- a/sqlalchemy:lib/sqlalchemy/engine/base.py
+++ b/sqlalchemy:lib/sqlalchemy/engine/base.py
@@ -1927,13 +1927,13 @@
parameters,
e,
self.dialect.dbapi.Error,
hide_parameters=self.engine.hide_parameters,
dialect=self.dialect,
- ismulti=context.executemany
- if context is not None
- else None,
+ ismulti=(
+ context.executemany if context is not None else None
+ ),
),
with_traceback=exc_info[2],
from_=e,
)
self._reentrant_error = True
@@ -1953,13 +1953,13 @@
e,
self.dialect.dbapi.Error,
hide_parameters=self.engine.hide_parameters,
connection_invalidated=self._is_disconnect,
dialect=self.dialect,
- ismulti=context.executemany
- if context is not None
- else None,
+ ismulti=(
+ context.executemany if context is not None else None
+ ),
)
else:
sqlalchemy_exception = None
newraise = None
--- a/sqlalchemy:lib/sqlalchemy/engine/cursor.py
+++ b/sqlalchemy:lib/sqlalchemy/engine/cursor.py
@@ -366,13 +366,15 @@
return [
(
idx,
idx,
rmap_entry[RM_OBJECTS],
- rmap_entry[RM_NAME].lower()
- if not case_sensitive
- else rmap_entry[RM_NAME],
+ (
+ rmap_entry[RM_NAME].lower()
+ if not case_sensitive
+ else rmap_entry[RM_NAME]
+ ),
rmap_entry[RM_RENDERED_NAME],
context.get_result_processor(
rmap_entry[RM_TYPE],
rmap_entry[RM_RENDERED_NAME],
cursor_description[idx][1],
@@ -736,13 +738,15 @@
else key._tq_label.lower()
)
in map_
):
result = map_[
- key._tq_label
- if self.case_sensitive
- else key._tq_label.lower()
+ (
+ key._tq_label
+ if self.case_sensitive
+ else key._tq_label.lower()
+ )
]
elif (
hasattr(key, "name")
and (key.name if self.case_sensitive else key.name.lower())
in map_
--- a/sqlalchemy:lib/sqlalchemy/engine/default.py
+++ b/sqlalchemy:lib/sqlalchemy/engine/default.py
@@ -1050,13 +1050,15 @@
# execute() or executemany() method.
parameters = []
if compiled.positional:
for compiled_params in self.compiled_parameters:
param = [
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key]
+ (
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ )
for key in positiontup
]
parameters.append(dialect.execute_sequence_format(param))
else:
encode = not dialect.supports_unicode_statements
@@ -1064,20 +1066,24 @@
encoder = dialect._encoder
for compiled_params in self.compiled_parameters:
if encode:
param = {
- encoder(key)[0]: processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key]
+ encoder(key)[0]: (
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ )
for key in compiled_params
}
else:
param = {
- key: processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key]
+ key: (
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ )
for key in compiled_params
}
parameters.append(param)
@@ -1704,23 +1710,27 @@
processors = compiled._bind_processors
if compiled.positional:
positiontup = compiled.positiontup
parameters = self.dialect.execute_sequence_format(
[
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key]
+ (
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ )
for key in positiontup
]
)
else:
parameters = dict(
(
key,
- processors[key](compiled_params[key])
- if key in processors
- else compiled_params[key],
+ (
+ processors[key](compiled_params[key])
+ if key in processors
+ else compiled_params[key]
+ ),
)
for key in compiled_params
)
return self._execute_scalar(
util.text_type(compiled), type_, parameters=parameters
--- a/sqlalchemy:lib/sqlalchemy/event/legacy.py
+++ b/sqlalchemy:lib/sqlalchemy/event/legacy.py
@@ -103,13 +103,13 @@
" \"listen for the '%(event_name)s' event\"\n"
"\n # ... (event handling logic) ...\n"
)
text %= {
- "current_since": " (arguments as of %s)" % current_since
- if current_since
- else "",
+ "current_since": (
+ " (arguments as of %s)" % current_since if current_since else ""
+ ),
"event_name": fn.__name__,
"has_kw_arguments": ", **kw" if dispatch_collection.has_kw else "",
"named_event_arguments": ", ".join(dispatch_collection.arg_names),
"example_kw_arg": example_kw_arg,
"sample_target": sample_target,
@@ -129,13 +129,13 @@
" \"listen for the '%(event_name)s' event\"\n"
"\n # ... (event handling logic) ...\n"
% {
"since": since,
"event_name": fn.__name__,
- "has_kw_arguments": " **kw"
- if dispatch_collection.has_kw
- else "",
+ "has_kw_arguments": (
+ " **kw" if dispatch_collection.has_kw else ""
+ ),
"named_event_arguments": ", ".join(args),
"sample_target": sample_target,
}
)
return text
--- a/sqlalchemy:lib/sqlalchemy/ext/mypy/util.py
+++ b/sqlalchemy:lib/sqlalchemy/ext/mypy/util.py
@@ -262,13 +262,15 @@
bound_type = node.node
return Instance(
bound_type,
[
- unbound_to_instance(api, arg)
- if isinstance(arg, UnboundType)
- else arg
+ (
+ unbound_to_instance(api, arg)
+ if isinstance(arg, UnboundType)
+ else arg
+ )
for arg in typ.args
],
)
else:
return typ
--- a/sqlalchemy:lib/sqlalchemy/orm/collections.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/collections.py
@@ -1636,13 +1636,15 @@
set: (
{"appender": "add", "remover": "remove", "iterator": "__iter__"},
_set_decorators(),
),
# decorators are required for dicts and object collections.
- dict: ({"iterator": "values"}, _dict_decorators())
- if util.py3k
- else ({"iterator": "itervalues"}, _dict_decorators()),
+ dict: (
+ ({"iterator": "values"}, _dict_decorators())
+ if util.py3k
+ else ({"iterator": "itervalues"}, _dict_decorators())
+ ),
}
class MappedCollection(dict):
"""A basic dictionary-based collection class.
--- a/sqlalchemy:lib/sqlalchemy/orm/context.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/context.py
@@ -414,13 +414,15 @@
)
else:
self.statement = statement
self._label_convention = self._column_naming_convention(
- statement._label_style
- if not statement._is_textual and not statement.is_dml
- else LABEL_STYLE_NONE,
+ (
+ statement._label_style
+ if not statement._is_textual and not statement.is_dml
+ else LABEL_STYLE_NONE
+ ),
self.use_legacy_query_style,
)
_QueryEntity.to_compile_state(
self,
@@ -891,15 +893,19 @@
@classmethod
def get_columns_clause_froms(cls, statement):
return cls._normalize_froms(
itertools.chain.from_iterable(
- element._from_objects
- if "parententity" not in element._annotations
- else [
- element._annotations["parententity"].__clause_element__()
- ]
+ (
+ element._from_objects
+ if "parententity" not in element._annotations
+ else [
+ element._annotations[
+ "parententity"
+ ].__clause_element__()
+ ]
+ )
for element in statement._raw_columns
)
)
@classmethod
@@ -1023,13 +1029,15 @@
# elements are converted into label references. For the
# eager load / subquery wrapping case, we need to un-coerce
# the original expressions outside of the label references
# in order to have them render.
unwrapped_order_by = [
- elem.element
- if isinstance(elem, sql.elements._label_reference)
- else elem
+ (
+ elem.element
+ if isinstance(elem, sql.elements._label_reference)
+ else elem
+ )
for elem in self.order_by
]
order_by_col_expr = sql_util.expand_column_list_from_order_by(
self.primary_columns, unwrapped_order_by
@@ -1256,13 +1264,15 @@
# 'ORM only' check, if this query were generated from a
# subquery of itself, i.e. _from_selectable(), apply adaption
# to all SQL constructs.
adapters.append(
(
- False
- if self.compile_options._orm_only_from_obj_alias
- else True,
+ (
+ False
+ if self.compile_options._orm_only_from_obj_alias
+ else True
+ ),
self._from_obj_alias.replace,
)
)
if self._aliased_generations:
@@ -2186,13 +2196,16 @@
{
"name": ent._label_name,
"type": ent.type,
"aliased": getattr(insp_ent, "is_aliased_class", False),
"expr": ent.expr,
- "entity": getattr(insp_ent, "entity", None)
- if ent.entity_zero is not None and not insp_ent.is_clause_element
- else None,
+ "entity": (
+ getattr(insp_ent, "entity", None)
+ if ent.entity_zero is not None
+ and not insp_ent.is_clause_element
+ else None
+ ),
}
for ent, insp_ent in [
(
_ent,
(
--- a/sqlalchemy:lib/sqlalchemy/orm/dependency.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/dependency.py
@@ -162,13 +162,15 @@
# by a preprocessor on this state/attribute. In the
# case of deletes we may try to load missing items here as well.
sum_ = state.manager[self.key].impl.get_all_pending(
state,
state.dict,
- self._passive_delete_flag
- if isdelete
- else attributes.PASSIVE_NO_INITIALIZE,
+ (
+ self._passive_delete_flag
+ if isdelete
+ else attributes.PASSIVE_NO_INITIALIZE
+ ),
)
if not sum_:
continue
--- a/sqlalchemy:lib/sqlalchemy/orm/evaluator.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/evaluator.py
@@ -121,14 +121,12 @@
)
else:
raise UnevaluatableError("Cannot evaluate column: %s" % clause)
get_corresponding_attr = operator.attrgetter(key)
- return (
- lambda obj: get_corresponding_attr(obj)
- if obj is not None
- else _NO_OBJECT
+ return lambda obj: (
+ get_corresponding_attr(obj) if obj is not None else _NO_OBJECT
)
def visit_tuple(self, clause):
return self.visit_clauselist(clause)
--- a/sqlalchemy:lib/sqlalchemy/orm/loading.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/loading.py
@@ -103,30 +103,36 @@
return go
if context.load_options._legacy_uniquing:
unique_filters = [
- _no_unique
- if context.yield_per
- else id
- if (
- ent.use_id_for_hash
- or ent._non_hashable_value
- or ent._null_column_type
- )
- else None
+ (
+ _no_unique
+ if context.yield_per
+ else (
+ id
+ if (
+ ent.use_id_for_hash
+ or ent._non_hashable_value
+ or ent._null_column_type
+ )
+ else None
+ )
+ )
for ent in context.compile_state._entities
]
else:
unique_filters = [
- _no_unique
- if context.yield_per
- else _not_hashable(ent.column.type)
- if (not ent.use_id_for_hash and ent._non_hashable_value)
- else id
- if ent.use_id_for_hash
- else None
+ (
+ _no_unique
+ if context.yield_per
+ else (
+ _not_hashable(ent.column.type)
+ if (not ent.use_id_for_hash and ent._non_hashable_value)
+ else id if ent.use_id_for_hash else None
+ )
+ )
for ent in context.compile_state._entities
]
row_metadata = SimpleResultMetaData(
labels, extra, _unique_filters=unique_filters
--- a/sqlalchemy:lib/sqlalchemy/orm/mapper.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/mapper.py
@@ -1991,13 +1991,15 @@
def __str__(self):
return "mapped class %s%s->%s" % (
self.class_.__name__,
self.non_primary and " (non-primary)" or "",
- self.local_table.description
- if self.local_table is not None
- else self.persist_selectable.description,
+ (
+ self.local_table.description
+ if self.local_table is not None
+ else self.persist_selectable.description
+ ),
)
def _is_orphan(self, state):
orphan_possible = False
for mapper in self.iterate_to_root():
--- a/sqlalchemy:lib/sqlalchemy/orm/persistence.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/persistence.py
@@ -287,15 +287,17 @@
(
state,
state_dict,
sub_mapper,
connection,
- mapper._get_committed_state_attr_by_column(
- state, state_dict, mapper.version_id_col
- )
- if mapper.version_id_col is not None
- else None,
+ (
+ mapper._get_committed_state_attr_by_column(
+ state, state_dict, mapper.version_id_col
+ )
+ if mapper.version_id_col is not None
+ else None
+ ),
)
for state, state_dict, sub_mapper, connection in states_to_update
if table in sub_mapper._pks_by_table
)
@@ -1020,13 +1022,15 @@
state_dict,
c,
c.context.compiled_parameters[0],
value_params,
True,
- c.returned_defaults
- if not c.context.executemany
- else None,
+ (
+ c.returned_defaults
+ if not c.context.executemany
+ else None
+ ),
)
if check_rowcount:
if rows != len(records):
raise orm_exc.StaleDataError(
@@ -1118,13 +1122,15 @@
state_dict,
c,
last_inserted_params,
value_params,
False,
- c.returned_defaults
- if not c.context.executemany
- else None,
+ (
+ c.returned_defaults
+ if not c.context.executemany
+ else None
+ ),
)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
else:
@@ -1242,13 +1248,15 @@
state_dict,
result,
result.context.compiled_parameters[0],
value_params,
False,
- result.returned_defaults
- if not result.context.executemany
- else None,
+ (
+ result.returned_defaults
+ if not result.context.executemany
+ else None
+ ),
)
else:
_postfetch_bulk_save(mapper_rec, state_dict, table)
--- a/sqlalchemy:lib/sqlalchemy/orm/query.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/query.py
@@ -2341,20 +2341,26 @@
self._aliased_generation = self._next_aliased_generation()
if self._aliased_generation:
_props = [
(
- prop[0],
- sql_util._deep_annotate(
- prop[1],
- {"aliased_generation": self._aliased_generation},
+ (
+ prop[0],
+ (
+ sql_util._deep_annotate(
+ prop[1],
+ {
+ "aliased_generation": self._aliased_generation
+ },
+ )
+ if isinstance(prop[1], expression.ClauseElement)
+ else prop[1]
+ ),
)
- if isinstance(prop[1], expression.ClauseElement)
- else prop[1],
+ if len(prop) == 2
+ else prop
)
- if len(prop) == 2
- else prop
for prop in _props
]
# legacy ^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -2365,16 +2371,20 @@
prop[0],
legacy=True,
apply_propagate_attrs=self,
),
(
- coercions.expect(roles.OnClauseRole, prop[1], legacy=True)
- # if not isinstance(prop[1], str)
- # else prop[1]
- )
- if len(prop) == 2
- else None,
+ (
+ coercions.expect(
+ roles.OnClauseRole, prop[1], legacy=True
+ )
+ # if not isinstance(prop[1], str)
+ # else prop[1]
+ )
+ if len(prop) == 2
+ else None
+ ),
None,
{
"isouter": isouter,
"aliased": aliased,
"from_joinpoint": True if i > 0 else from_joinpoint,
--- a/sqlalchemy:lib/sqlalchemy/orm/relationships.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/relationships.py
@@ -1806,13 +1806,15 @@
# the last known value
current_value = mapper._get_state_attr_by_column(
state,
dict_,
column,
- passive=attributes.PASSIVE_OFF
- if state.persistent
- else attributes.PASSIVE_NO_FETCH ^ attributes.INIT_OK,
+ passive=(
+ attributes.PASSIVE_OFF
+ if state.persistent
+ else attributes.PASSIVE_NO_FETCH ^ attributes.INIT_OK
+ ),
)
if current_value is attributes.NEVER_SET:
if not existing_is_available:
raise sa_exc.InvalidRequestError(
@@ -2352,13 +2354,15 @@
"would allow "
"delete-orphan cascade to take place in this direction, set "
"the single_parent=True flag."
% {
"rel": self,
- "direction": "many-to-one"
- if self.direction is MANYTOONE
- else "many-to-many",
+ "direction": (
+ "many-to-one"
+ if self.direction is MANYTOONE
+ else "many-to-many"
+ ),
"clsname": self.parent.class_.__name__,
"relatedcls": self.mapper.class_.__name__,
},
code="bbf0",
)
--- a/sqlalchemy:lib/sqlalchemy/orm/strategies.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/strategies.py
@@ -1075,13 +1075,15 @@
mapper.class_manager,
LoadLazyAttribute(
key,
self,
loadopt,
- loadopt._generate_extra_criteria(context)
- if loadopt._extra_criteria
- else None,
+ (
+ loadopt._generate_extra_criteria(context)
+ if loadopt._extra_criteria
+ else None
+ ),
),
key,
)
populators["new"].append((self.key, set_lazy_callable))
@@ -1465,13 +1467,15 @@
),
]
elif ltj > 2:
middle = [
(
- orm_util.AliasedClass(item[0])
- if not inspect(item[0]).is_aliased_class
- else item[0].entity,
+ (
+ orm_util.AliasedClass(item[0])
+ if not inspect(item[0]).is_aliased_class
+ else item[0].entity
+ ),
item[1],
)
for item in to_join[1:-1]
]
inner = []
@@ -2108,13 +2112,15 @@
context.attributes[key] = idx = context.attributes[key] + 1
if idx >= len(self._aliased_class_pool):
to_adapt = orm_util.AliasedClass(
self.mapper,
- alias=alt_selectable._anonymous_fromclause(flat=True)
- if alt_selectable is not None
- else None,
+ alias=(
+ alt_selectable._anonymous_fromclause(flat=True)
+ if alt_selectable is not None
+ else None
+ ),
flat=True,
use_mapper_path=True,
)
# load up the .columns collection on the Alias() before
@@ -2938,13 +2944,15 @@
# "orig" options if extra_criteria is present, because the copy
# of extra_criteria will have different boundparam than that of
# the QueryableAttribute in the path
new_options = [
- orig_opt._adjust_for_extra_criteria(context)
- if orig_opt._is_strategy_option
- else orig_opt
+ (
+ orig_opt._adjust_for_extra_criteria(context)
+ if orig_opt._is_strategy_option
+ else orig_opt
+ )
for orig_opt in options
if orig_opt._is_compile_state or orig_opt._is_legacy_option
]
# propagate user defined options from the current query
--- a/sqlalchemy:lib/sqlalchemy/orm/strategy_options.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/strategy_options.py
@@ -199,13 +199,15 @@
if self.context:
cloned.context = util.OrderedDict(
[
(
key,
- value._deep_clone(applied, process)
- if isinstance(value, Load)
- else value,
+ (
+ value._deep_clone(applied, process)
+ if isinstance(value, Load)
+ else value
+ ),
)
for key, value in self.context.items()
]
)
@@ -429,13 +431,15 @@
ac = orm_util.with_polymorphic(
ext_info.mapper.base_mapper,
ext_info.mapper,
aliased=True,
_use_mapper_path=True,
- _existing_alias=inspect(existing)
- if existing is not None
- else None,
+ _existing_alias=(
+ inspect(existing)
+ if existing is not None
+ else None
+ ),
)
ext_info = inspect(ac)
path.entity_path[prop].set(
--- a/sqlalchemy:lib/sqlalchemy/orm/util.py
+++ b/sqlalchemy:lib/sqlalchemy/orm/util.py
@@ -501,16 +501,20 @@
self._aliased_insp = AliasedInsp(
self,
insp,
alias,
name,
- with_polymorphic_mappers
- if with_polymorphic_mappers
- else mapper.with_polymorphic_mappers,
- with_polymorphic_discriminator
- if with_polymorphic_discriminator is not None
- else mapper.polymorphic_on,
+ (
+ with_polymorphic_mappers
+ if with_polymorphic_mappers
+ else mapper.with_polymorphic_mappers
+ ),
+ (
+ with_polymorphic_discriminator
+ if with_polymorphic_discriminator is not None
+ else mapper.polymorphic_on
+ ),
base_alias,
use_mapper_path,
adapt_on_names,
represents_outer_join,
)
@@ -1112,13 +1116,15 @@
self.where_criteria = lambdas.DeferredLambdaElement(
where_criteria,
roles.WhereHavingRole,
lambda_args=(
_WrapUserEntity(
- self.root_entity
- if self.root_entity is not None
- else self.entity.entity,
+ (
+ self.root_entity
+ if self.root_entity is not None
+ else self.entity.entity
+ ),
),
),
opts=lambdas.LambdaOptions(
track_closure_variables=track_closure_variables
),
--- a/sqlalchemy:lib/sqlalchemy/pool/base.py
+++ b/sqlalchemy:lib/sqlalchemy/pool/base.py
@@ -721,13 +721,15 @@
if dbapi_connection is not None:
if connection_record and echo:
pool.logger.debug(
"Connection %r being returned to pool%s",
dbapi_connection,
- ", transaction state was already reset by caller"
- if not reset
- else "",
+ (
+ ", transaction state was already reset by caller"
+ if not reset
+ else ""
+ ),
)
try:
fairy = fairy or _ConnectionFairy(
dbapi_connection,
--- a/sqlalchemy:lib/sqlalchemy/sql/annotation.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/annotation.py
@@ -29,13 +29,15 @@
return (
"_annotations",
tuple(
(
key,
- value._gen_cache_key(anon_map_, [])
- if isinstance(value, HasCacheKey)
- else value,
+ (
+ value._gen_cache_key(anon_map_, [])
+ if isinstance(value, HasCacheKey)
+ else value
+ ),
)
for key, value in [
(key, self._annotations[key])
for key in sorted(self._annotations)
]
--- a/sqlalchemy:lib/sqlalchemy/sql/coercions.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/coercions.py
@@ -553,13 +553,15 @@
non_literal_expressions[o] = elements.Null()
if non_literal_expressions:
return elements.ClauseList(
*[
- non_literal_expressions[o]
- if o in non_literal_expressions
- else expr._bind_param(operator, o)
+ (
+ non_literal_expressions[o]
+ if o in non_literal_expressions
+ else expr._bind_param(operator, o)
+ )
for o in element
]
)
else:
return expr._bind_param(operator, element, expanding=True)
@@ -823,13 +825,13 @@
"or use %(literal_column)s(%(column)r) "
"for more specificity"
% {
"column": util.ellipses_string(element),
"argname": "for argument %s" % (argname,) if argname else "",
- "literal_column": "literal_column"
- if guess_is_literal
- else "column",
+ "literal_column": (
+ "literal_column" if guess_is_literal else "column"
+ ),
}
)
class ReturnsRowsImpl(RoleImpl):
--- a/sqlalchemy:lib/sqlalchemy/sql/compiler.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/compiler.py
@@ -886,15 +886,17 @@
return dict(
(key, value)
for key, value in (
(
self.bind_names[bindparam],
- bindparam.type._cached_bind_processor(self.dialect)
- if not bindparam.type._is_tuple_type
- else tuple(
- elem_type._cached_bind_processor(self.dialect)
- for elem_type in bindparam.type.types
+ (
+ bindparam.type._cached_bind_processor(self.dialect)
+ if not bindparam.type._is_tuple_type
+ else tuple(
+ elem_type._cached_bind_processor(self.dialect)
+ for elem_type in bindparam.type.types
+ )
),
)
for bindparam in self.bind_names
)
if value is not None
@@ -1315,13 +1317,18 @@
table = self.statement.table
ret = {col: idx for idx, col in enumerate(self.returning)}
getters = [
- (operator.itemgetter(ret[col]), True)
- if col in ret
- else (operator.methodcaller("get", key_getter(col), None), False)
+ (
+ (operator.itemgetter(ret[col]), True)
+ if col in ret
+ else (
+ operator.methodcaller("get", key_getter(col), None),
+ False,
+ )
+ )
for col in table.primary_key
]
row_fn = result.result_tuple([col.key for col in table.primary_key])
@@ -1687,28 +1694,48 @@
)
def _format_frame_clause(self, range_, **kw):
return "%s AND %s" % (
- "UNBOUNDED PRECEDING"
- if range_[0] is elements.RANGE_UNBOUNDED
- else "CURRENT ROW"
- if range_[0] is elements.RANGE_CURRENT
- else "%s PRECEDING"
- % (self.process(elements.literal(abs(range_[0])), **kw),)
- if range_[0] < 0
- else "%s FOLLOWING"
- % (self.process(elements.literal(range_[0]), **kw),),
- "UNBOUNDED FOLLOWING"
- if range_[1] is elements.RANGE_UNBOUNDED
- else "CURRENT ROW"
- if range_[1] is elements.RANGE_CURRENT
- else "%s PRECEDING"
- % (self.process(elements.literal(abs(range_[1])), **kw),)
- if range_[1] < 0
- else "%s FOLLOWING"
- % (self.process(elements.literal(range_[1]), **kw),),
+ (
+ "UNBOUNDED PRECEDING"
+ if range_[0] is elements.RANGE_UNBOUNDED
+ else (
+ "CURRENT ROW"
+ if range_[0] is elements.RANGE_CURRENT
+ else (
+ "%s PRECEDING"
+ % (
+ self.process(
+ elements.literal(abs(range_[0])), **kw
+ ),
+ )
+ if range_[0] < 0
+ else "%s FOLLOWING"
+ % (self.process(elements.literal(range_[0]), **kw),)
+ )
+ )
+ ),
+ (
+ "UNBOUNDED FOLLOWING"
+ if range_[1] is elements.RANGE_UNBOUNDED
+ else (
+ "CURRENT ROW"
+ if range_[1] is elements.RANGE_CURRENT
+ else (
+ "%s PRECEDING"
+ % (
+ self.process(
+ elements.literal(abs(range_[1])), **kw
+ ),
+ )
+ if range_[1] < 0
+ else "%s FOLLOWING"
+ % (self.process(elements.literal(range_[1]), **kw),)
+ )
+ )
+ ),
)
def visit_over(self, over, **kwargs):
if over.range_:
range_ = "RANGE BETWEEN %s" % self._format_frame_clause(
@@ -2812,16 +2839,18 @@
ret += "(%s)" % (
", ".join(
"%s%s"
% (
self.preparer.quote(col.name),
- " %s"
- % self.dialect.type_compiler.process(
- col.type, **kwargs
- )
- if alias._render_derived_w_types
- else "",
+ (
+ " %s"
+ % self.dialect.type_compiler.process(
+ col.type, **kwargs
+ )
+ if alias._render_derived_w_types
+ else ""
+ ),
)
for col in alias.c
)
)
@@ -3643,13 +3672,15 @@
)
if select._fetch_clause is not None:
text += "\n FETCH FIRST %s%s ROWS %s" % (
self.process(select._fetch_clause, **kw),
" PERCENT" if select._fetch_clause_options["percent"] else "",
- "WITH TIES"
- if select._fetch_clause_options["with_ties"]
- else "ONLY",
+ (
+ "WITH TIES"
+ if select._fetch_clause_options["with_ties"]
+ else "ONLY"
+ ),
)
return text
def visit_table(
self,
--- a/sqlalchemy:lib/sqlalchemy/sql/crud.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/crud.py
@@ -489,23 +489,27 @@
value = _create_bind_param(
compiler,
c,
value,
required=value is REQUIRED,
- name=_col_bind_name(c)
- if not compile_state._has_multi_parameters
- else "%s_m0" % _col_bind_name(c),
+ name=(
+ _col_bind_name(c)
+ if not compile_state._has_multi_parameters
+ else "%s_m0" % _col_bind_name(c)
+ ),
**kw
)
elif value._is_bind_parameter:
value = _handle_values_anonymous_param(
compiler,
c,
value,
- name=_col_bind_name(c)
- if not compile_state._has_multi_parameters
- else "%s_m0" % _col_bind_name(c),
+ name=(
+ _col_bind_name(c)
+ if not compile_state._has_multi_parameters
+ else "%s_m0" % _col_bind_name(c)
+ ),
**kw
)
else:
# value is a SQL expression
value = compiler.process(value.self_group(), **kw)
--- a/sqlalchemy:lib/sqlalchemy/sql/ddl.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/ddl.py
@@ -956,14 +956,16 @@
unsorted_tables = [t for t in tables if self._can_drop_table(t)]
collection = list(
reversed(
sort_tables_and_constraints(
unsorted_tables,
- filter_fn=lambda constraint: False
- if not self.dialect.supports_alter
- or constraint.name is None
- else None,
+ filter_fn=lambda constraint: (
+ False
+ if not self.dialect.supports_alter
+ or constraint.name is None
+ else None
+ ),
)
)
)
except exc.CircularDependencyError as err2:
if not self.dialect.supports_alter:
--- a/sqlalchemy:lib/sqlalchemy/sql/default_comparator.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/default_comparator.py
@@ -210,13 +210,15 @@
other,
expr=expr,
operator=operators.match_op,
),
result_type=type_api.MATCHTYPE,
- negate=operators.not_match_op
- if op is operators.match_op
- else operators.match_op,
+ negate=(
+ operators.not_match_op
+ if op is operators.match_op
+ else operators.match_op
+ ),
**kw
)
def _distinct_impl(expr, op, **kw):
@@ -246,13 +248,15 @@
operator=operators.and_,
group=False,
group_contents=False,
),
op,
- negate=operators.not_between_op
- if op is operators.between_op
- else operators.between_op,
+ negate=(
+ operators.not_between_op
+ if op is operators.between_op
+ else operators.between_op
+ ),
modifiers=kw,
)
def _collate_impl(expr, op, other, **kw):
@@ -270,13 +274,15 @@
return _boolean_compare(
expr,
op,
pattern,
flags=flags,
- negate=operators.not_regexp_match_op
- if op is operators.regexp_match_op
- else operators.regexp_match_op,
+ negate=(
+ operators.not_regexp_match_op
+ if op is operators.regexp_match_op
+ else operators.regexp_match_op
+ ),
**kw
)
def _regexp_replace_impl(expr, op, pattern, replacement, flags, **kw):
--- a/sqlalchemy:lib/sqlalchemy/sql/dml.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/dml.py
@@ -74,16 +74,18 @@
% statement.__visit_name__.upper()
)
for parameters in statement._multi_values:
multi_parameters = [
- {
- c.key: value
- for c, value in zip(statement.table.c, parameter_set)
- }
- if isinstance(parameter_set, collections_abc.Sequence)
- else parameter_set
+ (
+ {
+ c.key: value
+ for c, value in zip(statement.table.c, parameter_set)
+ }
+ if isinstance(parameter_set, collections_abc.Sequence)
+ else parameter_set
+ )
for parameter_set in parameters
]
if self._no_parameters:
self._no_parameters = False
--- a/sqlalchemy:lib/sqlalchemy/sql/elements.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/elements.py
@@ -983,13 +983,15 @@
key = self._proxy_key
else:
key = name
co = ColumnClause(
- coercions.expect(roles.TruncatedLabelRole, name)
- if name_is_truncatable
- else name,
+ (
+ coercions.expect(roles.TruncatedLabelRole, name)
+ if name_is_truncatable
+ else name
+ ),
type_=getattr(self, "type", None),
_selectable=selectable,
)
co._propagate_attrs = selectable._propagate_attrs
@@ -1504,13 +1506,16 @@
key = quoted_name(key, quote)
if unique:
self.key = _anonymous_label.safe_construct(
id(self),
- key
- if key is not None and not isinstance(key, _anonymous_label)
- else "param",
+ (
+ key
+ if key is not None
+ and not isinstance(key, _anonymous_label)
+ else "param"
+ ),
sanitize_key=True,
)
self._key_is_anon = True
elif key:
self.key = key
@@ -2153,13 +2158,15 @@
argument as it also indicates positional ordering.
"""
selectable = util.preloaded.sql_selectable
positional_input_cols = [
- ColumnClause(col.key, types.pop(col.key))
- if col.key in types
- else col
+ (
+ ColumnClause(col.key, types.pop(col.key))
+ if col.key in types
+ else col
+ )
for col in cols
]
keyed_input_cols = [
ColumnClause(key, type_) for key, type_ in types.items()
]
@@ -2507,13 +2514,13 @@
"Invoking %(name)s() without arguments is deprecated, and "
"will be disallowed in a future release. For an empty "
"%(name)s() construct, use %(name)s(%(continue_on)s, *args)."
% {
"name": operator.__name__,
- "continue_on": "True"
- if continue_on is True_._singleton
- else "False",
+ "continue_on": (
+ "True" if continue_on is True_._singleton else "False"
+ ),
},
version="1.4",
)
return cls._construct_raw(operator)
@@ -4704,13 +4711,15 @@
name_is_truncatable=False,
disallow_is_literal=False,
**kw
):
c = ColumnClause(
- coercions.expect(roles.TruncatedLabelRole, name or self.name)
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(roles.TruncatedLabelRole, name or self.name)
+ if name_is_truncatable
+ else (name or self.name)
+ ),
type_=self.type,
_selectable=selectable,
is_literal=False,
)
c._propagate_attrs = selectable._propagate_attrs
@@ -5005,13 +5014,15 @@
name is None
or name == self.name
)
)
c = self._constructor(
- coercions.expect(roles.TruncatedLabelRole, name or self.name)
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(roles.TruncatedLabelRole, name or self.name)
+ if name_is_truncatable
+ else (name or self.name)
+ ),
type_=self.type,
_selectable=selectable,
is_literal=is_literal,
)
c._propagate_attrs = selectable._propagate_attrs
--- a/sqlalchemy:lib/sqlalchemy/sql/schema.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/schema.py
@@ -1968,15 +1968,17 @@
" with this Column object until its 'name' has "
"been assigned."
)
try:
c = self._constructor(
- coercions.expect(
- roles.TruncatedLabelRole, name if name else self.name
- )
- if name_is_truncatable
- else (name or self.name),
+ (
+ coercions.expect(
+ roles.TruncatedLabelRole, name if name else self.name
+ )
+ if name_is_truncatable
+ else (name or self.name)
+ ),
self.type,
# this may actually be ._proxy_key when the key is incoming
key=key if key else name if name else self.key,
primary_key=self.primary_key,
nullable=self.nullable,
@@ -3779,14 +3781,16 @@
fkc = ForeignKeyConstraint(
[x.parent.key for x in self.elements],
[
x._get_colspec(
schema=schema,
- table_name=target_table.name
- if target_table is not None
- and x._table_key() == x.parent.table.key
- else None,
+ table_name=(
+ target_table.name
+ if target_table is not None
+ and x._table_key() == x.parent.table.key
+ else None
+ ),
)
for x in self.elements
],
name=self.name,
onupdate=self.onupdate,
--- a/sqlalchemy:lib/sqlalchemy/sql/sqltypes.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/sqltypes.py
@@ -733,13 +733,15 @@
)
# we're a "numeric", DBAPI returns floats, convert.
return processors.to_decimal_processor_factory(
decimal.Decimal,
- self.scale
- if self.scale is not None
- else self._default_decimal_return_scale,
+ (
+ self.scale
+ if self.scale is not None
+ else self._default_decimal_return_scale
+ ),
)
else:
if dialect.supports_native_decimal:
return processors.to_float
else:
@@ -2477,13 +2479,15 @@
index = coercions.expect(
roles.BinaryElementRole,
index,
expr=self.expr,
operator=operators.json_getitem_op,
- bindparam_type=JSON.JSONIntIndexType
- if isinstance(index, int)
- else JSON.JSONStrIndexType,
+ bindparam_type=(
+ JSON.JSONIntIndexType
+ if isinstance(index, int)
+ else JSON.JSONStrIndexType
+ ),
)
operator = operators.json_getitem_op
return operator, index, self.type
--- a/sqlalchemy:lib/sqlalchemy/sql/traversals.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/traversals.py
@@ -176,15 +176,17 @@
result += (attrname, obj)
elif meth is PROPAGATE_ATTRS:
result += (
attrname,
obj["compile_state_plugin"],
- obj["plugin_subject"]._gen_cache_key(
- anon_map, bindparams
- )
- if obj["plugin_subject"]
- else None,
+ (
+ obj["plugin_subject"]._gen_cache_key(
+ anon_map, bindparams
+ )
+ if obj["plugin_subject"]
+ else None
+ ),
)
elif meth is InternalTraversal.dp_annotations_key:
# obj is here is the _annotations dict. however, we
# want to use the memoized cache key version of it. for
# Columns, this should be long lived. For select()
@@ -436,22 +438,26 @@
return tuple(obj)
def visit_multi(self, attrname, obj, parent, anon_map, bindparams):
return (
attrname,
- obj._gen_cache_key(anon_map, bindparams)
- if isinstance(obj, HasCacheKey)
- else obj,
+ (
+ obj._gen_cache_key(anon_map, bindparams)
+ if isinstance(obj, HasCacheKey)
+ else obj
+ ),
)
def visit_multi_list(self, attrname, obj, parent, anon_map, bindparams):
return (
attrname,
tuple(
- elem._gen_cache_key(anon_map, bindparams)
- if isinstance(elem, HasCacheKey)
- else elem
+ (
+ elem._gen_cache_key(anon_map, bindparams)
+ if isinstance(elem, HasCacheKey)
+ else elem
+ )
for elem in obj
),
)
def visit_has_cache_key_tuples(
@@ -547,21 +553,29 @@
):
is_legacy = "legacy" in attrname
return tuple(
(
- target
- if is_legacy and isinstance(target, str)
- else target._gen_cache_key(anon_map, bindparams),
- onclause
- if is_legacy and isinstance(onclause, str)
- else onclause._gen_cache_key(anon_map, bindparams)
- if onclause is not None
- else None,
- from_._gen_cache_key(anon_map, bindparams)
- if from_ is not None
- else None,
+ (
+ target
+ if is_legacy and isinstance(target, str)
+ else target._gen_cache_key(anon_map, bindparams)
+ ),
+ (
+ onclause
+ if is_legacy and isinstance(onclause, str)
+ else (
+ onclause._gen_cache_key(anon_map, bindparams)
+ if onclause is not None
+ else None
+ )
+ ),
+ (
+ from_._gen_cache_key(anon_map, bindparams)
+ if from_ is not None
+ else None
+ ),
tuple([(key, flags[key]) for key in sorted(flags)]),
)
for (target, onclause, from_, flags) in obj
)
@@ -624,13 +638,15 @@
return (
attrname,
tuple(
(
key,
- value._gen_cache_key(anon_map, bindparams)
- if isinstance(value, HasCacheKey)
- else value,
+ (
+ value._gen_cache_key(anon_map, bindparams)
+ if isinstance(value, HasCacheKey)
+ else value
+ ),
)
for key, value in [(key, obj[key]) for key in sorted(obj)]
),
)
@@ -657,13 +673,15 @@
):
return (
attrname,
tuple(
(
- key._gen_cache_key(anon_map, bindparams)
- if hasattr(key, "__clause_element__")
- else key,
+ (
+ key._gen_cache_key(anon_map, bindparams)
+ if hasattr(key, "__clause_element__")
+ else key
+ ),
value._gen_cache_key(anon_map, bindparams),
)
for key, value in obj
),
)
@@ -674,13 +692,15 @@
# insert ordering will retain that sorting
return (
attrname,
tuple(
(
- k._gen_cache_key(anon_map, bindparams)
- if hasattr(k, "__clause_element__")
- else k,
+ (
+ k._gen_cache_key(anon_map, bindparams)
+ if hasattr(k, "__clause_element__")
+ else k
+ ),
obj[k]._gen_cache_key(anon_map, bindparams),
)
for k in obj
),
)
@@ -811,13 +831,15 @@
self, attrname, parent, element, clone=_clone, **kw
):
# sequence of 2-tuples
return [
(
- clone(key, **kw)
- if hasattr(key, "__clause_element__")
- else key,
+ (
+ clone(key, **kw)
+ if hasattr(key, "__clause_element__")
+ else key
+ ),
clone(value, **kw),
)
for key, value in element
]
@@ -835,13 +857,15 @@
# sequence of sequences, each sequence contains a list/dict/tuple
def copy(elem):
if isinstance(elem, (list, tuple)):
return [
- clone(value, **kw)
- if hasattr(value, "__clause_element__")
- else value
+ (
+ clone(value, **kw)
+ if hasattr(value, "__clause_element__")
+ else value
+ )
for value in elem
]
elif isinstance(elem, dict):
return {
(
--- a/sqlalchemy:lib/sqlalchemy/sql/type_api.py
+++ b/sqlalchemy:lib/sqlalchemy/sql/type_api.py
@@ -623,13 +623,15 @@
def _static_cache_key(self):
names = util.get_cls_kwargs(self.__class__)
return (self.__class__,) + tuple(
(
k,
- self.__dict__[k]._static_cache_key
- if isinstance(self.__dict__[k], TypeEngine)
- else self.__dict__[k],
+ (
+ self.__dict__[k]._static_cache_key
+ if isinstance(self.__dict__[k], TypeEngine)
+ else self.__dict__[k]
+ ),
)
for k in names
if k in self.__dict__ and not k.startswith("_")
)
--- a/sqlalchemy:lib/sqlalchemy/testing/exclusions.py
+++ b/sqlalchemy:lib/sqlalchemy/testing/exclusions.py
@@ -230,16 +230,16 @@
def _format_description(self, config, negate=False):
bool_ = self(config)
if negate:
bool_ = not negate
return self.description % {
- "driver": config.db.url.get_driver_name()
- if config
- else "<no driver>",
- "database": config.db.url.get_backend_name()
- if config
- else "<no database>",
+ "driver": (
+ config.db.url.get_driver_name() if config else "<no driver>"
+ ),
+ "database": (
+ config.db.url.get_backend_name() if config else "<no database>"
+ ),
"doesnt_support": "doesn't support" if bool_ else "does support",
"does_support": "does support" if bool_ else "doesn't support",
}
def _as_string(self, config=None, negate=False):
--- a/sqlalchemy:lib/sqlalchemy/testing/plugin/pytestplugin.py
+++ b/sqlalchemy:lib/sqlalchemy/testing/plugin/pytestplugin.py
@@ -603,13 +603,13 @@
_combination_id_fns = {
"i": lambda obj: obj,
"r": repr,
"s": str,
- "n": lambda obj: obj.__name__
- if hasattr(obj, "__name__")
- else type(obj).__name__,
+ "n": lambda obj: (
+ obj.__name__ if hasattr(obj, "__name__") else type(obj).__name__
+ ),
}
def combinations(self, *arg_sets, **kw):
"""Facade for pytest.mark.parametrize.
--- a/sqlalchemy:lib/sqlalchemy/util/deprecations.py
+++ b/sqlalchemy:lib/sqlalchemy/util/deprecations.py
@@ -87,13 +87,15 @@
message = (
".. deprecated:: 1.4 The %s class is considered legacy as of the "
"1.x series of SQLAlchemy and %s in 2.0."
% (
clsname,
- "will be removed"
- if not becomes_legacy
- else "becomes a legacy construct",
+ (
+ "will be removed"
+ if not becomes_legacy
+ else "becomes a legacy construct"
+ ),
)
)
if alternative:
message += " " + alternative
@@ -190,13 +192,15 @@
"The %s %s is considered legacy as of the "
"1.x series of SQLAlchemy and %s in 2.0."
% (
api_name,
type_,
- "will be removed"
- if not becomes_legacy
- else "becomes a legacy construct",
+ (
+ "will be removed"
+ if not becomes_legacy
+ else "becomes a legacy construct"
+ ),
)
)
if ":attr:" in api_name:
attribute_ok = kw.pop("warn_on_attribute_access", False)
--- a/sqlalchemy:test/dialect/mssql/test_compiler.py
+++ b/sqlalchemy:test/dialect/mssql/test_compiler.py
@@ -621,20 +621,24 @@
metadata = MetaData()
tbl = Table(
"test",
metadata,
Column("id", Integer, primary_key=True),
- schema=quoted_name("Foo.dbo", True)
- if not use_schema_translate
- else None,
+ schema=(
+ quoted_name("Foo.dbo", True)
+ if not use_schema_translate
+ else None
+ ),
)
self.assert_compile(
select(tbl),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
- schema_translate_map={None: quoted_name("Foo.dbo", True)}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: quoted_name("Foo.dbo", True)}
+ if use_schema_translate
+ else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
@testing.combinations((True,), (False,), argnames="use_schema_translate")
def test_force_schema_quoted_w_dot_case_sensitive(
@@ -648,13 +652,13 @@
schema="[Foo.dbo]" if not use_schema_translate else None,
)
self.assert_compile(
select(tbl),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test",
- schema_translate_map={None: "[Foo.dbo]"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "[Foo.dbo]"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
@testing.combinations((True,), (False,), argnames="use_schema_translate")
def test_schema_autosplit_w_dot_case_insensitive(
@@ -668,13 +672,13 @@
schema="foo.dbo" if not use_schema_translate else None,
)
self.assert_compile(
select(tbl),
"SELECT foo.dbo.test.id FROM foo.dbo.test",
- schema_translate_map={None: "foo.dbo"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "foo.dbo"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
@testing.combinations((True,), (False,), argnames="use_schema_translate")
def test_schema_autosplit_w_dot_case_sensitive(self, use_schema_translate):
@@ -686,13 +690,13 @@
schema="Foo.dbo" if not use_schema_translate else None,
)
self.assert_compile(
select(tbl),
"SELECT [Foo].dbo.test.id FROM [Foo].dbo.test",
- schema_translate_map={None: "Foo.dbo"}
- if use_schema_translate
- else None,
+ schema_translate_map=(
+ {None: "Foo.dbo"} if use_schema_translate else None
+ ),
render_schema_translate=True if use_schema_translate else False,
)
def test_delete_schema(self):
metadata = MetaData()
--- a/sqlalchemy:test/dialect/mysql/test_reflection.py
+++ b/sqlalchemy:test/dialect/mysql/test_reflection.py
@@ -660,35 +660,41 @@
{"name": "q", "nullable": True, "default": None},
{"name": "p", "nullable": True, "default": current_timestamp},
{
"name": "r",
"nullable": False,
- "default": None
- if explicit_defaults_for_timestamp
- else (
- "%(current_timestamp)s ON UPDATE %(current_timestamp)s"
- )
- % {"current_timestamp": current_timestamp},
+ "default": (
+ None
+ if explicit_defaults_for_timestamp
+ else (
+ "%(current_timestamp)s "
+ "ON UPDATE %(current_timestamp)s"
+ )
+ % {"current_timestamp": current_timestamp}
+ ),
},
{"name": "s", "nullable": False, "default": current_timestamp},
{
"name": "t",
- "nullable": True
- if explicit_defaults_for_timestamp
- else False,
- "default": None
- if explicit_defaults_for_timestamp
- else (
- "%(current_timestamp)s ON UPDATE %(current_timestamp)s"
- )
- % {"current_timestamp": current_timestamp},
+ "nullable": (
+ True if explicit_defaults_for_timestamp else False
+ ),
+ "default": (
+ None
+ if explicit_defaults_for_timestamp
+ else (
+ "%(current_timestamp)s "
+ "ON UPDATE %(current_timestamp)s"
+ )
+ % {"current_timestamp": current_timestamp}
+ ),
},
{
"name": "u",
- "nullable": True
- if explicit_defaults_for_timestamp
- else False,
+ "nullable": (
+ True if explicit_defaults_for_timestamp else False
+ ),
"default": current_timestamp,
},
],
)
--- a/sqlalchemy:test/engine/test_execute.py
+++ b/sqlalchemy:test/engine/test_execute.py
@@ -384,18 +384,20 @@
name = util.u("méil")
users = self.tables.users
with testing.db.connect() as conn:
assert_raises_message(
tsa.exc.StatementError,
- util.u(
- "A value is required for bind parameter 'uname'\n"
- r".*SELECT users.user_name AS .m\xe9il."
- )
- if util.py2k
- else util.u(
- "A value is required for bind parameter 'uname'\n"
- ".*SELECT users.user_name AS .méil."
+ (
+ util.u(
+ "A value is required for bind parameter 'uname'\n"
+ r".*SELECT users.user_name AS .m\xe9il."
+ )
+ if util.py2k
+ else util.u(
+ "A value is required for bind parameter 'uname'\n"
+ ".*SELECT users.user_name AS .méil."
+ )
),
conn.execute,
select(users.c.user_name.label(name)).where(
users.c.user_name == bindparam("uname")
),
--- a/sqlalchemy:test/ext/mypy/test_mypy_plugin_py3k.py
+++ b/sqlalchemy:test/ext/mypy/test_mypy_plugin_py3k.py
@@ -55,13 +55,15 @@
"--cache-dir",
cachedir,
"--config-file",
os.path.join(
cachedir,
- "sqla_mypy_config.cfg"
- if use_plugin
- else "plain_mypy_config.cfg",
+ (
+ "sqla_mypy_config.cfg"
+ if use_plugin
+ else "plain_mypy_config.cfg"
+ ),
),
]
args.append(path)
--- a/sqlalchemy:test/ext/test_associationproxy.py
+++ b/sqlalchemy:test/ext/test_associationproxy.py
@@ -1648,11 +1648,11 @@
for ii in range(16):
user = User("user%d" % ii)
if ii % 2 == 0:
user.singular = Singular(
- value="singular%d" % ii if ii % 4 == 0 else None
+ value=("singular%d" % ii) if ii % 4 == 0 else None
)
session.add(user)
for jj in words[(ii % len(words)) : ((ii + 3) % len(words))]:
k = Keyword(jj)
user.keywords.append(k)
--- a/sqlalchemy:test/ext/test_automap.py
+++ b/sqlalchemy:test/ext/test_automap.py
@@ -468,15 +468,18 @@
Table(
"table_%d" % i,
m,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
- Column(
- "t_%d_id" % (i - 1), ForeignKey("table_%d.id" % (i - 1))
- )
- if i > 4
- else None,
+ (
+ Column(
+ "t_%d_id" % (i - 1),
+ ForeignKey("table_%d.id" % (i - 1)),
+ )
+ if i > 4
+ else None
+ ),
)
m.drop_all(e)
m.create_all(e)
def _automap(self, e):
--- a/sqlalchemy:test/ext/test_compiler.py
+++ b/sqlalchemy:test/ext/test_compiler.py
@@ -200,13 +200,15 @@
t1 = table("t1", column("q"))
stmt = select(my_function(t1.c.q))
self.assert_compile(
stmt,
- "SELECT my_function(t1.q) AS my_function_1 FROM t1"
- if named
- else "SELECT my_function(t1.q) AS anon_1 FROM t1",
+ (
+ "SELECT my_function(t1.q) AS my_function_1 FROM t1"
+ if named
+ else "SELECT my_function(t1.q) AS anon_1 FROM t1"
+ ),
dialect="sqlite",
)
if named:
eq_(stmt.selected_columns.keys(), ["my_function"])
--- a/sqlalchemy:test/orm/inheritance/test_relationship.py
+++ b/sqlalchemy:test/orm/inheritance/test_relationship.py
@@ -2955,13 +2955,15 @@
q = sess.query(Engineer, Manager).join(Engineer.manager)
else:
m1 = aliased(Manager, flat=True)
q = sess.query(Engineer, m1).join(Engineer.manager.of_type(m1))
- with _aliased_join_warning(
- "Manager->managers"
- ) if autoalias else util.nullcontext():
+ with (
+ _aliased_join_warning("Manager->managers")
+ if autoalias
+ else util.nullcontext()
+ ):
self.assert_compile(
q,
"SELECT people.type AS people_type, engineers.id AS "
"engineers_id, "
"people.id AS people_id, "
--- a/sqlalchemy:test/orm/inheritance/test_single.py
+++ b/sqlalchemy:test/orm/inheritance/test_single.py
@@ -1840,13 +1840,15 @@
q = s.query(Boss).join(Engineer, Engineer.manager_id == Boss.id)
else:
e1 = aliased(Engineer, flat=True)
q = s.query(Boss).join(e1, e1.manager_id == Boss.id)
- with _aliased_join_warning(
- "Engineer->engineer"
- ) if autoalias else util.nullcontext():
+ with (
+ _aliased_join_warning("Engineer->engineer")
+ if autoalias
+ else util.nullcontext()
+ ):
self.assert_compile(
q,
"SELECT manager.id AS manager_id, employee.id AS employee_id, "
"employee.name AS employee_name, "
"employee.type AS employee_type, "
@@ -1905,13 +1907,15 @@
q = s.query(Engineer).join(Boss, Engineer.manager_id == Boss.id)
else:
b1 = aliased(Boss, flat=True)
q = s.query(Engineer).join(b1, Engineer.manager_id == b1.id)
- with _aliased_join_warning(
- "Boss->manager"
- ) if autoalias else util.nullcontext():
+ with (
+ _aliased_join_warning("Boss->manager")
+ if autoalias
+ else util.nullcontext()
+ ):
self.assert_compile(
q,
"SELECT engineer.id AS engineer_id, "
"employee.id AS employee_id, "
"employee.name AS employee_name, "
--- a/sqlalchemy:test/orm/test_deprecations.py
+++ b/sqlalchemy:test/orm/test_deprecations.py
@@ -5681,16 +5681,18 @@
if not session_present:
q = q.with_session(None)
eq_ignore_whitespace(
str(q),
- "SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = ?"
- if expect_bound
- else (
+ (
"SELECT users.id AS users_id, users.name AS users_name "
- "FROM users WHERE users.id = :id_1"
+ "FROM users WHERE users.id = ?"
+ if expect_bound
+ else (
+ "SELECT users.id AS users_id, users.name AS users_name "
+ "FROM users WHERE users.id = :id_1"
+ )
),
)
def test_query_bound_session(self):
self._test(True, True, True)
--- a/sqlalchemy:test/orm/test_events.py
+++ b/sqlalchemy:test/orm/test_events.py
@@ -287,13 +287,13 @@
is_update=ctx.is_update,
is_delete=ctx.is_delete,
is_orm_statement=ctx.is_orm_statement,
is_relationship_load=ctx.is_relationship_load,
is_column_load=ctx.is_column_load,
- lazy_loaded_from=ctx.lazy_loaded_from
- if ctx.is_select
- else None,
+ lazy_loaded_from=(
+ ctx.lazy_loaded_from if ctx.is_select else None
+ ),
)
return canary
def test_all_mappers_accessor_one(self):
@@ -1276,13 +1276,15 @@
lambda instance, context, attrs: instance.unloaded,
),
(
lambda session: session,
"loaded_as_persistent",
- lambda session, instance: instance.unloaded
- if instance.__class__.__name__ == "A"
- else None,
+ lambda session, instance: (
+ instance.unloaded
+ if instance.__class__.__name__ == "A"
+ else None
+ ),
),
argnames="target, event_name, fn",
)(fn)
def teardown_test(self):
--- a/sqlalchemy:test/orm/test_lazy_relations.py
+++ b/sqlalchemy:test/orm/test_lazy_relations.py
@@ -1079,15 +1079,17 @@
Address,
addresses,
properties={
"user": relationship(
User,
- primaryjoin=and_(
- users.c.id == addresses.c.user_id, users.c.id != 27
- )
- if dont_use_get
- else None,
+ primaryjoin=(
+ and_(
+ users.c.id == addresses.c.user_id, users.c.id != 27
+ )
+ if dont_use_get
+ else None
+ ),
back_populates="addresses",
)
},
)
--- a/sqlalchemy:test/orm/test_relationship_criteria.py
+++ b/sqlalchemy:test/orm/test_relationship_criteria.py
@@ -1108,13 +1108,15 @@
result = go(value)
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
CompiledSQL(
"SELECT users.id, users.name, addresses_1.id AS id_1, "
@@ -1176,13 +1178,15 @@
with self.sql_execution_asserter() as asserter:
result = go(value)
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
CompiledSQL(
"SELECT users.id, users.name FROM users ORDER BY users.id"
@@ -1305,13 +1309,15 @@
result = go(value)
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
CompiledSQL(
"SELECT users.id, users.name FROM users ORDER BY users.id"
@@ -1378,13 +1384,15 @@
result = go(value)
eq_(
result.scalars().unique().all(),
- self._user_minus_edwood(*user_address_fixture)
- if value == "ed@wood.com"
- else self._user_minus_edlala(*user_address_fixture),
+ (
+ self._user_minus_edwood(*user_address_fixture)
+ if value == "ed@wood.com"
+ else self._user_minus_edlala(*user_address_fixture)
+ ),
)
asserter.assert_(
CompiledSQL(
"SELECT users.id, users.name FROM users ORDER BY users.id"
--- a/sqlalchemy:test/sql/test_compiler.py
+++ b/sqlalchemy:test/sql/test_compiler.py
@@ -6385,13 +6385,13 @@
dialect = default.DefaultDialect()
with mock.patch.object(
dialect.statement_compiler,
"translate_select_structure",
- lambda self, to_translate, **kw: wrapped_again
- if to_translate is stmt
- else to_translate,
+ lambda self, to_translate, **kw: (
+ wrapped_again if to_translate is stmt else to_translate
+ ),
):
compiled = stmt.compile(dialect=dialect)
proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
for orig_obj, proxied_obj in zip(orig, proxied):
@@ -6444,13 +6444,13 @@
dialect = default.DefaultDialect()
with mock.patch.object(
dialect.statement_compiler,
"translate_select_structure",
- lambda self, to_translate, **kw: wrapped_again
- if to_translate is stmt
- else to_translate,
+ lambda self, to_translate, **kw: (
+ wrapped_again if to_translate is stmt else to_translate
+ ),
):
compiled = stmt.compile(dialect=dialect)
proxied = [obj[0] for (k, n, obj, type_) in compiled._result_columns]
for orig_obj, proxied_obj in zip(orig, proxied):
--- a/sqlalchemy:test/sql/test_lambdas.py
+++ b/sqlalchemy:test/sql/test_lambdas.py
@@ -383,13 +383,15 @@
def run_my_statement(parameter, add_criteria=False):
stmt = lambda_stmt(lambda: select(tab))
stmt = stmt.add_criteria(
- lambda s: s.where(tab.c.col > parameter)
- if add_criteria
- else s.where(tab.c.col == parameter),
+ lambda s: (
+ s.where(tab.c.col > parameter)
+ if add_criteria
+ else s.where(tab.c.col == parameter)
+ ),
)
stmt += lambda s: s.order_by(tab.c.id)
return stmt
@@ -408,13 +410,15 @@
def run_my_statement(parameter, add_criteria=False):
stmt = lambda_stmt(lambda: select(tab))
stmt = stmt.add_criteria(
- lambda s: s.where(tab.c.col > parameter)
- if add_criteria
- else s.where(tab.c.col == parameter),
+ lambda s: (
+ s.where(tab.c.col > parameter)
+ if add_criteria
+ else s.where(tab.c.col == parameter)
+ ),
track_on=[add_criteria],
)
stmt += lambda s: s.order_by(tab.c.id)
@@ -1810,13 +1814,13 @@
qq = [3, 4, 5]
# lambda produces either "t1 IN vv" or "t2 IN qq" based on the
# argument. will not produce a consistent cache key
elem = lambdas.DeferredLambdaElement(
- lambda tab: tab.c.q.in_(vv)
- if tab.name == "t1"
- else tab.c.q.in_(qq),
+ lambda tab: (
+ tab.c.q.in_(vv) if tab.name == "t1" else tab.c.q.in_(qq)
+ ),
roles.WhereHavingRole,
lambda_args=(t1,),
opts=lambdas.LambdaOptions(track_closure_variables=False),
)
--- a/sqlalchemy:test/sql/test_operators.py
+++ b/sqlalchemy:test/sql/test_operators.py
@@ -385,13 +385,15 @@
)
def test_modulus(self, modulus, paramstyle):
col = column("somecol", modulus())
self.assert_compile(
col.modulus(),
- "somecol %%"
- if paramstyle in ("format", "pyformat")
- else "somecol %",
+ (
+ "somecol %%"
+ if paramstyle in ("format", "pyformat")
+ else "somecol %"
+ ),
dialect=default.DefaultDialect(paramstyle=paramstyle),
)
@testing.combinations(
("format",),
@@ -402,13 +404,15 @@
)
def test_modulus_prefix(self, modulus, paramstyle):
col = column("somecol", modulus())
self.assert_compile(
col.modulus_prefix(),
- "%% somecol"
- if paramstyle in ("format", "pyformat")
- else "% somecol",
+ (
+ "%% somecol"
+ if paramstyle in ("format", "pyformat")
+ else "% somecol"
+ ),
dialect=default.DefaultDialect(paramstyle=paramstyle),
)
def test_factorial(self, factorial):
col = column("somecol", factorial())
--- a/sqlalchemy:test/sql/test_types.py
+++ b/sqlalchemy:test/sql/test_types.py
@@ -1287,13 +1287,15 @@
# replaced with binds; CAST can't affect the bound parameter
# on the way in here
eq_(
conn.execute(new_stmt).fetchall(),
- [("x", "BIND_INxBIND_OUT")]
- if coerce_fn is type_coerce
- else [("x", "xBIND_OUT")],
+ (
+ [("x", "BIND_INxBIND_OUT")]
+ if coerce_fn is type_coerce
+ else [("x", "xBIND_OUT")]
+ ),
)
def test_cast_bind(self, connection):
self._test_bind(cast, connection)
@@ -1311,13 +1313,15 @@
coerce_fn(bindparam(None, "x", String(50), unique=True), MyType),
)
eq_(
conn.execute(stmt).fetchall(),
- [("x", "BIND_INxBIND_OUT")]
- if coerce_fn is type_coerce
- else [("x", "xBIND_OUT")],
+ (
+ [("x", "BIND_INxBIND_OUT")]
+ if coerce_fn is type_coerce
+ else [("x", "xBIND_OUT")]
+ ),
)
def test_cast_existing_typed(self, connection):
MyType = self.MyType
coerce_fn = cast
--- a/virtualenv:src/virtualenv/discovery/py_info.py
+++ b/virtualenv:src/virtualenv/discovery/py_info.py
@@ -225,22 +225,26 @@
", ".join(
"{}={}".format(k, v)
for k, v in (
("spec", self.spec),
(
- "system"
- if self.system_executable is not None and self.system_executable != self.executable
- else None,
+ (
+ "system"
+ if self.system_executable is not None and self.system_executable != self.executable
+ else None
+ ),
self.system_executable,
),
(
- "original"
- if (
- self.original_executable != self.system_executable
- and self.original_executable != self.executable
- )
- else None,
+ (
+ "original"
+ if (
+ self.original_executable != self.system_executable
+ and self.original_executable != self.executable
+ )
+ else None
+ ),
self.original_executable,
),
("exe", self.executable),
("platform", self.platform),
("version", repr(self.version)),
--- a/virtualenv:tests/unit/create/test_creator.py
+++ b/virtualenv:tests/unit/create/test_creator.py
@@ -112,19 +112,21 @@
@pytest.mark.parametrize(
"creator, isolated",
[
- pytest.param(
- *i,
- marks=pytest.mark.xfail(
- reason="https://bitbucket.org/pypy/pypy/issues/3159/pypy36-730-venv-fails-with-copies-on-linux",
- strict=True,
+ (
+ pytest.param(
+ *i,
+ marks=pytest.mark.xfail(
+ reason="https://bitbucket.org/pypy/pypy/issues/3159/pypy36-730-venv-fails-with-copies-on-linux",
+ strict=True,
+ )
)
+ if _VENV_BUG_ON and i[0][0] == "venv" and i[0][1] == "copies"
+ else i
)
- if _VENV_BUG_ON and i[0][0] == "venv" and i[0][1] == "copies"
- else i
for i in product(CREATE_METHODS, ["isolated", "global"])
],
ids=lambda i: "-".join(i) if isinstance(i, tuple) else i,
)
def test_create_no_seed(python, creator, isolated, system, coverage_env, special_name_dir):
--- a/warehouse:tests/unit/accounts/test_views.py
+++ b/warehouse:tests/unit/accounts/test_views.py
@@ -2058,13 +2058,15 @@
]
assert isinstance(result, HTTPSeeOther)
assert result.headers["Location"] == "/"
assert db_request.route_path.calls == [
- pretend.call("manage.project.roles", project_name=project.name)
- if desired_role == "Owner"
- else pretend.call("packaging.project", name=project.name)
+ (
+ pretend.call("manage.project.roles", project_name=project.name)
+ if desired_role == "Owner"
+ else pretend.call("packaging.project", name=project.name)
+ )
]
@pytest.mark.parametrize(
("exception", "message"),
[
--- a/warehouse:tests/unit/email/test_init.py
+++ b/warehouse:tests/unit/email/test_init.py
@@ -506,13 +506,13 @@
"tag": "account✉sent",
"user_id": stub_user.id,
"ip_address": pyramid_request.remote_addr,
"additional": {
"from_": "noreply@example.com",
- "to": "other@example.com"
- if stub_email
- else "email@example.com",
+ "to": (
+ "other@example.com" if stub_email else "email@example.com"
+ ),
"subject": "Email Subject",
"redact_ip": False,
},
},
)
--- a/warehouse:warehouse/manage/views.py
+++ b/warehouse:warehouse/manage/views.py
@@ -276,13 +276,13 @@
self.user_service.record_event(
self.request.user.id,
tag="account✉primary:change",
ip_address=self.request.remote_addr,
additional={
- "old_primary": previous_primary_email.email
- if previous_primary_email
- else None,
+ "old_primary": (
+ previous_primary_email.email if previous_primary_email else None
+ ),
"new_primary": new_primary_email.email,
},
)
self.request.session.flash(