Remove unnecessary pylint configs from components [m-r]* (#98924)

This commit is contained in:
Ville Skyttä 2023-08-24 01:56:50 +03:00 committed by GitHub
parent 3b4774d9ed
commit 34b47a2597
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
51 changed files with 23 additions and 116 deletions

View file

@ -31,7 +31,6 @@ ATTR_IMAGES = "images"
DEFAULT_SANDBOX = False DEFAULT_SANDBOX = False
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_RECIPIENT): vol.Email(), vol.Optional(CONF_SENDER): vol.Email()} {vol.Required(CONF_RECIPIENT): vol.Email(), vol.Optional(CONF_SENDER): vol.Email()}
) )

View file

@ -144,7 +144,7 @@ def error_response(
def supports_encryption() -> bool: def supports_encryption() -> bool:
"""Test if we support encryption.""" """Test if we support encryption."""
try: try:
import nacl # noqa: F401 pylint: disable=unused-import, import-outside-toplevel import nacl # noqa: F401 pylint: disable=import-outside-toplevel
return True return True
except OSError: except OSError:

View file

@ -59,7 +59,6 @@ def push_registrations(hass):
return targets return targets
# pylint: disable=invalid-name
def log_rate_limits(hass, device_name, resp, level=logging.INFO): def log_rate_limits(hass, device_name, resp, level=logging.INFO):
"""Output rate limit log line at given level.""" """Output rate limit log line at given level."""
if ATTR_PUSH_RATE_LIMITS not in resp: if ATTR_PUSH_RATE_LIMITS not in resp:

View file

@ -85,7 +85,6 @@ class MpdDevice(MediaPlayerEntity):
_attr_media_content_type = MediaType.MUSIC _attr_media_content_type = MediaType.MUSIC
# pylint: disable=no-member
def __init__(self, server, port, password, name): def __init__(self, server, port, password, name):
"""Initialize the MPD device.""" """Initialize the MPD device."""
self.server = server self.server = server

View file

@ -563,9 +563,7 @@ async def async_get_broker_settings(
) )
schema = vol.Schema({cv.string: cv.template}) schema = vol.Schema({cv.string: cv.template})
schema(validated_user_input[CONF_WS_HEADERS]) schema(validated_user_input[CONF_WS_HEADERS])
except JSON_DECODE_EXCEPTIONS + ( # pylint: disable=wrong-exception-operation except JSON_DECODE_EXCEPTIONS + (vol.MultipleInvalid,):
vol.MultipleInvalid,
):
errors["base"] = "bad_ws_headers" errors["base"] = "bad_ws_headers"
return False return False
return True return True

View file

@ -307,31 +307,31 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
self._attr_color_mode = ColorMode.HS self._attr_color_mode = ColorMode.HS
self._attr_hs_color = (hue, saturation) self._attr_hs_color = (hue, saturation)
elif color_mode == ColorMode.RGB: elif color_mode == ColorMode.RGB:
r = int(values["color"]["r"]) # pylint: disable=invalid-name r = int(values["color"]["r"])
g = int(values["color"]["g"]) # pylint: disable=invalid-name g = int(values["color"]["g"])
b = int(values["color"]["b"]) # pylint: disable=invalid-name b = int(values["color"]["b"])
self._attr_color_mode = ColorMode.RGB self._attr_color_mode = ColorMode.RGB
self._attr_rgb_color = (r, g, b) self._attr_rgb_color = (r, g, b)
elif color_mode == ColorMode.RGBW: elif color_mode == ColorMode.RGBW:
r = int(values["color"]["r"]) # pylint: disable=invalid-name r = int(values["color"]["r"])
g = int(values["color"]["g"]) # pylint: disable=invalid-name g = int(values["color"]["g"])
b = int(values["color"]["b"]) # pylint: disable=invalid-name b = int(values["color"]["b"])
w = int(values["color"]["w"]) # pylint: disable=invalid-name w = int(values["color"]["w"])
self._attr_color_mode = ColorMode.RGBW self._attr_color_mode = ColorMode.RGBW
self._attr_rgbw_color = (r, g, b, w) self._attr_rgbw_color = (r, g, b, w)
elif color_mode == ColorMode.RGBWW: elif color_mode == ColorMode.RGBWW:
r = int(values["color"]["r"]) # pylint: disable=invalid-name r = int(values["color"]["r"])
g = int(values["color"]["g"]) # pylint: disable=invalid-name g = int(values["color"]["g"])
b = int(values["color"]["b"]) # pylint: disable=invalid-name b = int(values["color"]["b"])
c = int(values["color"]["c"]) # pylint: disable=invalid-name c = int(values["color"]["c"])
w = int(values["color"]["w"]) # pylint: disable=invalid-name w = int(values["color"]["w"])
self._attr_color_mode = ColorMode.RGBWW self._attr_color_mode = ColorMode.RGBWW
self._attr_rgbww_color = (r, g, b, c, w) self._attr_rgbww_color = (r, g, b, c, w)
elif color_mode == ColorMode.WHITE: elif color_mode == ColorMode.WHITE:
self._attr_color_mode = ColorMode.WHITE self._attr_color_mode = ColorMode.WHITE
elif color_mode == ColorMode.XY: elif color_mode == ColorMode.XY:
x = float(values["color"]["x"]) # pylint: disable=invalid-name x = float(values["color"]["x"])
y = float(values["color"]["y"]) # pylint: disable=invalid-name y = float(values["color"]["y"])
self._attr_color_mode = ColorMode.XY self._attr_color_mode = ColorMode.XY
self._attr_xy_color = (x, y) self._attr_xy_color = (x, y)
except (KeyError, ValueError): except (KeyError, ValueError):

View file

@ -3,7 +3,7 @@ from __future__ import annotations
import logging import logging
from mycroftapi import MycroftAPI # pylint: disable=import-error from mycroftapi import MycroftAPI
from homeassistant.components.notify import BaseNotificationService from homeassistant.components.notify import BaseNotificationService
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant

View file

@ -188,7 +188,6 @@ class OpenCVImageProcessor(ImageProcessingEntity):
cv_image, scaleFactor=scale, minNeighbors=neighbors, minSize=min_size cv_image, scaleFactor=scale, minNeighbors=neighbors, minSize=min_size
) )
regions = [] regions = []
# pylint: disable=invalid-name
for x, y, w, h in detections: for x, y, w, h in detections:
regions.append((int(x), int(y), int(w), int(h))) regions.append((int(x), int(y), int(w), int(h)))
total_matches += 1 total_matches += 1

View file

@ -278,7 +278,6 @@ class OwnTracksContext:
func(**msg) func(**msg)
self._pending_msg.clear() self._pending_msg.clear()
# pylint: disable=method-hidden
@callback @callback
def async_see(self, **data): def async_see(self, **data):
"""Send a see message to the device tracker.""" """Send a see message to the device tracker."""

View file

@ -223,11 +223,9 @@ class PandoraMediaPlayer(MediaPlayerEntity):
_LOGGER.warning("On unexpected station list page") _LOGGER.warning("On unexpected station list page")
self._pianobar.sendcontrol("m") # press enter self._pianobar.sendcontrol("m") # press enter
self._pianobar.sendcontrol("m") # do it again b/c an 'i' got in self._pianobar.sendcontrol("m") # do it again b/c an 'i' got in
# pylint: disable=assignment-from-none
response = self.update_playing_status() response = self.update_playing_status()
elif match_idx == 3: elif match_idx == 3:
_LOGGER.debug("Received new playlist list") _LOGGER.debug("Received new playlist list")
# pylint: disable=assignment-from-none
response = self.update_playing_status() response = self.update_playing_status()
else: else:
response = self._pianobar.before.decode("utf-8") response = self._pianobar.before.decode("utf-8")

View file

@ -20,7 +20,6 @@ CONFIG_SCHEMA = vol.Schema(
DOMAIN: cv.schema_with_slug_keys( DOMAIN: cv.schema_with_slug_keys(
vol.Schema( vol.Schema(
{ {
# pylint: disable=no-value-for-parameter
vol.Optional(CONF_TITLE): cv.string, vol.Optional(CONF_TITLE): cv.string,
vol.Optional(CONF_ICON): cv.icon, vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean, vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,

View file

@ -37,8 +37,6 @@ def _find_duplicates(
literal_column("1").label("is_duplicate"), literal_column("1").label("is_duplicate"),
) )
.group_by(table.metadata_id, table.start) .group_by(table.metadata_id, table.start)
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
.having(func.count() > 1) .having(func.count() > 1)
.subquery() .subquery()
) )
@ -195,8 +193,6 @@ def _find_statistics_meta_duplicates(session: Session) -> list[int]:
literal_column("1").label("is_duplicate"), literal_column("1").label("is_duplicate"),
) )
.group_by(StatisticsMeta.statistic_id) .group_by(StatisticsMeta.statistic_id)
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
.having(func.count() > 1) .having(func.count() > 1)
.subquery() .subquery()
) )

View file

@ -3,9 +3,7 @@
from enum import StrEnum from enum import StrEnum
from homeassistant.const import ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES from homeassistant.const import ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES
from homeassistant.helpers.json import ( # noqa: F401 pylint: disable=unused-import from homeassistant.helpers.json import JSON_DUMP # noqa: F401
JSON_DUMP,
)
DATA_INSTANCE = "recorder_instance" DATA_INSTANCE = "recorder_instance"
SQLITE_URL_PREFIX = "sqlite://" SQLITE_URL_PREFIX = "sqlite://"

View file

@ -63,7 +63,6 @@ from .models import (
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
class Base(DeclarativeBase): class Base(DeclarativeBase):
"""Base class for tables.""" """Base class for tables."""

View file

@ -39,7 +39,6 @@ class DBInterruptibleThreadPoolExecutor(InterruptibleThreadPoolExecutor):
# When the executor gets lost, the weakref callback will wake up # When the executor gets lost, the weakref callback will wake up
# the worker threads. # the worker threads.
# pylint: disable=invalid-name
def weakref_cb( # type: ignore[no-untyped-def] def weakref_cb( # type: ignore[no-untyped-def]
_: Any, _: Any,
q=self._work_queue, q=self._work_queue,

View file

@ -565,8 +565,6 @@ def _get_states_for_entities_stmt(
most_recent_states_for_entities_by_date := ( most_recent_states_for_entities_by_date := (
select( select(
States.entity_id.label("max_entity_id"), States.entity_id.label("max_entity_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated_ts).label("max_last_updated"), func.max(States.last_updated_ts).label("max_last_updated"),
) )
.filter( .filter(
@ -590,8 +588,6 @@ def _get_states_for_entities_stmt(
( (
most_recent_states_for_entities_by_date := select( most_recent_states_for_entities_by_date := select(
States.entity_id.label("max_entity_id"), States.entity_id.label("max_entity_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated).label("max_last_updated"), func.max(States.last_updated).label("max_last_updated"),
) )
.filter( .filter(

View file

@ -432,8 +432,6 @@ def _get_last_state_changes_single_stmt(metadata_id: int) -> Select:
lastest_state_for_metadata_id := ( lastest_state_for_metadata_id := (
select( select(
States.metadata_id.label("max_metadata_id"), States.metadata_id.label("max_metadata_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated_ts).label("max_last_updated"), func.max(States.last_updated_ts).label("max_last_updated"),
) )
.filter(States.metadata_id == metadata_id) .filter(States.metadata_id == metadata_id)
@ -537,8 +535,6 @@ def _get_start_time_state_for_entities_stmt(
most_recent_states_for_entities_by_date := ( most_recent_states_for_entities_by_date := (
select( select(
States.metadata_id.label("max_metadata_id"), States.metadata_id.label("max_metadata_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated_ts).label("max_last_updated"), func.max(States.last_updated_ts).label("max_last_updated"),
) )
.filter( .filter(

View file

@ -524,7 +524,7 @@ def _update_states_table_with_foreign_key_options(
return return
states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
old_states_table = Table( # noqa: F841 pylint: disable=unused-variable old_states_table = Table( # noqa: F841
TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type] TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type]
) )
@ -553,9 +553,7 @@ def _drop_foreign_key_constraints(
drops.append(ForeignKeyConstraint((), (), name=foreign_key["name"])) drops.append(ForeignKeyConstraint((), (), name=foreign_key["name"]))
# Bind the ForeignKeyConstraints to the table # Bind the ForeignKeyConstraints to the table
old_table = Table( # noqa: F841 pylint: disable=unused-variable old_table = Table(table, MetaData(), *drops) # noqa: F841
table, MetaData(), *drops
)
for drop in drops: for drop in drops:
with session_scope(session=session_maker()) as session: with session_scope(session=session_maker()) as session:
@ -772,8 +770,6 @@ def _apply_update( # noqa: C901
with session_scope(session=session_maker()) as session: with session_scope(session=session_maker()) as session:
if session.query(Statistics.id).count() and ( if session.query(Statistics.id).count() and (
last_run_string := session.query( last_run_string := session.query(
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(StatisticsRuns.start) func.max(StatisticsRuns.start)
).scalar() ).scalar()
): ):

View file

@ -7,8 +7,6 @@ from typing import overload
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# pylint: disable=invalid-name
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
DB_TIMEZONE = "+00:00" DB_TIMEZONE = "+00:00"

View file

@ -76,8 +76,6 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElemen
def _state_attrs_exist(attr: int | None) -> Select: def _state_attrs_exist(attr: int | None) -> Select:
"""Check if a state attributes id exists in the states table.""" """Check if a state attributes id exists in the states table."""
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
return select(func.min(States.attributes_id)).where(States.attributes_id == attr) return select(func.min(States.attributes_id)).where(States.attributes_id == attr)
@ -315,8 +313,6 @@ def data_ids_exist_in_events_with_fast_in_distinct(
def _event_data_id_exist(data_id: int | None) -> Select: def _event_data_id_exist(data_id: int | None) -> Select:
"""Check if a event data id exists in the events table.""" """Check if a event data id exists in the events table."""
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
return select(func.min(Events.data_id)).where(Events.data_id == data_id) return select(func.min(Events.data_id)).where(Events.data_id == data_id)
@ -659,8 +655,6 @@ def find_statistics_runs_to_purge(
def find_latest_statistics_runs_run_id() -> StatementLambdaElement: def find_latest_statistics_runs_run_id() -> StatementLambdaElement:
"""Find the latest statistics_runs run_id.""" """Find the latest statistics_runs run_id."""
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
return lambda_stmt(lambda: select(func.max(StatisticsRuns.run_id))) return lambda_stmt(lambda: select(func.max(StatisticsRuns.run_id)))
@ -696,8 +690,6 @@ def find_legacy_detached_states_and_attributes_to_purge(
def find_legacy_row() -> StatementLambdaElement: def find_legacy_row() -> StatementLambdaElement:
"""Check if there are still states in the table with an event_id.""" """Check if there are still states in the table with an event_id."""
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
return lambda_stmt(lambda: select(func.max(States.event_id))) return lambda_stmt(lambda: select(func.max(States.event_id)))

View file

@ -103,11 +103,7 @@ QUERY_STATISTICS_SHORT_TERM = (
QUERY_STATISTICS_SUMMARY_MEAN = ( QUERY_STATISTICS_SUMMARY_MEAN = (
StatisticsShortTerm.metadata_id, StatisticsShortTerm.metadata_id,
func.avg(StatisticsShortTerm.mean), func.avg(StatisticsShortTerm.mean),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.min(StatisticsShortTerm.min), func.min(StatisticsShortTerm.min),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(StatisticsShortTerm.max), func.max(StatisticsShortTerm.max),
) )
@ -417,8 +413,6 @@ def compile_missing_statistics(instance: Recorder) -> bool:
exception_filter=_filter_unique_constraint_integrity_error(instance), exception_filter=_filter_unique_constraint_integrity_error(instance),
) as session: ) as session:
# Find the newest statistics run, if any # Find the newest statistics run, if any
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
if last_run := session.query(func.max(StatisticsRuns.start)).scalar(): if last_run := session.query(func.max(StatisticsRuns.start)).scalar():
start = max(start, process_timestamp(last_run) + timedelta(minutes=5)) start = max(start, process_timestamp(last_run) + timedelta(minutes=5))
@ -1078,17 +1072,11 @@ def _get_max_mean_min_statistic_in_sub_period(
# Calculate max, mean, min # Calculate max, mean, min
columns = select() columns = select()
if "max" in types: if "max" in types:
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
columns = columns.add_columns(func.max(table.max)) columns = columns.add_columns(func.max(table.max))
if "mean" in types: if "mean" in types:
columns = columns.add_columns(func.avg(table.mean)) columns = columns.add_columns(func.avg(table.mean))
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
columns = columns.add_columns(func.count(table.mean)) columns = columns.add_columns(func.count(table.mean))
if "min" in types: if "min" in types:
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
columns = columns.add_columns(func.min(table.min)) columns = columns.add_columns(func.min(table.min))
stmt = _generate_max_mean_min_statistic_in_sub_period_stmt( stmt = _generate_max_mean_min_statistic_in_sub_period_stmt(
columns, start_time, end_time, table, metadata_id columns, start_time, end_time, table, metadata_id
@ -1831,8 +1819,6 @@ def _latest_short_term_statistics_stmt(
most_recent_statistic_row := ( most_recent_statistic_row := (
select( select(
StatisticsShortTerm.metadata_id, StatisticsShortTerm.metadata_id,
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(StatisticsShortTerm.start_ts).label("start_max"), func.max(StatisticsShortTerm.start_ts).label("start_max"),
) )
.where(StatisticsShortTerm.metadata_id.in_(metadata_ids)) .where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
@ -1895,8 +1881,6 @@ def _generate_statistics_at_time_stmt(
( (
most_recent_statistic_ids := ( most_recent_statistic_ids := (
select( select(
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(table.start_ts).label("max_start_ts"), func.max(table.start_ts).label("max_start_ts"),
table.metadata_id.label("max_metadata_id"), table.metadata_id.label("max_metadata_id"),
) )

View file

@ -426,7 +426,7 @@ def _datetime_or_none(value: str) -> datetime | None:
def build_mysqldb_conv() -> dict: def build_mysqldb_conv() -> dict:
"""Build a MySQLDB conv dict that uses ciso8601 to parse datetimes.""" """Build a MySQLDB conv dict that uses ciso8601 to parse datetimes."""
# Late imports since we only call this if they are using mysqldb # Late imports since we only call this if they are using mysqldb
# pylint: disable=import-outside-toplevel,import-error # pylint: disable=import-outside-toplevel
from MySQLdb.constants import FIELD_TYPE from MySQLdb.constants import FIELD_TYPE
from MySQLdb.converters import conversions from MySQLdb.converters import conversions

View file

@ -23,7 +23,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{ {
vol.Required(CONF_URL): vol.Url(), vol.Required(CONF_URL): vol.Url(),

View file

@ -1,7 +1,6 @@
"""Tests for mobile_app component.""" """Tests for mobile_app component."""
from http import HTTPStatus from http import HTTPStatus
# pylint: disable=unused-import
import pytest import pytest
from homeassistant.components.mobile_app.const import DOMAIN from homeassistant.components.mobile_app.const import DOMAIN

View file

@ -279,7 +279,7 @@ BAD_MESSAGE = {"_type": "unsupported", "tst": 1}
BAD_JSON_PREFIX = "--$this is bad json#--" BAD_JSON_PREFIX = "--$this is bad json#--"
BAD_JSON_SUFFIX = "** and it ends here ^^" BAD_JSON_SUFFIX = "** and it ends here ^^"
# pylint: disable=invalid-name, len-as-condition # pylint: disable=len-as-condition
@pytest.fixture @pytest.fixture
@ -311,8 +311,6 @@ def context(hass, setup_comp):
orig_context = owntracks.OwnTracksContext orig_context = owntracks.OwnTracksContext
context = None context = None
# pylint: disable=no-value-for-parameter
def store_context(*args): def store_context(*args):
"""Store the context.""" """Store the context."""
nonlocal context nonlocal context
@ -1503,7 +1501,7 @@ async def test_encrypted_payload_no_topic_key(hass: HomeAssistant, setup_comp) -
async def test_encrypted_payload_libsodium(hass: HomeAssistant, setup_comp) -> None: async def test_encrypted_payload_libsodium(hass: HomeAssistant, setup_comp) -> None:
"""Test sending encrypted message payload.""" """Test sending encrypted message payload."""
try: try:
import nacl # noqa: F401 pylint: disable=unused-import import nacl # noqa: F401
except (ImportError, OSError): except (ImportError, OSError):
pytest.skip("PyNaCl/libsodium is not installed") pytest.skip("PyNaCl/libsodium is not installed")
return return

View file

@ -1,6 +1,5 @@
"""The test repairing events schema.""" """The test repairing events schema."""
# pylint: disable=invalid-name
from unittest.mock import ANY, patch from unittest.mock import ANY, patch
import pytest import pytest

View file

@ -1,6 +1,5 @@
"""The test repairing states schema.""" """The test repairing states schema."""
# pylint: disable=invalid-name
from unittest.mock import ANY, patch from unittest.mock import ANY, patch
import pytest import pytest

View file

@ -1,7 +1,5 @@
"""Test removing statistics duplicates.""" """Test removing statistics duplicates."""
from collections.abc import Callable from collections.abc import Callable
# pylint: disable=invalid-name
import importlib import importlib
from pathlib import Path from pathlib import Path
import sys import sys

View file

@ -1,6 +1,5 @@
"""The test repairing statistics schema.""" """The test repairing statistics schema."""
# pylint: disable=invalid-name
from unittest.mock import ANY, patch from unittest.mock import ANY, patch
import pytest import pytest

View file

@ -1,6 +1,5 @@
"""The test validating and repairing schema.""" """The test validating and repairing schema."""
# pylint: disable=invalid-name
from unittest.mock import patch from unittest.mock import patch
import pytest import pytest

View file

@ -26,7 +26,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)

View file

@ -39,7 +39,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 16 SCHEMA_VERSION = 16

View file

@ -39,7 +39,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 18 SCHEMA_VERSION = 18

View file

@ -45,7 +45,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 22 SCHEMA_VERSION = 22

View file

@ -43,7 +43,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 23 SCHEMA_VERSION = 23

View file

@ -51,7 +51,6 @@ from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 23 SCHEMA_VERSION = 23

View file

@ -39,7 +39,6 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 25 SCHEMA_VERSION = 25

View file

@ -45,7 +45,6 @@ from homeassistant.core import Context, Event, EventOrigin, State, split_entity_
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 28 SCHEMA_VERSION = 28

View file

@ -55,7 +55,6 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES} ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES}
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 30 SCHEMA_VERSION = 30

View file

@ -55,7 +55,6 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES} ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES}
# SQLAlchemy Schema # SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base() Base = declarative_base()
SCHEMA_VERSION = 32 SCHEMA_VERSION = 32

View file

@ -1,5 +1,4 @@
"""The tests for the recorder filter matching the EntityFilter component.""" """The tests for the recorder filter matching the EntityFilter component."""
# pylint: disable=invalid-name
import json import json
from unittest.mock import patch from unittest.mock import patch

View file

@ -2,8 +2,6 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Callable from collections.abc import Callable
# pylint: disable=invalid-name
from copy import copy from copy import copy
from datetime import datetime, timedelta from datetime import datetime, timedelta
import json import json

View file

@ -2,8 +2,6 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Callable from collections.abc import Callable
# pylint: disable=invalid-name
from copy import copy from copy import copy
from datetime import datetime, timedelta from datetime import datetime, timedelta
import json import json

View file

@ -2,8 +2,6 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Callable from collections.abc import Callable
# pylint: disable=invalid-name
from copy import copy from copy import copy
from datetime import datetime, timedelta from datetime import datetime, timedelta
import json import json

View file

@ -594,7 +594,6 @@ def test_setup_without_migration(hass_recorder: Callable[..., HomeAssistant]) ->
assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION
# pylint: disable=invalid-name
def test_saving_state_include_domains( def test_saving_state_include_domains(
hass_recorder: Callable[..., HomeAssistant] hass_recorder: Callable[..., HomeAssistant]
) -> None: ) -> None:
@ -955,7 +954,6 @@ async def test_defaults_set(hass: HomeAssistant) -> None:
assert await async_setup_component(hass, "history", {}) assert await async_setup_component(hass, "history", {})
assert recorder_config is not None assert recorder_config is not None
# pylint: disable=unsubscriptable-object
assert recorder_config["auto_purge"] assert recorder_config["auto_purge"]
assert recorder_config["auto_repack"] assert recorder_config["auto_repack"]
assert recorder_config["purge_keep_days"] == 10 assert recorder_config["purge_keep_days"] == 10

View file

@ -1,5 +1,4 @@
"""The tests for the recorder filter matching the EntityFilter component.""" """The tests for the recorder filter matching the EntityFilter component."""
# pylint: disable=invalid-name
import importlib import importlib
import sys import sys
from unittest.mock import patch from unittest.mock import patch

View file

@ -1,6 +1,5 @@
"""Test data purging.""" """Test data purging."""
# pylint: disable=invalid-name
from datetime import datetime, timedelta from datetime import datetime, timedelta
import json import json
import sqlite3 import sqlite3

View file

@ -1,7 +1,5 @@
"""The tests for sensor recorder platform.""" """The tests for sensor recorder platform."""
from collections.abc import Callable from collections.abc import Callable
# pylint: disable=invalid-name
from datetime import timedelta from datetime import timedelta
from unittest.mock import patch from unittest.mock import patch

View file

@ -4,8 +4,6 @@ The v23 schema used for these tests has been slightly modified to add the
EventData table to allow the recorder to startup successfully. EventData table to allow the recorder to startup successfully.
""" """
from functools import partial from functools import partial
# pylint: disable=invalid-name
import importlib import importlib
import json import json
from pathlib import Path from pathlib import Path

View file

@ -1,5 +1,4 @@
"""The tests for recorder platform migrating data from v30.""" """The tests for recorder platform migrating data from v30."""
# pylint: disable=invalid-name
import asyncio import asyncio
from datetime import timedelta from datetime import timedelta
import importlib import importlib

View file

@ -1,5 +1,4 @@
"""The tests for sensor recorder platform.""" """The tests for sensor recorder platform."""
# pylint: disable=invalid-name
import datetime import datetime
from datetime import timedelta from datetime import timedelta
from statistics import fmean from statistics import fmean