Remove unnecessary pylint configs from components [m-r]* (#98924)

Ville Skyttä 2023-08-24 01:56:50 +03:00 committed by GitHub
parent 3b4774d9ed
commit 34b47a2597
GPG key ID: 4AEE18F83AFDEB23
51 changed files with 23 additions and 116 deletions


@@ -31,7 +31,6 @@ ATTR_IMAGES = "images"
 DEFAULT_SANDBOX = False
-# pylint: disable=no-value-for-parameter
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 {vol.Required(CONF_RECIPIENT): vol.Email(), vol.Optional(CONF_SENDER): vol.Email()}
 )


@@ -144,7 +144,7 @@ def error_response(
 def supports_encryption() -> bool:
 """Test if we support encryption."""
 try:
-import nacl # noqa: F401 pylint: disable=unused-import, import-outside-toplevel
+import nacl # noqa: F401 pylint: disable=import-outside-toplevel
 return True
 except OSError:


@@ -59,7 +59,6 @@ def push_registrations(hass):
 return targets
-# pylint: disable=invalid-name
 def log_rate_limits(hass, device_name, resp, level=logging.INFO):
 """Output rate limit log line at given level."""
 if ATTR_PUSH_RATE_LIMITS not in resp:


@@ -85,7 +85,6 @@ class MpdDevice(MediaPlayerEntity):
 _attr_media_content_type = MediaType.MUSIC
-# pylint: disable=no-member
 def __init__(self, server, port, password, name):
 """Initialize the MPD device."""
 self.server = server


@@ -563,9 +563,7 @@ async def async_get_broker_settings(
 )
 schema = vol.Schema({cv.string: cv.template})
 schema(validated_user_input[CONF_WS_HEADERS])
-except JSON_DECODE_EXCEPTIONS + ( # pylint: disable=wrong-exception-operation
-vol.MultipleInvalid,
-):
+except JSON_DECODE_EXCEPTIONS + (vol.MultipleInvalid,):
 errors["base"] = "bad_ws_headers"
 return False
 return True


@@ -307,31 +307,31 @@ class MqttLightJson(MqttEntity, LightEntity, RestoreEntity):
 self._attr_color_mode = ColorMode.HS
 self._attr_hs_color = (hue, saturation)
 elif color_mode == ColorMode.RGB:
-r = int(values["color"]["r"]) # pylint: disable=invalid-name
-g = int(values["color"]["g"]) # pylint: disable=invalid-name
-b = int(values["color"]["b"]) # pylint: disable=invalid-name
+r = int(values["color"]["r"])
+g = int(values["color"]["g"])
+b = int(values["color"]["b"])
 self._attr_color_mode = ColorMode.RGB
 self._attr_rgb_color = (r, g, b)
 elif color_mode == ColorMode.RGBW:
-r = int(values["color"]["r"]) # pylint: disable=invalid-name
-g = int(values["color"]["g"]) # pylint: disable=invalid-name
-b = int(values["color"]["b"]) # pylint: disable=invalid-name
-w = int(values["color"]["w"]) # pylint: disable=invalid-name
+r = int(values["color"]["r"])
+g = int(values["color"]["g"])
+b = int(values["color"]["b"])
+w = int(values["color"]["w"])
 self._attr_color_mode = ColorMode.RGBW
 self._attr_rgbw_color = (r, g, b, w)
 elif color_mode == ColorMode.RGBWW:
-r = int(values["color"]["r"]) # pylint: disable=invalid-name
-g = int(values["color"]["g"]) # pylint: disable=invalid-name
-b = int(values["color"]["b"]) # pylint: disable=invalid-name
-c = int(values["color"]["c"]) # pylint: disable=invalid-name
-w = int(values["color"]["w"]) # pylint: disable=invalid-name
+r = int(values["color"]["r"])
+g = int(values["color"]["g"])
+b = int(values["color"]["b"])
+c = int(values["color"]["c"])
+w = int(values["color"]["w"])
 self._attr_color_mode = ColorMode.RGBWW
 self._attr_rgbww_color = (r, g, b, c, w)
 elif color_mode == ColorMode.WHITE:
 self._attr_color_mode = ColorMode.WHITE
 elif color_mode == ColorMode.XY:
-x = float(values["color"]["x"]) # pylint: disable=invalid-name
-y = float(values["color"]["y"]) # pylint: disable=invalid-name
+x = float(values["color"]["x"])
+y = float(values["color"]["y"])
 self._attr_color_mode = ColorMode.XY
 self._attr_xy_color = (x, y)
 except (KeyError, ValueError):


@@ -3,7 +3,7 @@ from __future__ import annotations
 import logging
-from mycroftapi import MycroftAPI # pylint: disable=import-error
+from mycroftapi import MycroftAPI
 from homeassistant.components.notify import BaseNotificationService
 from homeassistant.core import HomeAssistant


@@ -188,7 +188,6 @@ class OpenCVImageProcessor(ImageProcessingEntity):
 cv_image, scaleFactor=scale, minNeighbors=neighbors, minSize=min_size
 )
 regions = []
-# pylint: disable=invalid-name
 for x, y, w, h in detections:
 regions.append((int(x), int(y), int(w), int(h)))
 total_matches += 1


@@ -278,7 +278,6 @@ class OwnTracksContext:
 func(**msg)
 self._pending_msg.clear()
-# pylint: disable=method-hidden
 @callback
 def async_see(self, **data):
 """Send a see message to the device tracker."""


@@ -223,11 +223,9 @@ class PandoraMediaPlayer(MediaPlayerEntity):
 _LOGGER.warning("On unexpected station list page")
 self._pianobar.sendcontrol("m") # press enter
 self._pianobar.sendcontrol("m") # do it again b/c an 'i' got in
-# pylint: disable=assignment-from-none
 response = self.update_playing_status()
 elif match_idx == 3:
 _LOGGER.debug("Received new playlist list")
-# pylint: disable=assignment-from-none
 response = self.update_playing_status()
 else:
 response = self._pianobar.before.decode("utf-8")


@@ -20,7 +20,6 @@ CONFIG_SCHEMA = vol.Schema(
 DOMAIN: cv.schema_with_slug_keys(
 vol.Schema(
 {
-# pylint: disable=no-value-for-parameter
 vol.Optional(CONF_TITLE): cv.string,
 vol.Optional(CONF_ICON): cv.icon,
 vol.Optional(CONF_REQUIRE_ADMIN, default=False): cv.boolean,


@@ -37,8 +37,6 @@ def _find_duplicates(
 literal_column("1").label("is_duplicate"),
 )
 .group_by(table.metadata_id, table.start)
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 .having(func.count() > 1)
 .subquery()
 )
@@ -195,8 +193,6 @@ def _find_statistics_meta_duplicates(session: Session) -> list[int]:
 literal_column("1").label("is_duplicate"),
 )
 .group_by(StatisticsMeta.statistic_id)
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 .having(func.count() > 1)
 .subquery()
 )


@@ -3,9 +3,7 @@
 from enum import StrEnum
 from homeassistant.const import ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES
-from homeassistant.helpers.json import ( # noqa: F401 pylint: disable=unused-import
-JSON_DUMP,
-)
+from homeassistant.helpers.json import JSON_DUMP # noqa: F401
 DATA_INSTANCE = "recorder_instance"
 SQLITE_URL_PREFIX = "sqlite://"


@@ -63,7 +63,6 @@ from .models import (
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 class Base(DeclarativeBase):
 """Base class for tables."""


@@ -39,7 +39,6 @@ class DBInterruptibleThreadPoolExecutor(InterruptibleThreadPoolExecutor):
 # When the executor gets lost, the weakref callback will wake up
 # the worker threads.
-# pylint: disable=invalid-name
 def weakref_cb( # type: ignore[no-untyped-def]
 _: Any,
 q=self._work_queue,


@@ -565,8 +565,6 @@ def _get_states_for_entities_stmt(
 most_recent_states_for_entities_by_date := (
 select(
 States.entity_id.label("max_entity_id"),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(States.last_updated_ts).label("max_last_updated"),
 )
 .filter(
@@ -590,8 +588,6 @@ def _get_states_for_entities_stmt(
 (
 most_recent_states_for_entities_by_date := select(
 States.entity_id.label("max_entity_id"),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(States.last_updated).label("max_last_updated"),
 )
 .filter(


@@ -432,8 +432,6 @@ def _get_last_state_changes_single_stmt(metadata_id: int) -> Select:
 lastest_state_for_metadata_id := (
 select(
 States.metadata_id.label("max_metadata_id"),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(States.last_updated_ts).label("max_last_updated"),
 )
 .filter(States.metadata_id == metadata_id)
@@ -537,8 +535,6 @@ def _get_start_time_state_for_entities_stmt(
 most_recent_states_for_entities_by_date := (
 select(
 States.metadata_id.label("max_metadata_id"),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(States.last_updated_ts).label("max_last_updated"),
 )
 .filter(


@@ -524,7 +524,7 @@ def _update_states_table_with_foreign_key_options(
 return
 states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
-old_states_table = Table( # noqa: F841 pylint: disable=unused-variable
+old_states_table = Table( # noqa: F841
 TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters) # type: ignore[arg-type]
 )
@@ -553,9 +553,7 @@ def _drop_foreign_key_constraints(
 drops.append(ForeignKeyConstraint((), (), name=foreign_key["name"]))
 # Bind the ForeignKeyConstraints to the table
-old_table = Table( # noqa: F841 pylint: disable=unused-variable
-table, MetaData(), *drops
-)
+old_table = Table(table, MetaData(), *drops) # noqa: F841
 for drop in drops:
 with session_scope(session=session_maker()) as session:
@@ -772,8 +770,6 @@ def _apply_update( # noqa: C901
 with session_scope(session=session_maker()) as session:
 if session.query(Statistics.id).count() and (
 last_run_string := session.query(
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(StatisticsRuns.start)
 ).scalar()
 ):


@@ -7,8 +7,6 @@ from typing import overload
 import homeassistant.util.dt as dt_util
-# pylint: disable=invalid-name
 _LOGGER = logging.getLogger(__name__)
 DB_TIMEZONE = "+00:00"


@@ -76,8 +76,6 @@ def find_states_metadata_ids(entity_ids: Iterable[str]) -> StatementLambdaElement
 def _state_attrs_exist(attr: int | None) -> Select:
 """Check if a state attributes id exists in the states table."""
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 return select(func.min(States.attributes_id)).where(States.attributes_id == attr)
@@ -315,8 +313,6 @@ def data_ids_exist_in_events_with_fast_in_distinct(
 def _event_data_id_exist(data_id: int | None) -> Select:
 """Check if a event data id exists in the events table."""
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 return select(func.min(Events.data_id)).where(Events.data_id == data_id)
@@ -659,8 +655,6 @@ def find_statistics_runs_to_purge(
 def find_latest_statistics_runs_run_id() -> StatementLambdaElement:
 """Find the latest statistics_runs run_id."""
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 return lambda_stmt(lambda: select(func.max(StatisticsRuns.run_id)))
@@ -696,8 +690,6 @@ def find_legacy_detached_states_and_attributes_to_purge(
 def find_legacy_row() -> StatementLambdaElement:
 """Check if there are still states in the table with an event_id."""
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 return lambda_stmt(lambda: select(func.max(States.event_id)))


@@ -103,11 +103,7 @@ QUERY_STATISTICS_SHORT_TERM = (
 QUERY_STATISTICS_SUMMARY_MEAN = (
 StatisticsShortTerm.metadata_id,
 func.avg(StatisticsShortTerm.mean),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.min(StatisticsShortTerm.min),
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(StatisticsShortTerm.max),
 )
@@ -417,8 +413,6 @@ def compile_missing_statistics(instance: Recorder) -> bool:
 exception_filter=_filter_unique_constraint_integrity_error(instance),
 ) as session:
 # Find the newest statistics run, if any
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 if last_run := session.query(func.max(StatisticsRuns.start)).scalar():
 start = max(start, process_timestamp(last_run) + timedelta(minutes=5))
@@ -1078,17 +1072,11 @@ def _get_max_mean_min_statistic_in_sub_period(
 # Calculate max, mean, min
 columns = select()
 if "max" in types:
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 columns = columns.add_columns(func.max(table.max))
 if "mean" in types:
 columns = columns.add_columns(func.avg(table.mean))
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 columns = columns.add_columns(func.count(table.mean))
 if "min" in types:
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 columns = columns.add_columns(func.min(table.min))
 stmt = _generate_max_mean_min_statistic_in_sub_period_stmt(
 columns, start_time, end_time, table, metadata_id
@@ -1831,8 +1819,6 @@ def _latest_short_term_statistics_stmt(
 most_recent_statistic_row := (
 select(
 StatisticsShortTerm.metadata_id,
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(StatisticsShortTerm.start_ts).label("start_max"),
 )
 .where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
@@ -1895,8 +1881,6 @@ def _generate_statistics_at_time_stmt(
 (
 most_recent_statistic_ids := (
 select(
-# https://github.com/sqlalchemy/sqlalchemy/issues/9189
-# pylint: disable-next=not-callable
 func.max(table.start_ts).label("max_start_ts"),
 table.metadata_id.label("max_metadata_id"),
 )


@@ -426,7 +426,7 @@ def _datetime_or_none(value: str) -> datetime | None:
 def build_mysqldb_conv() -> dict:
 """Build a MySQLDB conv dict that uses cisco8601 to parse datetimes."""
 # Late imports since we only call this if they are using mysqldb
-# pylint: disable=import-outside-toplevel,import-error
+# pylint: disable=import-outside-toplevel
 from MySQLdb.constants import FIELD_TYPE
 from MySQLdb.converters import conversions


@@ -23,7 +23,6 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 _LOGGER = logging.getLogger(__name__)
-# pylint: disable=no-value-for-parameter
 PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 {
 vol.Required(CONF_URL): vol.Url(),


@@ -1,7 +1,6 @@
 """Tests for mobile_app component."""
 from http import HTTPStatus
-# pylint: disable=unused-import
 import pytest
 from homeassistant.components.mobile_app.const import DOMAIN


@@ -279,7 +279,7 @@ BAD_MESSAGE = {"_type": "unsupported", "tst": 1}
 BAD_JSON_PREFIX = "--$this is bad json#--"
 BAD_JSON_SUFFIX = "** and it ends here ^^"
-# pylint: disable=invalid-name, len-as-condition
+# pylint: disable=len-as-condition
 @pytest.fixture
@@ -311,8 +311,6 @@ def context(hass, setup_comp):
 orig_context = owntracks.OwnTracksContext
 context = None
-# pylint: disable=no-value-for-parameter
 def store_context(*args):
 """Store the context."""
 nonlocal context
@@ -1503,7 +1501,7 @@ async def test_encrypted_payload_no_topic_key(hass: HomeAssistant, setup_comp) -
 async def test_encrypted_payload_libsodium(hass: HomeAssistant, setup_comp) -> None:
 """Test sending encrypted message payload."""
 try:
-import nacl # noqa: F401 pylint: disable=unused-import
+import nacl # noqa: F401
 except (ImportError, OSError):
 pytest.skip("PyNaCl/libsodium is not installed")
 return


@@ -1,6 +1,5 @@
 """The test repairing events schema."""
-# pylint: disable=invalid-name
 from unittest.mock import ANY, patch
 import pytest


@@ -1,6 +1,5 @@
 """The test repairing states schema."""
-# pylint: disable=invalid-name
 from unittest.mock import ANY, patch
 import pytest


@@ -1,7 +1,5 @@
 """Test removing statistics duplicates."""
 from collections.abc import Callable
-# pylint: disable=invalid-name
 import importlib
 from pathlib import Path
 import sys


@@ -1,6 +1,5 @@
 """The test repairing statistics schema."""
-# pylint: disable=invalid-name
 from unittest.mock import ANY, patch
 import pytest


@@ -1,6 +1,5 @@
 """The test validating and repairing schema."""
-# pylint: disable=invalid-name
 from unittest.mock import patch
 import pytest


@@ -26,7 +26,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 _LOGGER = logging.getLogger(__name__)


@@ -39,7 +39,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 16


@@ -39,7 +39,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 18


@@ -45,7 +45,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 22


@@ -43,7 +43,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 23


@@ -51,7 +51,6 @@ from homeassistant.helpers.json import JSONEncoder
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 23


@@ -39,7 +39,6 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 25


@@ -45,7 +45,6 @@ from homeassistant.core import Context, Event, EventOrigin, State, split_entity_
 import homeassistant.util.dt as dt_util
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 28


@@ -55,7 +55,6 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
 ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES}
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 30


@@ -55,7 +55,6 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
 ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES}
 # SQLAlchemy Schema
-# pylint: disable=invalid-name
 Base = declarative_base()
 SCHEMA_VERSION = 32


@@ -1,5 +1,4 @@
 """The tests for the recorder filter matching the EntityFilter component."""
-# pylint: disable=invalid-name
 import json
 from unittest.mock import patch


@@ -2,8 +2,6 @@
 from __future__ import annotations
 from collections.abc import Callable
-# pylint: disable=invalid-name
 from copy import copy
 from datetime import datetime, timedelta
 import json


@@ -2,8 +2,6 @@
 from __future__ import annotations
 from collections.abc import Callable
-# pylint: disable=invalid-name
 from copy import copy
 from datetime import datetime, timedelta
 import json


@@ -2,8 +2,6 @@
 from __future__ import annotations
 from collections.abc import Callable
-# pylint: disable=invalid-name
 from copy import copy
 from datetime import datetime, timedelta
 import json


@@ -594,7 +594,6 @@ def test_setup_without_migration(hass_recorder: Callable[..., HomeAssistant]) ->
 assert recorder.get_instance(hass).schema_version == SCHEMA_VERSION
-# pylint: disable=invalid-name
 def test_saving_state_include_domains(
 hass_recorder: Callable[..., HomeAssistant]
 ) -> None:
@@ -955,7 +954,6 @@ async def test_defaults_set(hass: HomeAssistant) -> None:
 assert await async_setup_component(hass, "history", {})
 assert recorder_config is not None
-# pylint: disable=unsubscriptable-object
 assert recorder_config["auto_purge"]
 assert recorder_config["auto_repack"]
 assert recorder_config["purge_keep_days"] == 10


@@ -1,5 +1,4 @@
 """The tests for the recorder filter matching the EntityFilter component."""
-# pylint: disable=invalid-name
 import importlib
 import sys
 from unittest.mock import patch


@@ -1,6 +1,5 @@
 """Test data purging."""
-# pylint: disable=invalid-name
 from datetime import datetime, timedelta
 import json
 import sqlite3


@@ -1,7 +1,5 @@
 """The tests for sensor recorder platform."""
 from collections.abc import Callable
-# pylint: disable=invalid-name
 from datetime import timedelta
 from unittest.mock import patch


@@ -4,8 +4,6 @@ The v23 schema used for these tests has been slightly modified to add the
 EventData table to allow the recorder to startup successfully.
 """
 from functools import partial
-# pylint: disable=invalid-name
 import importlib
 import json
 from pathlib import Path


@@ -1,5 +1,4 @@
 """The tests for recorder platform migrating data from v30."""
-# pylint: disable=invalid-name
 import asyncio
 from datetime import timedelta
 import importlib


@@ -1,5 +1,4 @@
 """The tests for sensor recorder platform."""
-# pylint: disable=invalid-name
 import datetime
 from datetime import timedelta
 from statistics import fmean