Add SSDP integration (#24090)

* Add SSDP integration
* Fix tests
* Sort all the things
* Add netdisco to test requirements

parent 97b671171b
commit 9debbfb1a8

22 changed files with 436 additions and 28 deletions

homeassistant/components/default_config/manifest.json
@@ -15,6 +15,7 @@
     "mobile_app",
     "person",
     "script",
+    "ssdp",
     "sun",
     "system_health",
     "updater",

homeassistant/components/discovery/__init__.py
@@ -33,7 +33,6 @@ SERVICE_HASS_IOS_APP = 'hass_ios'
 SERVICE_HASSIO = 'hassio'
 SERVICE_HOMEKIT = 'homekit'
 SERVICE_HEOS = 'heos'
-SERVICE_HUE = 'philips_hue'
 SERVICE_IGD = 'igd'
 SERVICE_IKEA_TRADFRI = 'ikea_tradfri'
 SERVICE_KONNECTED = 'konnected'
@@ -54,7 +53,6 @@ CONFIG_ENTRY_HANDLERS = {
     SERVICE_DECONZ: 'deconz',
     'google_cast': 'cast',
     SERVICE_HEOS: 'heos',
-    SERVICE_HUE: 'hue',
     SERVICE_TELLDUSLIVE: 'tellduslive',
     SERVICE_IKEA_TRADFRI: 'tradfri',
     'sonos': 'sonos',

homeassistant/components/hue/config_flow.py
@@ -137,17 +137,22 @@ class HueFlowHandler(config_entries.ConfigFlow):
             errors=errors,
         )

-    async def async_step_discovery(self, discovery_info):
+    async def async_step_ssdp(self, discovery_info):
         """Handle a discovered Hue bridge.

-        This flow is triggered by the discovery component. It will check if the
+        This flow is triggered by the SSDP component. It will check if the
         host is already configured and delegate to the import step if not.
         """
         # Filter out emulated Hue
         if "HASS Bridge" in discovery_info.get('name', ''):
             return self.async_abort(reason='already_configured')

-        host = discovery_info.get('host')
+        # pylint: disable=unsupported-assignment-operation
+        host = self.context['host'] = discovery_info.get('host')
+
+        if any(host == flow['context']['host']
+               for flow in self._async_in_progress()):
+            return self.async_abort(reason='already_in_progress')

         if host in configured_hosts(self.hass):
             return self.async_abort(reason='already_configured')

homeassistant/components/hue/manifest.json
@@ -6,6 +6,11 @@
   "requirements": [
     "aiohue==1.9.1"
   ],
+  "ssdp": {
+    "manufacturer": [
+      "Royal Philips Electronics"
+    ]
+  },
   "dependencies": [],
   "codeowners": [
     "@balloob"
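
Taken together, the two Hue changes above show the opt-in pattern this commit introduces: declare SSDP matchers under an "ssdp" key in manifest.json and implement an async_step_ssdp step in the config flow. A minimal sketch for a hypothetical integration follows; the domain, the hand-off to the user step, and the host handling are illustrative assumptions, not part of this commit.

from homeassistant import config_entries


class ExampleFlowHandler(config_entries.ConfigFlow):
    """Config flow for a hypothetical SSDP-discovered device."""

    async def async_step_ssdp(self, discovery_info):
        """Handle a device found by the SSDP integration."""
        host = self.context['host'] = discovery_info.get('host')

        # Abort if another flow for the same host is already running,
        # mirroring the guard added to the Hue flow above.
        if any(host == flow['context'].get('host')
               for flow in self._async_in_progress()):
            return self.async_abort(reason='already_in_progress')

        # Hand the discovered host off to the regular user step.
        return await self.async_step_user({'host': host})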

homeassistant/components/hue/strings.json
@@ -23,7 +23,8 @@
             "all_configured": "All Philips Hue bridges are already configured",
             "unknown": "Unknown error occurred",
             "cannot_connect": "Unable to connect to the bridge",
-            "already_configured": "Bridge is already configured"
+            "already_configured": "Bridge is already configured",
+            "already_in_progress": "Config flow for bridge is already in progress."
         }
     }
 }

homeassistant/components/ssdp/__init__.py (new file, 170 lines)
@@ -0,0 +1,170 @@
"""The SSDP integration."""
import asyncio
from datetime import timedelta
import logging
from urllib.parse import urlparse
from xml.etree import ElementTree

import aiohttp
from netdisco import ssdp, util

from homeassistant.helpers.event import async_track_time_interval
from homeassistant.generated.ssdp import SSDP

DOMAIN = 'ssdp'
SCAN_INTERVAL = timedelta(seconds=60)

ATTR_HOST = 'host'
ATTR_PORT = 'port'
ATTR_SSDP_DESCRIPTION = 'ssdp_description'
ATTR_ST = 'ssdp_st'
ATTR_NAME = 'name'
ATTR_MODEL_NAME = 'model_name'
ATTR_MODEL_NUMBER = 'model_number'
ATTR_SERIAL = 'serial_number'
ATTR_MANUFACTURER = 'manufacturer'
ATTR_UDN = 'udn'
ATTR_UPNP_DEVICE_TYPE = 'upnp_device_type'

_LOGGER = logging.getLogger(__name__)


async def async_setup(hass, config):
    """Set up the SSDP integration."""
    async def initialize():
        scanner = Scanner(hass)
        await scanner.async_scan(None)
        async_track_time_interval(hass, scanner.async_scan, SCAN_INTERVAL)

    hass.loop.create_task(initialize())

    return True


class Scanner:
    """Class to manage SSDP scanning."""

    def __init__(self, hass):
        """Initialize class."""
        self.hass = hass
        self.seen = set()
        self._description_cache = {}

    async def async_scan(self, _):
        """Scan for new entries."""
        _LOGGER.debug("Scanning")
        # Run 3 times as packets can get lost
        for _ in range(3):
            entries = await self.hass.async_add_executor_job(ssdp.scan)
            await self._process_entries(entries)

        # We clear the cache after each run. We track discovered entries
        # so we will never need a description twice.
        self._description_cache.clear()

    async def _process_entries(self, entries):
        """Process SSDP entries."""
        tasks = []

        for entry in entries:
            key = (entry.st, entry.location)

            if key in self.seen:
                continue

            self.seen.add(key)

            tasks.append(self._process_entry(entry))

        if not tasks:
            return

        to_load = [result for result in await asyncio.gather(*tasks)
                   if result is not None]

        if not to_load:
            return

        for entry, info, domains in to_load:

            for domain in domains:
                _LOGGER.debug("Discovered %s at %s", domain, entry.location)
                await self.hass.config_entries.flow.async_init(
                    domain, context={'source': DOMAIN}, data=info
                )

    async def _process_entry(self, entry):
        """Process a single entry."""
        domains = set(SSDP["st"].get(entry.st, []))

        xml_location = entry.location

        if not xml_location:
            if domains:
                return (entry, info_from_entry(entry, None), domains)
            return None

        # Multiple entries usually share same location. Make sure
        # we fetch it only once.
        info_req = self._description_cache.get(xml_location)

        if info_req is None:
            info_req = self._description_cache[xml_location] = \
                self.hass.async_create_task(
                    self._fetch_description(xml_location))

        info = await info_req

        domains.update(SSDP["manufacturer"].get(info.get('manufacturer'), []))
        domains.update(SSDP["device_type"].get(info.get('deviceType'), []))

        if domains:
            return (entry, info_from_entry(entry, info), domains)

        return None

    async def _fetch_description(self, xml_location):
        """Fetch an XML description."""
        session = self.hass.helpers.aiohttp_client.async_get_clientsession()
        try:
            resp = await session.get(xml_location, timeout=5)
            xml = await resp.text()

            # Samsung Smart TV sometimes returns an empty document the
            # first time. Retry once.
            if not xml:
                resp = await session.get(xml_location, timeout=5)
                xml = await resp.text()
        except aiohttp.ClientError as err:
            _LOGGER.debug("Error fetching %s: %s", xml_location, err)
            return None

        try:
            tree = ElementTree.fromstring(xml)
        except ElementTree.ParseError as err:
            _LOGGER.debug("Error parsing %s: %s", xml_location, err)
            return None

        return util.etree_to_dict(tree).get('root', {}).get('device', {})


def info_from_entry(entry, device_info):
    """Get most important info from an entry."""
    url = urlparse(entry.location)
    info = {
        ATTR_HOST: url.hostname,
        ATTR_PORT: url.port,
        ATTR_SSDP_DESCRIPTION: entry.location,
        ATTR_ST: entry.st,
    }

    if device_info:
        info[ATTR_NAME] = device_info.get('friendlyName')
        info[ATTR_MODEL_NAME] = device_info.get('modelName')
        info[ATTR_MODEL_NUMBER] = device_info.get('modelNumber')
        info[ATTR_SERIAL] = device_info.get('serialNumber')
        info[ATTR_MANUFACTURER] = device_info.get('manufacturer')
        info[ATTR_UDN] = device_info.get('UDN')
        info[ATTR_UPNP_DEVICE_TYPE] = device_info.get('deviceType')

    return info
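
For reference, info_from_entry above defines the payload handed to a matched integration's async_step_ssdp. A sketch of what that dict might look like for one discovered device; all values are hypothetical, only the keys come from the ATTR_* constants.

# Hypothetical discovery payload passed as `data` to the config flow.
discovery_info = {
    'host': '192.168.1.50',
    'port': 80,
    'ssdp_description': 'http://192.168.1.50:80/description.xml',
    'ssdp_st': 'upnp:rootdevice',
    'name': 'Philips hue',
    'model_name': 'Philips hue bridge 2015',
    'model_number': 'BSB002',
    'serial_number': '001788000000',
    'manufacturer': 'Royal Philips Electronics',
    'udn': 'uuid:2f402f80-da50-11e1-9b23-001788000000',
    'upnp_device_type': 'urn:schemas-upnp-org:device:Basic:1',
}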

homeassistant/components/ssdp/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "ssdp",
  "name": "SSDP",
  "documentation": "https://www.home-assistant.io/components/ssdp",
  "requirements": [
    "netdisco==2.6.0"
  ],
  "dependencies": [
  ],
  "codeowners": [
  ]
}

homeassistant/components/zeroconf/__init__.py
@@ -53,10 +53,8 @@ async def async_setup(hass, config):
         _LOGGER.debug("Discovered new device %s %s", name, info)

         for domain in zeroconf_manifest.SERVICE_TYPES[service_type]:
-            hass.async_create_task(
-                hass.config_entries.flow.async_init(
-                    domain, context={'source': DOMAIN}, data=info
-                )
+            await hass.config_entries.flow.async_init(
+                domain, context={'source': DOMAIN}, data=info
             )

     def service_update(_, service_type, name, state_change):

homeassistant/config_entries.py
@@ -169,6 +169,8 @@ UNRECOVERABLE_STATES = (

 DISCOVERY_NOTIFICATION_ID = 'config_entry_discovery'
 DISCOVERY_SOURCES = (
+    'ssdp',
+    'zeroconf',
     SOURCE_DISCOVERY,
     SOURCE_IMPORT,
 )
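
DISCOVERY_SOURCES now covers SSDP and zeroconf alongside the existing discovery and import sources. As a hedged illustration of the kind of membership check this tuple backs elsewhere in config_entries.py (the helper name below is hypothetical; the actual discovery-notification logic is outside this hunk):

def _is_discovery_source(context):
    """Return True when a flow was started by a discovery-style source."""
    return context.get('source') in DISCOVERY_SOURCES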

homeassistant/data_entry_flow.py
@@ -58,6 +58,8 @@ class FlowManager:
                          context: Optional[Dict] = None,
                          data: Any = None) -> Any:
         """Start a configuration flow."""
+        if context is None:
+            context = {}
         flow = await self._async_create_flow(
             handler, context=context, data=data)
         flow.hass = self.hass

homeassistant/generated/ssdp.py (new file, 15 lines)
@@ -0,0 +1,15 @@
"""Automatically generated by hassfest.

To update, run python3 -m hassfest
"""


SSDP = {
    "device_type": {},
    "manufacturer": {
        "Royal Philips Electronics": [
            "hue"
        ]
    },
    "st": {}
}
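
The Scanner consults these three generated maps to decide which config flows to start for a response. A small illustrative lookup, assuming Home Assistant is importable and using made-up response attributes:

from homeassistant.generated.ssdp import SSDP

# Hypothetical attributes of one SSDP response and its fetched description.
st = 'upnp:rootdevice'
description = {'manufacturer': 'Royal Philips Electronics', 'deviceType': None}

domains = set(SSDP['st'].get(st, []))
domains.update(SSDP['manufacturer'].get(description.get('manufacturer'), []))
domains.update(SSDP['device_type'].get(description.get('deviceType'), []))

print(domains)  # {'hue'} with the generated data above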

requirements_all.txt
@@ -778,6 +778,7 @@ nessclient==0.9.15
 netdata==0.1.2

 # homeassistant.components.discovery
+# homeassistant.components.ssdp
 netdisco==2.6.0

 # homeassistant.components.neurio_energy

requirements_test_all.txt
@@ -181,6 +181,10 @@ mbddns==0.1.2
 # homeassistant.components.mfi
 mficlient==0.3.0

+# homeassistant.components.discovery
+# homeassistant.components.ssdp
+netdisco==2.6.0
+
 # homeassistant.components.iqvia
 # homeassistant.components.opencv
 # homeassistant.components.tensorflow

script/gen_requirements_all.py
@@ -89,6 +89,7 @@ TEST_REQUIREMENTS = (
     'luftdaten',
     'mbddns',
     'mficlient',
+    'netdisco',
     'numpy',
     'oauth2client',
     'paho-mqtt',

script/hassfest/__init__.py
@@ -4,15 +4,23 @@ import sys

 from .model import Integration, Config
 from . import (
-    dependencies, manifest, codeowners, services, config_flow, zeroconf)
+    codeowners,
+    config_flow,
+    dependencies,
+    manifest,
+    services,
+    ssdp,
+    zeroconf,
+)

 PLUGINS = [
-    manifest,
-    dependencies,
     codeowners,
-    services,
     config_flow,
-    zeroconf
+    dependencies,
+    manifest,
+    services,
+    ssdp,
+    zeroconf,
 ]


script/hassfest/manifest.py
@@ -12,6 +12,11 @@ MANIFEST_SCHEMA = vol.Schema({
     vol.Required('name'): str,
     vol.Optional('config_flow'): bool,
     vol.Optional('zeroconf'): [str],
+    vol.Optional('ssdp'): vol.Schema({
+        vol.Optional('st'): [str],
+        vol.Optional('manufacturer'): [str],
+        vol.Optional('device_type'): [str],
+    }),
     vol.Required('documentation'): str,
     vol.Required('requirements'): [str],
     vol.Required('dependencies'): [str],
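
For context, a sketch of manifest data the extended schema is meant to accept, written as a Python dict; the integration, URL, and matcher values are hypothetical, and required keys outside this hunk are not reproduced.

EXAMPLE_MANIFEST = {
    'domain': 'example_upnp',  # hypothetical integration
    'name': 'Example UPnP Device',
    'config_flow': True,
    'ssdp': {
        # Any combination of the three matcher lists is allowed.
        'st': ['urn:schemas-upnp-org:device:Basic:1'],
        'manufacturer': ['Example Corp'],
    },
    'documentation': 'https://www.home-assistant.io/components/example_upnp',
    'requirements': [],
    'dependencies': [],
}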

script/hassfest/ssdp.py (new file, 88 lines)
@@ -0,0 +1,88 @@
"""Generate ssdp file."""
from collections import OrderedDict, defaultdict
import json
from typing import Dict

from .model import Integration, Config

BASE = """
\"\"\"Automatically generated by hassfest.

To update, run python3 -m hassfest
\"\"\"


SSDP = {}
""".strip()


def sort_dict(value):
    """Sort a dictionary."""
    return OrderedDict((key, value[key])
                       for key in sorted(value))


def generate_and_validate(integrations: Dict[str, Integration]):
    """Validate and generate ssdp data."""
    data = {
        'st': defaultdict(list),
        'manufacturer': defaultdict(list),
        'device_type': defaultdict(list),
    }

    for domain in sorted(integrations):
        integration = integrations[domain]

        if not integration.manifest:
            continue

        ssdp = integration.manifest.get('ssdp')

        if not ssdp:
            continue

        try:
            with open(str(integration.path / "config_flow.py")) as fp:
                if ' async_step_ssdp(' not in fp.read():
                    integration.add_error(
                        'ssdp', 'Config flow has no async_step_ssdp')
                    continue
        except FileNotFoundError:
            integration.add_error(
                'ssdp',
                'SSDP info in a manifest requires a config flow to exist'
            )
            continue

        for key in 'st', 'manufacturer', 'device_type':
            if key not in ssdp:
                continue

            for value in ssdp[key]:
                data[key][value].append(domain)

    data = sort_dict({key: sort_dict(value) for key, value in data.items()})
    return BASE.format(json.dumps(data, indent=4))


def validate(integrations: Dict[str, Integration], config: Config):
    """Validate ssdp file."""
    ssdp_path = config.root / 'homeassistant/generated/ssdp.py'
    config.cache['ssdp'] = content = generate_and_validate(integrations)

    with open(str(ssdp_path), 'r') as fp:
        if fp.read().strip() != content:
            config.add_error(
                "ssdp",
                "File ssdp.py is not up to date. "
                "Run python3 -m script.hassfest",
                fixable=True
            )
        return


def generate(integrations: Dict[str, Integration], config: Config):
    """Generate ssdp file."""
    ssdp_path = config.root / 'homeassistant/generated/ssdp.py'
    with open(str(ssdp_path), 'w') as fp:
        fp.write(config.cache['ssdp'] + '\n')
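
Conceptually, generate_and_validate above inverts each integration's manifest "ssdp" matchers into value -> [domain] maps before rendering generated/ssdp.py. A standalone sketch of that transformation with made-up manifest data (not part of the commit):

# Hypothetical manifests keyed by domain; only the 'ssdp' key matters here.
manifests = {
    'hue': {'ssdp': {'manufacturer': ['Royal Philips Electronics']}},
    'example_upnp': {'ssdp': {'st': ['urn:example-com:device:Thing:1']}},
}

data = {'st': {}, 'manufacturer': {}, 'device_type': {}}
for domain in sorted(manifests):
    for key, values in manifests[domain].get('ssdp', {}).items():
        for value in values:
            data[key].setdefault(value, []).append(domain)

# data now maps each matcher value to the domains to notify:
# {'st': {'urn:example-com:device:Thing:1': ['example_upnp']},
#  'manufacturer': {'Royal Philips Electronics': ['hue']},
#  'device_type': {}}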

script/hassfest/zeroconf.py
@@ -31,6 +31,19 @@ def generate_and_validate(integrations: Dict[str, Integration]):
         if not service_types:
             continue

+        try:
+            with open(str(integration.path / "config_flow.py")) as fp:
+                if ' async_step_zeroconf(' not in fp.read():
+                    integration.add_error(
+                        'zeroconf', 'Config flow has no async_step_zeroconf')
+                    continue
+        except FileNotFoundError:
+            integration.add_error(
+                'zeroconf',
+                'Zeroconf info in a manifest requires a config flow to exist'
+            )
+            continue
+
         for service_type in service_types:

             if service_type not in service_type_dict:

tests/components/hue/test_config_flow.py
@@ -185,14 +185,15 @@ async def test_flow_link_unknown_host(hass):
     }


-async def test_bridge_discovery(hass):
+async def test_bridge_ssdp(hass):
     """Test a bridge being discovered."""
     flow = config_flow.HueFlowHandler()
     flow.hass = hass
+    flow.context = {}

     with patch.object(config_flow, 'get_bridge',
                       side_effect=errors.AuthenticationRequired):
-        result = await flow.async_step_discovery({
+        result = await flow.async_step_ssdp({
             'host': '0.0.0.0',
             'serial': '1234'
         })
@@ -201,12 +202,13 @@ async def test_bridge_discovery(hass):
     assert result['step_id'] == 'link'


-async def test_bridge_discovery_emulated_hue(hass):
+async def test_bridge_ssdp_emulated_hue(hass):
     """Test if discovery info is from an emulated hue instance."""
     flow = config_flow.HueFlowHandler()
     flow.hass = hass
+    flow.context = {}

-    result = await flow.async_step_discovery({
+    result = await flow.async_step_ssdp({
         'name': 'HASS Bridge',
         'host': '0.0.0.0',
         'serial': '1234'
@@ -215,7 +217,7 @@ async def test_bridge_discovery_emulated_hue(hass):
     assert result['type'] == 'abort'


-async def test_bridge_discovery_already_configured(hass):
+async def test_bridge_ssdp_already_configured(hass):
     """Test if a discovered bridge has already been configured."""
     MockConfigEntry(domain='hue', data={
         'host': '0.0.0.0'
@@ -223,8 +225,9 @@ async def test_bridge_discovery_already_configured(hass):

     flow = config_flow.HueFlowHandler()
     flow.hass = hass
+    flow.context = {}

-    result = await flow.async_step_discovery({
+    result = await flow.async_step_ssdp({
         'host': '0.0.0.0',
         'serial': '1234'
     })

tests/components/ssdp/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
"""Tests for the SSDP integration."""

tests/components/ssdp/test_init.py (new file, 78 lines)
@@ -0,0 +1,78 @@
"""Test the SSDP integration."""
from unittest.mock import patch, Mock

from homeassistant.generated import ssdp as gn_ssdp
from homeassistant.components import ssdp

from tests.common import mock_coro


async def test_scan_match_st(hass):
    """Test matching based on ST."""
    scanner = ssdp.Scanner(hass)

    with patch('netdisco.ssdp.scan', return_value=[
            Mock(st="mock-st", location=None)
    ]), patch.dict(
            gn_ssdp.SSDP['st'], {'mock-st': ['mock-domain']}
    ), patch.object(
            hass.config_entries.flow, 'async_init',
            return_value=mock_coro()
    ) as mock_init:
        await scanner.async_scan(None)

    assert len(mock_init.mock_calls) == 1
    assert mock_init.mock_calls[0][1][0] == 'mock-domain'
    assert mock_init.mock_calls[0][2]['context'] == {'source': 'ssdp'}


async def test_scan_match_manufacturer(hass, aioclient_mock):
    """Test matching based on manufacturer."""
    aioclient_mock.get('http://1.1.1.1', text="""
<root>
  <device>
    <manufacturer>Paulus</manufacturer>
  </device>
</root>
""")
    scanner = ssdp.Scanner(hass)

    with patch('netdisco.ssdp.scan', return_value=[
            Mock(st="mock-st", location='http://1.1.1.1')
    ]), patch.dict(
            gn_ssdp.SSDP['manufacturer'], {'Paulus': ['mock-domain']}
    ), patch.object(
            hass.config_entries.flow, 'async_init',
            return_value=mock_coro()
    ) as mock_init:
        await scanner.async_scan(None)

    assert len(mock_init.mock_calls) == 1
    assert mock_init.mock_calls[0][1][0] == 'mock-domain'
    assert mock_init.mock_calls[0][2]['context'] == {'source': 'ssdp'}


async def test_scan_match_device_type(hass, aioclient_mock):
    """Test matching based on device type."""
    aioclient_mock.get('http://1.1.1.1', text="""
<root>
  <device>
    <deviceType>Paulus</deviceType>
  </device>
</root>
""")
    scanner = ssdp.Scanner(hass)

    with patch('netdisco.ssdp.scan', return_value=[
            Mock(st="mock-st", location='http://1.1.1.1')
    ]), patch.dict(
            gn_ssdp.SSDP['device_type'], {'Paulus': ['mock-domain']}
    ), patch.object(
            hass.config_entries.flow, 'async_init',
            return_value=mock_coro()
    ) as mock_init:
        await scanner.async_scan(None)

    assert len(mock_init.mock_calls) == 1
    assert mock_init.mock_calls[0][1][0] == 'mock-domain'
    assert mock_init.mock_calls[0][2]['context'] == {'source': 'ssdp'}

tests/test_data_entry_flow.py
@@ -21,16 +21,13 @@ def manager():
             raise data_entry_flow.UnknownHandler

         flow = handler()
-        flow.init_step = context.get('init_step', 'init') \
-            if context is not None else 'init'
-        flow.source = context.get('source') \
-            if context is not None else 'user_input'
+        flow.init_step = context.get('init_step', 'init')
+        flow.source = context.get('source')
         return flow

     async def async_add_entry(flow, result):
         if result['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
-            result['source'] = flow.context.get('source') \
-                if flow.context is not None else 'user'
+            result['source'] = flow.context.get('source')
             entries.append(result)
         return result

@@ -173,7 +170,7 @@ async def test_create_saves_data(manager):
     assert entry['handler'] == 'test'
     assert entry['title'] == 'Test Title'
     assert entry['data'] == 'Test Data'
-    assert entry['source'] == 'user'
+    assert entry['source'] is None


 async def test_discovery_init_flow(manager):