From 7ecc4b5b79557547c19383a6e85a372cd2699019 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Thu, 9 Nov 2023 18:18:53 +0100 Subject: [PATCH 01/42] various adds realtime initially, better handling of timezone (using HA timezone), extends routes with short_name Moves refresh interval to options (breaking change) --- custom_components/gtfs2/__init__.py | 37 ++- custom_components/gtfs2/config_flow.py | 88 +++++- custom_components/gtfs2/const.py | 9 +- custom_components/gtfs2/coordinator.py | 73 ++++- custom_components/gtfs2/gtfs_helper.py | 56 ++-- custom_components/gtfs2/gtfs_rt_helper.py | 308 +++++++++++++++++++ custom_components/gtfs2/manifest.json | 2 +- custom_components/gtfs2/sensor.py | 36 ++- custom_components/gtfs2/strings.json | 23 +- custom_components/gtfs2/translations/en.json | 24 +- custom_components/gtfs2/translations/fr.json | 17 +- 11 files changed, 612 insertions(+), 61 deletions(-) create mode 100644 custom_components/gtfs2/gtfs_rt_helper.py diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index ae5c8de..b5a204f 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -5,26 +5,51 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant, ServiceCall -from .const import DOMAIN, PLATFORMS, DEFAULT_PATH -from .coordinator import GTFSUpdateCoordinator +from datetime import timedelta + +from .const import DOMAIN, PLATFORMS, DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL +from .coordinator import GTFSUpdateCoordinator, GTFSRealtimeUpdateCoordinator import voluptuous as vol from .gtfs_helper import get_gtfs _LOGGER = logging.getLogger(__name__) +async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + _LOGGER.debug("Migrating from version %s", config_entry.version) + + if config_entry.version == 1: + + new = {**config_entry.data} + new['extract_from'] = 'url' + new.pop('refresh_interval') + + config_entry.version = 2 + hass.config_entries.async_update_entry(config_entry, data=new) + + _LOGGER.debug("Migration to version %s successful", config_entry.version) + + return True + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up GTFS from a config entry.""" hass.data.setdefault(DOMAIN, {}) coordinator = GTFSUpdateCoordinator(hass, entry) + + coordinator_rt = GTFSRealtimeUpdateCoordinator(hass, entry) await coordinator.async_config_entry_first_refresh() + + await coordinator_rt.async_config_entry_first_refresh() hass.data[DOMAIN][entry.entry_id] = { "coordinator": coordinator, } - + + entry.async_on_unload(entry.add_update_listener(update_listener)) + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True @@ -50,3 +75,9 @@ def update_gtfs(call): hass.services.register( DOMAIN, "update_gtfs", update_gtfs) return True + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry): + """Handle options update.""" + hass.data[DOMAIN][entry.entry_id]['coordinator'].update_interval = timedelta(minutes=entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)) + + return True \ No newline at end of file diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 54d9018..b155ef0 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -8,8 +8,9 @@ from homeassistant import config_entries from homeassistant.data_entry_flow import FlowResult 
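For context on the __init__.py hunk above: PATCH 01 moves refresh_interval out of the entry data and registers an options update listener so that changing the interval re-applies the coordinator's polling interval. A minimal standalone sketch of that pattern, not part of the patch itself; DOMAIN is this integration's domain and the 15-minute default is assumed from the defaults used elsewhere in the flow, while the rest roughly follows the patch:

from datetime import timedelta

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

DOMAIN = "gtfs2"               # this integration's domain
DEFAULT_REFRESH_INTERVAL = 15  # minutes; assumed default, matching the flow's earlier default of 15


async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
    """Re-apply the polling interval when the entry's options change."""
    coordinator = hass.data[DOMAIN][entry.entry_id]["coordinator"]
    coordinator.update_interval = timedelta(
        minutes=entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)
    )

# Registered during setup and torn down with the entry, as the patch does:
#   entry.async_on_unload(entry.add_update_listener(update_listener))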
import homeassistant.helpers.config_validation as cv +from homeassistant.core import HomeAssistant, callback -from .const import DEFAULT_PATH, DOMAIN +from .const import DEFAULT_PATH, DOMAIN, DEFAULT_REFRESH_INTERVAL from .gtfs_helper import ( get_gtfs, @@ -33,7 +34,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for GTFS.""" - VERSION = 1 + VERSION = 2 def __init__(self) -> None: """Init ConfigFlow.""" @@ -67,10 +68,11 @@ async def async_step_user(self, user_input: dict | None = None) -> FlowResult: return await self.async_step_remove() else: user_input["url"] = "na" + user_input["extract_from"] = "zip" self._user_inputs.update(user_input) _LOGGER.debug(f"UserInputs File: {self._user_inputs}") return await self.async_step_route() - + async def async_step_source(self, user_input: dict | None = None) -> FlowResult: """Handle a flow initialized by the user.""" errors: dict[str, str] = {} @@ -78,6 +80,7 @@ async def async_step_source(self, user_input: dict | None = None) -> FlowResult: check_data = await self._check_data(user_input) if check_data: errors["base"] = check_data + return self.async_abort(reason=check_data) else: self._user_inputs.update(user_input) _LOGGER.debug(f"UserInputs Data: {self._user_inputs}") @@ -88,11 +91,12 @@ async def async_step_source(self, user_input: dict | None = None) -> FlowResult: data_schema=vol.Schema( { vol.Required("file"): str, - vol.Required("url"): str, + vol.Required("extract_from"): vol.In({"zip": "Existing Zipfile with same name", "url": "URL below"}), + vol.Required("url", default="na"): str, }, ), errors=errors, - ) + ) async def async_step_remove(self, user_input: dict | None = None) -> FlowResult: """Handle a flow initialized by the user.""" @@ -121,8 +125,7 @@ async def async_step_route(self, user_input: dict | None = None) -> FlowResult: self._pygtfs = get_gtfs( self.hass, DEFAULT_PATH, - self._user_inputs["file"], - self._user_inputs["url"], + self._user_inputs, False, ) errors: dict[str, str] = {} @@ -163,7 +166,6 @@ async def async_step_stops(self, user_input: dict | None = None) -> FlowResult: vol.Required("destination", default=last_stop): vol.In(stops), vol.Required("name"): str, vol.Optional("offset", default=0): int, - vol.Required("refresh_interval", default=15): int, vol.Required("include_tomorrow"): vol.In( {False: "No", True: "Yes"} ), @@ -185,15 +187,15 @@ async def async_step_stops(self, user_input: dict | None = None) -> FlowResult: async def _check_data(self, data): self._pygtfs = await self.hass.async_add_executor_job( - get_gtfs, self.hass, DEFAULT_PATH, data["file"], data["url"], False + get_gtfs, self.hass, DEFAULT_PATH, data, False ) - if self._pygtfs == "no_data_file": - return "no_data_file" + if self._pygtfs == "no_data_file" or "no_zip_file": + return self._pygtfs return None async def _check_config(self, data): self._pygtfs = await self.hass.async_add_executor_job( - get_gtfs, self.hass, DEFAULT_PATH, data["file"], data["url"], False + get_gtfs, self.hass, DEFAULT_PATH, data, False ) if self._pygtfs == "no_data_file": return "no_data_file" @@ -210,7 +212,7 @@ async def _check_config(self, data): try: self._data["next_departure"] = await self.hass.async_add_executor_job( - get_next_departure, self._data + get_next_departure, self ) except Exception as ex: # pylint: disable=broad-except _LOGGER.info( @@ -222,3 +224,63 @@ async def _check_config(self, data): if self._data["next_departure"]: return None return "stop_incorrect" + + @staticmethod + @callback + def 
async_get_options_flow( + config_entry: config_entries.ConfigEntry, + ) -> config_entries.OptionsFlow: + """Create the options flow.""" + return GTFSOptionsFlowHandler(config_entry) + + +class GTFSOptionsFlowHandler(config_entries.OptionsFlow): + def __init__(self, config_entry: config_entries.ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + self._data: dict[str, str] = {} + self._user_inputs: dict = {} + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Manage the options.""" + if user_input is not None: + if user_input['real_time']: + self._user_inputs.update(user_input) + _LOGGER.debug(f"GTFS Options with realtime: {self._user_inputs}") + return await self.async_step_real_time() + else: + self._user_inputs.update(user_input) + _LOGGER.debug(f"GTFS Options without realtime: {self._user_inputs}") + return self.async_create_entry(title="", data=self._user_inputs) + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema( + { + vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, + vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), + } + ), + ) + + async def async_step_real_time( + self, user_input: dict[str, Any] | None = None + ) -> FlowResult: + """Handle a realtime initialized by the user.""" + errors: dict[str, str] = {} + if user_input is not None: + self._user_inputs.update(user_input) + return self.async_create_entry(title="", data=self._user_inputs) + + return self.async_show_form( + step_id="real_time", + data_schema=vol.Schema( + { + vol.Required("trip_update_url"): str, + vol.Required("vehicle_position_url"): str, + }, + ), + errors=errors, + ) \ No newline at end of file diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index 2b6c88b..5a6f8ca 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -37,11 +37,6 @@ ATTR_WHEELCHAIR_DESTINATION = "destination_station_wheelchair_boarding_available" ATTR_WHEELCHAIR_ORIGIN = "origin_station_wheelchair_boarding_available" -CONF_DATA = "data" -CONF_DESTINATION = "destination" -CONF_ORIGIN = "origin" -CONF_TOMORROW = "include_tomorrow" - BICYCLE_ALLOWED_DEFAULT = STATE_UNKNOWN BICYCLE_ALLOWED_OPTIONS = {1: True, 2: False} DROP_OFF_TYPE_DEFAULT = STATE_UNKNOWN @@ -240,3 +235,7 @@ WHEELCHAIR_ACCESS_OPTIONS = {1: True, 2: False} WHEELCHAIR_BOARDING_DEFAULT = STATE_UNKNOWN WHEELCHAIR_BOARDING_OPTIONS = {1: True, 2: False} + + +# Realtime specifics +DEFAULT_REFRESH_INTERVAL_RT = 1 diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 54e4b35..70f8a0d 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -8,14 +8,15 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL +from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL, DEFAULT_REFRESH_INTERVAL_RT from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index +from .gtfs_rt_helper import get_rt_route_statuses, get_next_services _LOGGER = logging.getLogger(__name__) class GTFSUpdateCoordinator(DataUpdateCoordinator): - """Data update coordinator for the Pronote integration.""" + """Data update coordinator for the GTFS integration.""" config_entry: ConfigEntry @@ -25,20 +26,20 @@ def __init__(self, hass: 
HomeAssistant, entry: ConfigEntry) -> None: hass=hass, logger=_LOGGER, name=entry.entry_id, - update_interval=timedelta( - minutes=entry.data.get("refresh_interval", DEFAULT_REFRESH_INTERVAL) - ), + update_interval=timedelta(minutes=entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)), ) self.config_entry = entry self.hass = hass + self._pygtfs = "" self._data: dict[str, str] = {} async def _async_update_data(self) -> dict[str, str]: """Update.""" data = self.config_entry.data + options = self.config_entry.options self._pygtfs = get_gtfs( - self.hass, DEFAULT_PATH, data["file"], data["url"], False + self.hass, DEFAULT_PATH, data, False ) self._data = { "schedule": self._pygtfs, @@ -56,9 +57,65 @@ async def _async_update_data(self) -> dict[str, str]: try: self._data["next_departure"] = await self.hass.async_add_executor_job( - get_next_departure, self._data + get_next_departure, self ) except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs data from generic helper: %s", ex) + _LOGGER.debug("GTFS coordinator data from helper: %s", self._data["next_departure"]) + return self._data + +class GTFSRealtimeUpdateCoordinator(DataUpdateCoordinator): + """Data update coordinator for the GTFSRT integration.""" + + config_entry: ConfigEntry + + + def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: + """Initialize the coordinator.""" + _LOGGER.debug("GTFS RT: coordinator init") + super().__init__( + hass=hass, + logger=_LOGGER, + name=entry.entry_id, + update_interval=timedelta(minutes=entry.options.get("refresh_interval_rt", DEFAULT_REFRESH_INTERVAL_RT)), + ) + self.config_entry = entry + self.hass = hass + self._data: dict[str, str] = {} + + async def _async_update_data(self) -> dict[str, str]: + """Update.""" + data = self.config_entry.data + options = self.config_entry.options + _LOGGER.debug("GTFS RT: coordinator async_update_data: %s", data) + _LOGGER.debug("GTFS RT: coordinator async_update_data options: %s", options) + #add real_time if setup + + + if options["real_time"]: + + """Initialize the info object.""" + self._trip_update_url = options["trip_update_url"] + self._vehicle_position_url = options["vehicle_position_url"] + self._route_delimiter = "-" +# if options["CONF_API_KEY"] is not None: +# self._headers = {"Authorization": options["CONF_API_KEY"]} +# elif options["CONF_X_API_KEY"] is not None: +# self._headers = {"x-api-key": options["CONF_X_API_KEY"]} +# else: +# self._headers = None + self._headers = None + self.info = {} + self._route_id = data["route"].split(": ")[0] + self._stop_id = data["origin"].split(": ")[0] + self._direction = data["direction"] + self._relative = False + #_LOGGER.debug("GTFS RT: Realtime data: %s", self._data) + self._data = await self.hass.async_add_executor_job(get_rt_route_statuses, self) + self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) + _LOGGER.debug("GTFS RT: Realtime next service: %s", self._get_next_service) + else: + _LOGGER.debug("GTFS RT: Issue with options") + - return self._data + return self._get_next_service diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index c6fcb34..acd5a4f 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -9,18 +9,24 @@ from sqlalchemy.sql import text import homeassistant.util.dt as dt_util +from homeassistant.core import HomeAssistant _LOGGER = logging.getLogger(__name__) -def get_next_departure(data): - _LOGGER.debug("Get 
next departure with data: %s", data) +def get_next_departure(self): + _LOGGER.debug("Get next departure with data: %s", self._data) """Get next departures from data.""" - schedule = data["schedule"] - start_station_id = data["origin"] - end_station_id = data["destination"] - offset = data["offset"] - include_tomorrow = data["include_tomorrow"] + if self.hass.config.time_zone is None: + _LOGGER.error("Timezone is not set in Home Assistant configuration") + timezone = "UTC" + else: + timezone=dt_util.get_time_zone(self.hass.config.time_zone) + schedule = self._data["schedule"] + start_station_id = self._data["origin"] + end_station_id = self._data["destination"] + offset = self._data["offset"] + include_tomorrow = self._data["include_tomorrow"] now = dt_util.now().replace(tzinfo=None) + datetime.timedelta(minutes=offset) now_date = now.strftime(dt_util.DATE_STR_FORMAT) yesterday = now - datetime.timedelta(days=1) @@ -243,8 +249,8 @@ def get_next_departure(data): f"{dest_depart.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_depart_time']}" ) - depart_time = dt_util.parse_datetime(origin_depart_time) - arrival_time = dt_util.parse_datetime(dest_arrival_time) + depart_time = dt_util.parse_datetime(origin_depart_time).replace(tzinfo=timezone) + arrival_time = dt_util.parse_datetime(dest_arrival_time).replace(tzinfo=timezone) origin_stop_time = { "Arrival Time": origin_arrival_time, @@ -283,25 +289,33 @@ def get_next_departure(data): } -def get_gtfs(hass, path, filename, url, update=False): +def get_gtfs(hass, path, data, update=False): """Get gtfs file.""" - file = filename + ".zip" + _LOGGER.debug("Getting gtfs with data: %s", data) + filename = data["file"] + url = data["url"] + file = data["file"] + ".zip" gtfs_dir = hass.config.path(path) os.makedirs(gtfs_dir, exist_ok=True) if update and os.path.exists(os.path.join(gtfs_dir, file)): remove_datasource(hass, path, filename) - - if not os.path.exists(os.path.join(gtfs_dir, file)): - try: - r = requests.get(url, allow_redirects=True) - open(os.path.join(gtfs_dir, file), "wb").write(r.content) - except Exception as ex: # pylint: disable=broad-except - _LOGGER.error("The given URL or GTFS data file/folder was not found") - return "no_data_file" + if data["extract_from"] == "zip": + if not os.path.exists(os.path.join(gtfs_dir, file)): + _LOGGER.error("The given GTFS zipfile was not found") + return "no_zip_file" + if data["extract_from"] == "url": + if not os.path.exists(os.path.join(gtfs_dir, file)): + try: + r = requests.get(url, allow_redirects=True) + open(os.path.join(gtfs_dir, file), "wb").write(r.content) + except Exception as ex: # pylint: disable=broad-except + _LOGGER.error("The given URL or GTFS data file/folder was not found") + return "no_data_file" (gtfs_root, _) = os.path.splitext(file) sqlite_file = f"{gtfs_root}.sqlite?check_same_thread=False" joined_path = os.path.join(gtfs_dir, sqlite_file) + _LOGGER.debug("unpacking: %s", joined_path) gtfs = pygtfs.Schedule(joined_path) # check or wait for unpack journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") @@ -314,7 +328,7 @@ def get_gtfs(hass, path, filename, url, update=False): def get_route_list(schedule): sql_routes = f""" - SELECT route_id, route_long_name from routes + SELECT route_id, route_short_name, route_long_name from routes order by cast(route_id as decimal) """ # noqa: S608 result = schedule.engine.connect().execute( @@ -327,7 +341,7 @@ def get_route_list(schedule): row = row_cursor._asdict() routes_list.append(list(row_cursor)) for x in routes_list: - val = 
x[0] + ": " + x[1] + val = x[0] + ": " + x[1] + " (" + x[2] + ")" routes.append(val) _LOGGER.debug(f"routes: {routes}") return routes diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py new file mode 100644 index 0000000..b8b83b0 --- /dev/null +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -0,0 +1,308 @@ +import logging +from datetime import datetime, timedelta + +import homeassistant.helpers.config_validation as cv +import homeassistant.util.dt as dt_util +import requests +import voluptuous as vol +from google.transit import gtfs_realtime_pb2 +from homeassistant.components.sensor import PLATFORM_SCHEMA +from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_NAME +from homeassistant.helpers.entity import Entity +from homeassistant.util import Throttle + +_LOGGER = logging.getLogger(__name__) + +ATTR_STOP_ID = "Stop ID" +ATTR_ROUTE = "Route" +ATTR_DIRECTION_ID = "Direction ID" +ATTR_DUE_IN = "Due in" +ATTR_DUE_AT = "Due at" +ATTR_NEXT_UP = "Next Service" +ATTR_ICON = "Icon" +ATTR_UNIT_OF_MEASUREMENT = "unit_of_measurement" +ATTR_DEVICE_CLASS = "device_class" + +CONF_API_KEY = "api_key" +CONF_X_API_KEY = "x_api_key" +CONF_STOP_ID = "stopid" +CONF_ROUTE = "route" +CONF_DIRECTION_ID = "directionid" +CONF_DEPARTURES = "departures" +CONF_TRIP_UPDATE_URL = "trip_update_url" +CONF_VEHICLE_POSITION_URL = "vehicle_position_url" +CONF_ROUTE_DELIMITER = "route_delimiter" +CONF_ICON = "icon" +CONF_SERVICE_TYPE = "service_type" +CONF_RELATIVE_TIME = "show_relative_time" + +DEFAULT_SERVICE = "Service" +DEFAULT_ICON = "mdi:bus" +DEFAULT_DIRECTION = "0" + +MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) +TIME_STR_FORMAT = "%H:%M" + +PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( + { + vol.Required(CONF_TRIP_UPDATE_URL): cv.string, + vol.Optional(CONF_API_KEY): cv.string, + vol.Optional(CONF_X_API_KEY): cv.string, + vol.Optional(CONF_VEHICLE_POSITION_URL): cv.string, + vol.Optional(CONF_ROUTE_DELIMITER): cv.string, + + vol.Optional(CONF_DEPARTURES): [ + { + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_STOP_ID): cv.string, + vol.Required(CONF_ROUTE): cv.string, + vol.Optional(CONF_RELATIVE_TIME, default=True): cv.boolean, + vol.Optional( + CONF_DIRECTION_ID, + default=DEFAULT_DIRECTION, # type: ignore + ): str, + vol.Optional( + CONF_ICON, default=DEFAULT_ICON # type: ignore + ): cv.string, + vol.Optional( + CONF_SERVICE_TYPE, default=DEFAULT_SERVICE # type: ignore + ): cv.string, + } + ], + } +) + +def due_in_minutes(timestamp): + """Get the remaining minutes from now until a given datetime object.""" + diff = timestamp - dt_util.now().replace(tzinfo=None) + return int(diff.total_seconds() / 60) + + +def log_info(data: list, indent_level: int) -> None: + indents = " " * indent_level + info_str = f"{indents}{': '.join(str(x) for x in data)}" + _LOGGER.info(info_str) + + +def log_error(data: list, indent_level: int) -> None: + indents = " " * indent_level + info_str = f"{indents}{': '.join(str(x) for x in data)}" + _LOGGER.error(info_str) + + +def log_debug(data: list, indent_level: int) -> None: + indents = " " * indent_level + info_str = f"{indents}{' '.join(str(x) for x in data)}" + _LOGGER.debug(info_str) + + + +def get_gtfs_feed_entities(url: str, headers, label: str): + feed = gtfs_realtime_pb2.FeedMessage() # type: ignore + + # TODO add timeout to requests call + response = requests.get(url, headers=headers, timeout=20) + if response.status_code == 200: + log_info([f"Successfully updated {label}", response.status_code], 0) + else: + 
log_error( + [ + f"Updating {label} got", + response.status_code, + response.content, + ], + 0, + ) + + feed.ParseFromString(response.content) + return feed.entity + + + +## reworked for gtfs2 + +def get_next_services(self): + self.data = self._data + self._stop = self._stop_id + self._route = self._route_id + self._direction = self._direction + _LOGGER.debug("Get Next Services, route/direction/stop: %s", self.data.get(self._route, {}).get(self._direction, {}).get(self._stop, [])) + + next_services = self.data.get(self._route, {}).get(self._direction, {}).get(self._stop, []) + if self.hass.config.time_zone is None: + _LOGGER.error("Timezone is not set in Home Assistant configuration") + timezone = "UTC" + else: + timezone=dt_util.get_time_zone(self.hass.config.time_zone) + + if self._relative : + return ( + due_in_minutes(next_services[0].arrival_time) + if len(next_services) > 0 + else "-" + ) + else: + return ( + next_services[0].arrival_time.replace(tzinfo=timezone) + if len(next_services) > 0 + else "-" + ) + +def get_rt_route_statuses(self): + + vehicle_positions = {} + + + class StopDetails: + def __init__(self, arrival_time, position): + self.arrival_time = arrival_time + self.position = position + + departure_times = {} + + feed_entities = get_gtfs_feed_entities( + url=self._trip_update_url, headers=self._headers, label="trip data" + ) + + for entity in feed_entities: + if entity.HasField("trip_update"): + # If delimiter specified split the route ID in the gtfs rt feed + log_debug( + [ + "Received Trip ID", + entity.trip_update.trip.trip_id, + "Route ID:", + entity.trip_update.trip.route_id, + "direction ID", + entity.trip_update.trip.direction_id, + "Start Time:", + entity.trip_update.trip.start_time, + "Start Date:", + entity.trip_update.trip.start_date, + ], + 1, + ) + if self._route_delimiter is not None: + route_id_split = entity.trip_update.trip.route_id.split( + self._route_delimiter + ) + if route_id_split[0] == self._route_delimiter: + route_id = entity.trip_update.trip.route_id + else: + route_id = route_id_split[0] + log_debug( + [ + "Feed Route ID", + entity.trip_update.trip.route_id, + "changed to", + route_id, + ], + 1, + ) + + else: + route_id = entity.trip_update.trip.route_id + + if route_id not in departure_times: + departure_times[route_id] = {} + + if entity.trip_update.trip.direction_id is not None: + direction_id = str(entity.trip_update.trip.direction_id) + else: + direction_id = DEFAULT_DIRECTION + if direction_id not in departure_times[route_id]: + departure_times[route_id][direction_id] = {} + + for stop in entity.trip_update.stop_time_update: + stop_id = stop.stop_id + if not departure_times[route_id][direction_id].get( + stop_id + ): + departure_times[route_id][direction_id][stop_id] = [] + # Use stop arrival time; + # fall back on departure time if not available + if stop.arrival.time == 0: + stop_time = stop.departure.time + else: + stop_time = stop.arrival.time + log_debug( + [ + "Stop:", + stop_id, + "Stop Sequence:", + stop.stop_sequence, + "Stop Time:", + stop_time, + ], + 2, + ) + # Ignore arrival times in the past + if due_in_minutes(datetime.fromtimestamp(stop_time)) >= 0: + log_debug( + [ + "Adding route ID", + route_id, + "trip ID", + entity.trip_update.trip.trip_id, + "direction ID", + entity.trip_update.trip.direction_id, + "stop ID", + stop_id, + "stop time", + stop_time, + ], + 3, + ) + + details = StopDetails( + datetime.fromtimestamp(stop_time), + vehicle_positions.get( + entity.trip_update.trip.trip_id + ), + ) + 
departure_times[route_id][direction_id][ + stop_id + ].append(details) + + # Sort by arrival time + for route in departure_times: + for direction in departure_times[route]: + for stop in departure_times[route][direction]: + departure_times[route][direction][stop].sort( + key=lambda t: t.arrival_time + ) + + self.info = departure_times + + return departure_times + +def get_rt_vehicle_positions(self): + positions = {} + feed_entities = get_gtfs_feed_entities( + url=self._vehicle_position_url, + headers=self._headers, + label="vehicle positions", + ) + + for entity in feed_entities: + vehicle = entity.vehicle + + if not vehicle.trip.trip_id: + # Vehicle is not in service + continue + log_debug( + [ + "Adding position for trip ID", + vehicle.trip.trip_id, + "position latitude", + vehicle.position.latitude, + "longitude", + vehicle.position.longitude, + ], + 2, + ) + + positions[vehicle.trip.trip_id] = vehicle.position + + return positions + diff --git a/custom_components/gtfs2/manifest.json b/custom_components/gtfs2/manifest.json index 9349aaf..63d4565 100644 --- a/custom_components/gtfs2/manifest.json +++ b/custom_components/gtfs2/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://github.com/vingerha/gtfs2", "iot_class": "local_polling", "issue_tracker": "https://github.com/vingerha/gtfs2/issues", - "requirements": ["pygtfs==0.1.9"], + "requirements": ["pygtfs==0.1.9","gtfs-realtime-bindings==1.0.0"], "version": "0.1.2" } diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 4498762..0f0dc21 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -68,6 +68,12 @@ async def async_setup_entry( ] await coordinator.async_config_entry_first_refresh() + + coordinator_rt: GTFSRealtimeUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][ + "coordinator" + ] + + await coordinator_rt.async_config_entry_first_refresh() sensors = [ GTFSDepartureSensor(coordinator), @@ -82,7 +88,6 @@ class GTFSDepartureSensor(CoordinatorEntity, SensorEntity): def __init__(self, coordinator) -> None: """Initialize the GTFSsensor.""" super().__init__(coordinator) - self._name = coordinator.data["name"] self._attributes: dict[str, Any] = {} @@ -185,7 +190,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 ) self._agency = False - # Define the state as a UTC timestamp with ISO 8601 format + # Define the state as a Agency TZ, then help TZ (which is UTC if no HA TZ set) if not self._departure: self._state = None elif self._agency: @@ -198,10 +203,11 @@ def _update_attrs(self): # noqa: C901 PLR0911 ) else: _LOGGER.debug( - "Self._departure time for state value UTC: %s", + "Self._departure time from helper: %s", {self._departure["departure_time"]}, ) - self._state = self._departure["departure_time"].replace(tzinfo=dt_util.UTC) + self._state = self._departure["departure_time"] + # settin state value self._attr_native_value = self._state @@ -398,3 +404,25 @@ def remove_keys(self, prefix: str) -> None: self._attributes = { k: v for k, v in self._attributes.items() if not k.startswith(prefix) } + + +class GTFSRealtimeDepartureSensor(CoordinatorEntity, SensorEntity): + """Implementation of a GTFS departure sensor.""" + + def __init__(self, coordinator) -> None: + """Initialize the GTFSsensor.""" + super().__init__(coordinator) + _LOGGER.debug("GTFS RT Sensor: coordinator data: %s", coordinator.data ) + self._attributes = self._update_attrs() + self._attr_extra_state_attributes = self._attributes + + @callback + def _handle_coordinator_update(self) -> 
None: + """Handle updated data from the coordinator.""" + self._update_attrs() + super()._handle_coordinator_update() + + def _update_attrs(self): # noqa: C901 PLR0911 + _LOGGER.debug(f"GTFS RT Sensor update attr DATA: {self.coordinator}") + self._attributes["next_departure_realtime"] = self.coordinator + return self._attributes \ No newline at end of file diff --git a/custom_components/gtfs2/strings.json b/custom_components/gtfs2/strings.json index 3b2aeac..3f4ddb3 100644 --- a/custom_components/gtfs2/strings.json +++ b/custom_components/gtfs2/strings.json @@ -43,12 +43,31 @@ "unknown": "[%key:common::config_flow::error::unknown%]", "stop_incorrect": "Start and/or End destination probably incorrect, check logs", "generic_failure": "Overall failure, check logs", - "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder" + "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder", + "no_zip_file": "Data collection issue: ZIP file not in the correct folder" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "files_deleted": "Datasource deleted, this may affect existing routes", - "stop_incorrect": "Start and/or End destination incorrect, possibly not in same direction, check logs" + "stop_incorrect": "Start and/or End destination incorrect, possibly not in same direction, check logs", + "no_zip_file": "Data collection issue: ZIP file not in the correct folder" + } + }, + "options": { + "step": { + "init": { + "description": "Customize the way the integration works", + "data": { + "refresh_interval": "Data refresh interval (in minutes)" + } + }, + "real_time": { + "description": "Provide url to real time API", + "data": { + "trip_update_url": "URL to trip data", + "vehicle_position_url": "URL to vehicle position (can be the same as trip data)" + } + } } } } diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 8654ce0..2384116 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -37,12 +37,32 @@ "unknown": "[%key:common::config_flow::error::unknown%]", "stop_incorrect": "Start and/or End destination incorrect, possibly no transport 'today' or not in same direction, check logs", "generic_failure": "Overall failure, check logs", - "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder" + "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder", + "no_zip_file": "Data collection issue: ZIP file not in the correct folder" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "files_deleted": "Datasource deleted, this may affect existing routes", - "stop_incorrect": "Start and/or End destination incorrect, possibly no transport 'today' or not in same direction, check logs" + "stop_incorrect": "Start and/or End destination incorrect, possibly no transport 'today' or not in same direction, check logs", + "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder", + "no_zip_file": "Data collection issue: ZIP file not existing in the correct folder, note that it is capital-sensitive" + } + }, + "options": { + "step": { + "init": { + "description": "Customize the way the integration works", + "data": { + "refresh_interval": "Data refresh interval (in minutes)" + } + }, + "real_time": { + "description": "Provide url to 
real time API", + "data": { + "trip_update_url": "URL to trip data", + "vehicle_position_url": "URL to vehicle position (can be the same as trip data)" + } + } } } } diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index 4fb1911..3a4d315 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -37,12 +37,25 @@ "unknown": "[%key:common::config_flow::error::unknown%]", "stop_incorrect": "Arrêt de départ et/ou de fin incorrecte, éventuellement pas de transport « aujourd'hui » ou pas dans la même direction, vérifiez les logs d'érreur", "generic_failure": "Échec global, vérifiez les logs d'érreur", - "no_data_file": "Problème de collecte de données : URL incorrecte ou nom de fichier ne se trouvant pas dans le bon dossier" + "no_data_file": "Problème de collecte de données : URL incorrecte ou nom de fichier ne se trouve pas dans le bon dossier", + "no_zip_file": "Problème de collecte de données : fichier ZIP ne se trouve pas dans le bon dossier" }, "abort": { "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "files_deleted": "Source de données supprimée, cela peut affecter les itinéraires existants", - "stop_incorrect": "Arrêt de départ et/ou de fin incorrecte, éventuellement pas de transport « aujourd'hui » ou pas dans la même direction, vérifiez logs d'érreur" + "stop_incorrect": "Arrêt de départ et/ou de fin incorrecte, éventuellement pas de transport « aujourd'hui » ou pas dans la même direction, vérifiez logs d'érreur", + "no_data_file": "Problème de collecte de données : URL incorrecte ou nom de fichier ne se trouve pas dans le bon dossier", + "no_zip_file": "Problème de collecte de données : fichier ZIP ne se trouve pas dans le bon dossier, note: sensible à la casse" + } + }, + "options": { + "step": { + "init": { + "description": "Personnalisez le fonctionnement de l'intégration", + "data": { + "refresh_interval": "Personnalisez le fonctionnement de l'intégration" + } + } } } } \ No newline at end of file From c90384f2e586b68f68019a23d5fad63951d1e253 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 11 Nov 2023 07:27:29 +0100 Subject: [PATCH 02/42] Align --- custom_components/gtfs2/__init__.py | 8 +--- custom_components/gtfs2/config_flow.py | 5 ++- custom_components/gtfs2/const.py | 3 -- custom_components/gtfs2/coordinator.py | 8 ++-- custom_components/gtfs2/gtfs_helper.py | 18 ++++++-- custom_components/gtfs2/manifest.json | 4 +- custom_components/gtfs2/sensor.py | 47 ++++++++++++++------ custom_components/gtfs2/strings.json | 2 +- custom_components/gtfs2/translations/en.json | 2 +- custom_components/gtfs2/translations/fr.json | 2 +- 10 files changed, 62 insertions(+), 37 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index b5a204f..fa688c5 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -37,19 +37,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: hass.data.setdefault(DOMAIN, {}) coordinator = GTFSUpdateCoordinator(hass, entry) - - coordinator_rt = GTFSRealtimeUpdateCoordinator(hass, entry) - await coordinator.async_config_entry_first_refresh() + #await coordinator.async_config_entry_first_refresh() - await coordinator_rt.async_config_entry_first_refresh() - hass.data[DOMAIN][entry.entry_id] = { "coordinator": coordinator, } 
entry.async_on_unload(entry.add_update_listener(update_listener)) - + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index b155ef0..aeef210 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -91,7 +91,7 @@ async def async_step_source(self, user_input: dict | None = None) -> FlowResult: data_schema=vol.Schema( { vol.Required("file"): str, - vol.Required("extract_from"): vol.In({"zip": "Existing Zipfile with same name", "url": "URL below"}), + vol.Required("extract_from"): vol.In({"zip": "Use gtfs2/zipfile with above name, without extension", "url": "Use URL below, leave 'na' if using zip"}), vol.Required("url", default="na"): str, }, ), @@ -246,6 +246,7 @@ async def async_step_init( ) -> FlowResult: """Manage the options.""" if user_input is not None: + user_input['real_time'] = False if user_input['real_time']: self._user_inputs.update(user_input) _LOGGER.debug(f"GTFS Options with realtime: {self._user_inputs}") @@ -260,7 +261,7 @@ async def async_step_init( data_schema=vol.Schema( { vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, - vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), +# vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), } ), ) diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index 5a6f8ca..865b814 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -236,6 +236,3 @@ WHEELCHAIR_BOARDING_DEFAULT = STATE_UNKNOWN WHEELCHAIR_BOARDING_OPTIONS = {1: True, 2: False} - -# Realtime specifics -DEFAULT_REFRESH_INTERVAL_RT = 1 diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 70f8a0d..c04f29a 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -8,7 +8,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL, DEFAULT_REFRESH_INTERVAL_RT +from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index from .gtfs_rt_helper import get_rt_route_statuses, get_next_services @@ -92,7 +92,7 @@ async def _async_update_data(self) -> dict[str, str]: #add real_time if setup - if options["real_time"]: + if "real_time" in options: """Initialize the info object.""" self._trip_update_url = options["trip_update_url"] @@ -115,7 +115,7 @@ async def _async_update_data(self) -> dict[str, str]: self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) _LOGGER.debug("GTFS RT: Realtime next service: %s", self._get_next_service) else: - _LOGGER.debug("GTFS RT: Issue with options") - + _LOGGER.error("GTFS RT: Issue with entity options") + return "---" return self._get_next_service diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index acd5a4f..e2b0a67 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -48,7 +48,7 @@ def get_next_departure(self): tomorrow_order = f"calendar.{tomorrow_name} DESC," sql_query = f""" - SELECT trip.trip_id, trip.route_id,route.route_long_name, + SELECT trip.trip_id, trip.route_id,trip.trip_headsign,route.route_long_name, 
time(origin_stop_time.arrival_time) AS origin_arrival_time, time(origin_stop_time.departure_time) AS origin_depart_time, date(origin_stop_time.departure_time) AS origin_depart_date, @@ -93,7 +93,7 @@ def get_next_departure(self): AND calendar.start_date <= :today AND calendar.end_date >= :today UNION ALL - SELECT trip.trip_id, trip.route_id,route.route_long_name, + SELECT trip.trip_id, trip.route_id,trip.trip_headsign,route.route_long_name, time(origin_stop_time.arrival_time) AS origin_arrival_time, time(origin_stop_time.departure_time) AS origin_depart_time, date(origin_stop_time.departure_time) AS origin_depart_date, @@ -211,7 +211,7 @@ def get_next_departure(self): _LOGGER.debug( "Timetable Remaining Departures on this Start/Stop: %s", timetable_remaining ) - + # create upcoming timetable with line info timetable_remaining_line = [] for key2, value in sorted(timetable.items()): if datetime.datetime.strptime(key2, "%Y-%m-%d %H:%M:%S") > now: @@ -222,6 +222,17 @@ def get_next_departure(self): "Timetable Remaining Departures on this Start/Stop, per line: %s", timetable_remaining_line, ) + # create upcoming timetable with headsign + timetable_remaining_headsign = [] + for key2, value in sorted(timetable.items()): + if datetime.datetime.strptime(key2, "%Y-%m-%d %H:%M:%S") > now: + timetable_remaining_headsign.append( + str(key2) + " (" + str(value["trip_headsign"]) + ")" + ) + _LOGGER.debug( + "Timetable Remaining Departures on this Start/Stop, with headsign: %s", + timetable_remaining_headsign, + ) # Format arrival and departure dates and times, accounting for the # possibility of times crossing over midnight. @@ -286,6 +297,7 @@ def get_next_departure(self): "destination_stop_time": destination_stop_time, "next_departures": timetable_remaining, "next_departures_lines": timetable_remaining_line, + "next_departures_headsign": timetable_remaining_headsign, } diff --git a/custom_components/gtfs2/manifest.json b/custom_components/gtfs2/manifest.json index 63d4565..9c94526 100644 --- a/custom_components/gtfs2/manifest.json +++ b/custom_components/gtfs2/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://github.com/vingerha/gtfs2", "iot_class": "local_polling", "issue_tracker": "https://github.com/vingerha/gtfs2/issues", - "requirements": ["pygtfs==0.1.9","gtfs-realtime-bindings==1.0.0"], - "version": "0.1.2" + "requirements": ["pygtfs==0.1.9"], + "version": "0.1.5" } diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 0f0dc21..30800fd 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -12,6 +12,8 @@ from homeassistant.util import slugify import homeassistant.util.dt as dt_util +from .coordinator import GTFSRealtimeUpdateCoordinator + from .const import ( ATTR_ARRIVAL, ATTR_BICYCLE, @@ -64,22 +66,18 @@ async def async_setup_entry( ) -> None: """Initialize the setup.""" coordinator: GTFSUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][ - "coordinator" + "coordinator" ] await coordinator.async_config_entry_first_refresh() - - coordinator_rt: GTFSRealtimeUpdateCoordinator = hass.data[DOMAIN][config_entry.entry_id][ - "coordinator" - ] - - await coordinator_rt.async_config_entry_first_refresh() sensors = [ GTFSDepartureSensor(coordinator), ] async_add_entities(sensors, False) + + class GTFSDepartureSensor(CoordinatorEntity, SensorEntity): @@ -375,6 +373,14 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._attributes["next_departures_lines"] = self._departure[ "next_departures_lines" ][:10] + + # Add next 
departures with their headsign + prefix = "next_departures_headsign" + self._attributes["next_departures_headsign"] = [] + if self._next_departures: + self._attributes["next_departures_headsign"] = self._departure[ + "next_departures_headsign" + ][:10] self._attributes["updated_at"] = dt_util.now().replace(tzinfo=None) self._attr_extra_state_attributes = self._attributes @@ -406,23 +412,36 @@ def remove_keys(self, prefix: str) -> None: } -class GTFSRealtimeDepartureSensor(CoordinatorEntity, SensorEntity): +class GTFSRealtimeDepartureSensor(CoordinatorEntity): """Implementation of a GTFS departure sensor.""" - def __init__(self, coordinator) -> None: + def __init__(self, coordinator: GTFSRealtimeUpdateCoordinator) -> None: """Initialize the GTFSsensor.""" super().__init__(coordinator) + self._name = coordinator.data["name"] + "_rt" + self._attributes: dict[str, Any] = {} + + self._attr_unique_id = f"gtfs-{self._name}_rt" + self._attr_device_info = DeviceInfo( + name=f"GTFS - {self._name}", + entry_type=DeviceEntryType.SERVICE, + identifiers={(DOMAIN, f"GTFS - {self._name}_rt")}, + manufacturer="GTFS", + model=self._name, + ) _LOGGER.debug("GTFS RT Sensor: coordinator data: %s", coordinator.data ) - self._attributes = self._update_attrs() + self._coordinator = coordinator + self._attributes = self._update_attrs_rt() self._attr_extra_state_attributes = self._attributes @callback def _handle_coordinator_update(self) -> None: """Handle updated data from the coordinator.""" - self._update_attrs() + self._update_attrs_rt() super()._handle_coordinator_update() - def _update_attrs(self): # noqa: C901 PLR0911 - _LOGGER.debug(f"GTFS RT Sensor update attr DATA: {self.coordinator}") - self._attributes["next_departure_realtime"] = self.coordinator + def _update_attrs_rt(self): # noqa: C901 PLR0911 + _LOGGER.debug(f"GTFS RT Sensor update attr DATA: {self._coordinator.data}") + self._attr_native_value = coordinator.data + self._attributes["next_departure_realtime"] = self._coordinator.data return self._attributes \ No newline at end of file diff --git a/custom_components/gtfs2/strings.json b/custom_components/gtfs2/strings.json index 3f4ddb3..2426849 100644 --- a/custom_components/gtfs2/strings.json +++ b/custom_components/gtfs2/strings.json @@ -12,7 +12,7 @@ "file": "New datasource name", "url": "external url to gtfs data file" }, - "description": "NOTE: with a new url: this may take minutes after submit" + "description": "NOTE: with a new url/zip, this may take a long time after submit" }, "remove": { "data": { diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 2384116..66d1da6 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -12,7 +12,7 @@ "file": "New datasource name", "url": "external url to gtfs data (zip) file" }, - "description": "NOTE: with a new url, this may take quite a bit of time, \n depending on file-size and system performance" + "description": "NOTE: with a new url/zip, this may take quite a bit of time, \n depending on file-size and system performance" }, "route": { "data": { diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index 3a4d315..72f02bc 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -12,7 +12,7 @@ "file": "Nom de la nouvelle source de données", "url": "URL externe vers le fichier (zip) des données GTFS" }, - "description": "REMARQUE : 
avec une nouvelle URL, cela peut prendre du temps après la soumission, selon la taille du fichier et performance du serveur" + "description": "REMARQUE: avec une nouvelle URL/zip, cela peut prendre du temps après la soumission, \n selon la taille du fichier et performance du serveur" }, "route": { "data": { From 1ada7a91bc0f79b6053748743fc66b63a788b21a Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 11 Nov 2023 07:32:51 +0100 Subject: [PATCH 03/42] Add library for realtime --- custom_components/gtfs2/manifest.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/gtfs2/manifest.json b/custom_components/gtfs2/manifest.json index 9c94526..8dbe4c3 100644 --- a/custom_components/gtfs2/manifest.json +++ b/custom_components/gtfs2/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://github.com/vingerha/gtfs2", "iot_class": "local_polling", "issue_tracker": "https://github.com/vingerha/gtfs2/issues", - "requirements": ["pygtfs==0.1.9"], + "requirements": ["pygtfs==0.1.9","gtfs-realtime-bindings==1.0.0"], "version": "0.1.5" } From aaa00ace6fa74018cc9116f66f485e3fb2df72b7 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 11 Nov 2023 08:05:38 +0100 Subject: [PATCH 04/42] Align for realtime --- custom_components/gtfs2/__init__.py | 30 +++-- custom_components/gtfs2/config_flow.py | 5 +- custom_components/gtfs2/coordinator.py | 141 ++++++++++------------ custom_components/gtfs2/gtfs_helper.py | 1 + custom_components/gtfs2/gtfs_rt_helper.py | 2 +- custom_components/gtfs2/sensor.py | 49 ++------ 6 files changed, 106 insertions(+), 122 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index fa688c5..4e71276 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -8,7 +8,7 @@ from datetime import timedelta from .const import DOMAIN, PLATFORMS, DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL -from .coordinator import GTFSUpdateCoordinator, GTFSRealtimeUpdateCoordinator +from .coordinator import GTFSUpdateCoordinator import voluptuous as vol from .gtfs_helper import get_gtfs @@ -20,12 +20,25 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: if config_entry.version == 1: - new = {**config_entry.data} - new['extract_from'] = 'url' - new.pop('refresh_interval') - - config_entry.version = 2 + new_data = {**config_entry.data} + new_data['extract_from'] = 'url' + new_data.pop('refresh_interval') + + new_options = {**config_entry.options} + new_options['real_time'] = False + new_options['refresh_interval'] = 15 + + config_entry.version = 3 hass.config_entries.async_update_entry(config_entry, data=new) + hass.config_entries.async_update_entry(config_entry, options=new_options) + + if config_entry.version == 2: + + new_options = {**config_entry.options} + new_options['real_time'] = False + + config_entry.version = 3 + hass.config_entries.async_update_entry(config_entry, options=new_options) _LOGGER.debug("Migration to version %s successful", config_entry.version) @@ -40,6 +53,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: #await coordinator.async_config_entry_first_refresh() + if not coordinator.last_update_success: + raise ConfigEntryNotReady + hass.data[DOMAIN][entry.entry_id] = { "coordinator": coordinator, } @@ -74,6 +90,6 @@ def update_gtfs(call): async def update_listener(hass: HomeAssistant, entry: ConfigEntry): """Handle options update.""" - 
hass.data[DOMAIN][entry.entry_id]['coordinator'].update_interval = timedelta(minutes=entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)) + hass.data[DOMAIN][entry.entry_id]['coordinator'].update_interval = timedelta(minutes=1) return True \ No newline at end of file diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index aeef210..1d56101 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -34,7 +34,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for GTFS.""" - VERSION = 2 + VERSION = 3 def __init__(self) -> None: """Init ConfigFlow.""" @@ -246,7 +246,6 @@ async def async_step_init( ) -> FlowResult: """Manage the options.""" if user_input is not None: - user_input['real_time'] = False if user_input['real_time']: self._user_inputs.update(user_input) _LOGGER.debug(f"GTFS Options with realtime: {self._user_inputs}") @@ -261,7 +260,7 @@ async def async_step_init( data_schema=vol.Schema( { vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, -# vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), + vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), } ), ) diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index c04f29a..8614b10 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -4,9 +4,11 @@ from datetime import timedelta import logging + from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +import homeassistant.util.dt as dt_util from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index @@ -26,7 +28,7 @@ def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: hass=hass, logger=_LOGGER, name=entry.entry_id, - update_interval=timedelta(minutes=entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)), + update_interval=timedelta(minutes=1), ) self.config_entry = entry self.hass = hass @@ -35,87 +37,78 @@ def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: self._data: dict[str, str] = {} async def _async_update_data(self) -> dict[str, str]: - """Update.""" + """Get the latest data from GTFS and GTFS relatime, depending refresh interval""" data = self.config_entry.data options = self.config_entry.options self._pygtfs = get_gtfs( self.hass, DEFAULT_PATH, data, False ) - self._data = { - "schedule": self._pygtfs, - "origin": data["origin"].split(": ")[0], - "destination": data["destination"].split(": ")[0], - "offset": data["offset"], - "include_tomorrow": data["include_tomorrow"], - "gtfs_dir": DEFAULT_PATH, - "name": data["name"], - } + previous_data = None if self.data is None else self.data.copy() + _LOGGER.debug("Previous data: %s", previous_data) - check_index = await self.hass.async_add_executor_job( - check_datasource_index, self._pygtfs - ) - - try: - self._data["next_departure"] = await self.hass.async_add_executor_job( - get_next_departure, self - ) - except Exception as ex: # pylint: disable=broad-except - _LOGGER.error("Error getting gtfs data from generic helper: %s", ex) - _LOGGER.debug("GTFS coordinator data from helper: %s", self._data["next_departure"]) - return self._data + if previous_data is not None and 
(previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.now().replace(tzinfo=None): + _LOGGER.debug("Do nothing") + self._data = previous_data + + if previous_data is None or (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) < dt_util.now().replace(tzinfo=None): + self._data = { + "schedule": self._pygtfs, + "origin": data["origin"].split(": ")[0], + "destination": data["destination"].split(": ")[0], + "offset": data["offset"], + "include_tomorrow": data["include_tomorrow"], + "gtfs_dir": DEFAULT_PATH, + "name": data["name"], + } -class GTFSRealtimeUpdateCoordinator(DataUpdateCoordinator): - """Data update coordinator for the GTFSRT integration.""" - - config_entry: ConfigEntry - - - def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None: - """Initialize the coordinator.""" - _LOGGER.debug("GTFS RT: coordinator init") - super().__init__( - hass=hass, - logger=_LOGGER, - name=entry.entry_id, - update_interval=timedelta(minutes=entry.options.get("refresh_interval_rt", DEFAULT_REFRESH_INTERVAL_RT)), - ) - self.config_entry = entry - self.hass = hass - self._data: dict[str, str] = {} - - async def _async_update_data(self) -> dict[str, str]: - """Update.""" - data = self.config_entry.data - options = self.config_entry.options - _LOGGER.debug("GTFS RT: coordinator async_update_data: %s", data) - _LOGGER.debug("GTFS RT: coordinator async_update_data options: %s", options) - #add real_time if setup - + check_index = await self.hass.async_add_executor_job( + check_datasource_index, self._pygtfs + ) + + try: + self._data["next_departure"] = await self.hass.async_add_executor_job( + get_next_departure, self + ) + except Exception as ex: # pylint: disable=broad-except + _LOGGER.error("Error getting gtfs data from generic helper: %s", ex) + return None + _LOGGER.debug("GTFS coordinator data from helper: %s", self._data["next_departure"]) + if "real_time" in options: - - """Initialize the info object.""" - self._trip_update_url = options["trip_update_url"] - self._vehicle_position_url = options["vehicle_position_url"] - self._route_delimiter = "-" -# if options["CONF_API_KEY"] is not None: -# self._headers = {"Authorization": options["CONF_API_KEY"]} -# elif options["CONF_X_API_KEY"] is not None: -# self._headers = {"x-api-key": options["CONF_X_API_KEY"]} -# else: -# self._headers = None - self._headers = None - self.info = {} - self._route_id = data["route"].split(": ")[0] - self._stop_id = data["origin"].split(": ")[0] - self._direction = data["direction"] - self._relative = False - #_LOGGER.debug("GTFS RT: Realtime data: %s", self._data) - self._data = await self.hass.async_add_executor_job(get_rt_route_statuses, self) - self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) - _LOGGER.debug("GTFS RT: Realtime next service: %s", self._get_next_service) + if options["real_time"]: + + """Initialize the info object.""" + self._trip_update_url = options["trip_update_url"] + self._vehicle_position_url = options["vehicle_position_url"] + self._route_delimiter = "-" + # if options["CONF_API_KEY"] is not None: + # self._headers = {"Authorization": options["CONF_API_KEY"]} + # elif options["CONF_X_API_KEY"] is not None: + # self._headers = {"x-api-key": options["CONF_X_API_KEY"]} + # else: + # self._headers = None + self._headers = None + self.info = {} + self._route_id = 
self._data["next_departure"]["route_id"] + self._stop_id = data["origin"].split(": ")[0] + self._direction = data["direction"] + self._relative = False + #_LOGGER.debug("GTFS RT: Realtime data: %s", self._data) + try: + self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) + self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) + except Exception as ex: # pylint: disable=broad-except + _LOGGER.error("Error getting gtfs realtime data: %s", ex) + self._get_next_service = "error" + else: + _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") + self._get_next_service = "n.a." else: - _LOGGER.error("GTFS RT: Issue with entity options") - return "---" + _LOGGER.debug("GTFS RT: RealTime not selected in entity options") + self._get_next_service = "n.a." + self._data["next_departure"]["next_departure_realtime"] = self._get_next_service + self._data["next_departure"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) + + return self._data - return self._get_next_service diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index e2b0a67..331ecd1 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -298,6 +298,7 @@ def get_next_departure(self): "next_departures": timetable_remaining, "next_departures_lines": timetable_remaining_line, "next_departures_headsign": timetable_remaining_headsign, + "gtfs_updated_at": dt_util.now().replace(tzinfo=None), } diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index b8b83b0..5d9940d 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -122,7 +122,7 @@ def get_gtfs_feed_entities(url: str, headers, label: str): ## reworked for gtfs2 def get_next_services(self): - self.data = self._data + self.data = self._get_rt_route_statuses self._stop = self._stop_id self._route = self._route_id self._direction = self._direction diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 30800fd..102120c 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -12,8 +12,6 @@ from homeassistant.util import slugify import homeassistant.util.dt as dt_util -from .coordinator import GTFSRealtimeUpdateCoordinator - from .const import ( ATTR_ARRIVAL, ATTR_BICYCLE, @@ -381,8 +379,18 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._attributes["next_departures_headsign"] = self._departure[ "next_departures_headsign" ][:10] - - self._attributes["updated_at"] = dt_util.now().replace(tzinfo=None) + + # Add next departure realtime + self._attributes["next_departure_realtime"] = self._departure[ + "next_departure_realtime" + ] + self._attributes["gtfs_rt_updated_at"] = self._departure[ + "gtfs_rt_updated_at" + ] + + self._attributes["gtfs_updated_at"] = self._departure[ + "gtfs_updated_at" + ] self._attr_extra_state_attributes = self._attributes return self._attr_extra_state_attributes @@ -412,36 +420,3 @@ def remove_keys(self, prefix: str) -> None: } -class GTFSRealtimeDepartureSensor(CoordinatorEntity): - """Implementation of a GTFS departure sensor.""" - - def __init__(self, coordinator: GTFSRealtimeUpdateCoordinator) -> None: - """Initialize the GTFSsensor.""" - super().__init__(coordinator) - self._name = coordinator.data["name"] + "_rt" - self._attributes: dict[str, Any] = {} - - self._attr_unique_id = f"gtfs-{self._name}_rt" - 
self._attr_device_info = DeviceInfo( - name=f"GTFS - {self._name}", - entry_type=DeviceEntryType.SERVICE, - identifiers={(DOMAIN, f"GTFS - {self._name}_rt")}, - manufacturer="GTFS", - model=self._name, - ) - _LOGGER.debug("GTFS RT Sensor: coordinator data: %s", coordinator.data ) - self._coordinator = coordinator - self._attributes = self._update_attrs_rt() - self._attr_extra_state_attributes = self._attributes - - @callback - def _handle_coordinator_update(self) -> None: - """Handle updated data from the coordinator.""" - self._update_attrs_rt() - super()._handle_coordinator_update() - - def _update_attrs_rt(self): # noqa: C901 PLR0911 - _LOGGER.debug(f"GTFS RT Sensor update attr DATA: {self._coordinator.data}") - self._attr_native_value = coordinator.data - self._attributes["next_departure_realtime"] = self._coordinator.data - return self._attributes \ No newline at end of file From f46f750ad074f772ee3e84461e91a0170c302046 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 11 Nov 2023 16:03:01 +0100 Subject: [PATCH 05/42] Add option to x the datasource extraction process --- custom_components/gtfs2/config_flow.py | 60 +++++++++++--------- custom_components/gtfs2/gtfs_helper.py | 26 +++++---- custom_components/gtfs2/strings.json | 3 +- custom_components/gtfs2/translations/en.json | 3 +- custom_components/gtfs2/translations/fr.json | 3 +- 5 files changed, 55 insertions(+), 40 deletions(-) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 1d56101..6fde206 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -76,27 +76,27 @@ async def async_step_user(self, user_input: dict | None = None) -> FlowResult: async def async_step_source(self, user_input: dict | None = None) -> FlowResult: """Handle a flow initialized by the user.""" errors: dict[str, str] = {} - if user_input is not None: - check_data = await self._check_data(user_input) - if check_data: - errors["base"] = check_data - return self.async_abort(reason=check_data) - else: - self._user_inputs.update(user_input) - _LOGGER.debug(f"UserInputs Data: {self._user_inputs}") - return await self.async_step_route() - - return self.async_show_form( - step_id="source", - data_schema=vol.Schema( - { - vol.Required("file"): str, - vol.Required("extract_from"): vol.In({"zip": "Use gtfs2/zipfile with above name, without extension", "url": "Use URL below, leave 'na' if using zip"}), - vol.Required("url", default="na"): str, - }, - ), - errors=errors, - ) + if user_input is None: + return self.async_show_form( + step_id="source", + data_schema=vol.Schema( + { + vol.Required("file"): str, + vol.Required("extract_from"): vol.In({"zip": "Use gtfs2/zipfile with above name, without extension", "url": "Use URL below, leave 'na' if using zip"}), + vol.Required("url", default="na"): str, + }, + ), + errors=errors, + ) + check_data = await self._check_data(user_input) + _LOGGER.debug("Source check data: %s", check_data) + if check_data : + errors["base"] = check_data + return self.async_abort(reason=check_data) + else: + self._user_inputs.update(user_input) + _LOGGER.debug(f"UserInputs Data: {self._user_inputs}") + return await self.async_step_route() async def async_step_remove(self, user_input: dict | None = None) -> FlowResult: """Handle a flow initialized by the user.""" @@ -122,13 +122,19 @@ async def async_step_remove(self, user_input: dict | None = None) -> FlowResult: async def async_step_route(self, user_input: dict | 
None = None) -> FlowResult: """Handle the route.""" + errors: dict[str, str] = {} + check_data = await self._check_data(self._user_inputs) + _LOGGER.debug("Source check data: %s", check_data) + if check_data : + errors["base"] = check_data + return self.async_abort(reason=check_data) self._pygtfs = get_gtfs( self.hass, DEFAULT_PATH, self._user_inputs, False, ) - errors: dict[str, str] = {} + if user_input is None: return self.async_show_form( step_id="route", @@ -140,6 +146,7 @@ async def async_step_route(self, user_input: dict | None = None) -> FlowResult: ), }, ), + errors=errors, ) self._user_inputs.update(user_input) _LOGGER.debug(f"UserInputs Route: {self._user_inputs}") @@ -189,7 +196,8 @@ async def _check_data(self, data): self._pygtfs = await self.hass.async_add_executor_job( get_gtfs, self.hass, DEFAULT_PATH, data, False ) - if self._pygtfs == "no_data_file" or "no_zip_file": + _LOGGER.debug("Checkdata: %s ", self._pygtfs) + if self._pygtfs in ['no_data_file', 'no_zip_file', 'extracting'] : return self._pygtfs return None @@ -260,7 +268,7 @@ async def async_step_init( data_schema=vol.Schema( { vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, - vol.Required("real_time"): vol.In({False: "No", True: "Yes"}), + vol.Required("real_time", default=self.config_entry.options.get("real_time")): vol.In({False: "No", True: "Yes"}), } ), ) @@ -278,8 +286,8 @@ async def async_step_real_time( step_id="real_time", data_schema=vol.Schema( { - vol.Required("trip_update_url"): str, - vol.Required("vehicle_position_url"): str, + vol.Required("trip_update_url", default=self.config_entry.options.get("trip_update_url")): str, + vol.Required("vehicle_position_url", default=self.config_entry.options.get("vehicle_position_url")): str, }, ), errors=errors, diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 331ecd1..60fa7f8 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -7,6 +7,7 @@ import requests import pygtfs from sqlalchemy.sql import text +import multiprocessing import homeassistant.util.dt as dt_util from homeassistant.core import HomeAssistant @@ -305,11 +306,19 @@ def get_next_departure(self): def get_gtfs(hass, path, data, update=False): """Get gtfs file.""" _LOGGER.debug("Getting gtfs with data: %s", data) + gtfs_dir = hass.config.path(path) + os.makedirs(gtfs_dir, exist_ok=True) filename = data["file"] url = data["url"] file = data["file"] + ".zip" - gtfs_dir = hass.config.path(path) - os.makedirs(gtfs_dir, exist_ok=True) + sqlite = data["file"] + ".sqlite" + journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") + _LOGGER.debug("filename__: %s", filename[-2:]) + _LOGGER.debug("journal: %s", journal) + _LOGGER.debug("journal exist: %s", os.path.exists(journal)) + if os.path.exists(journal) : + _LOGGER.debug("Still unpacking %s", filename) + return "extracting" if update and os.path.exists(os.path.join(gtfs_dir, file)): remove_datasource(hass, path, filename) if data["extract_from"] == "zip": @@ -327,18 +336,13 @@ def get_gtfs(hass, path, data, update=False): (gtfs_root, _) = os.path.splitext(file) sqlite_file = f"{gtfs_root}.sqlite?check_same_thread=False" - joined_path = os.path.join(gtfs_dir, sqlite_file) - _LOGGER.debug("unpacking: %s", joined_path) - gtfs = pygtfs.Schedule(joined_path) - # check or wait for unpack - journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") - while os.path.isfile(journal): - 
time.sleep(10) + joined_path = os.path.join(gtfs_dir, sqlite_file) + gtfs = pygtfs.Schedule(joined_path) if not gtfs.feeds: + _LOGGER.info("Starting gtfs file unpacking: %s", joined_path) pygtfs.append_feed(gtfs, os.path.join(gtfs_dir, file)) return gtfs - def get_route_list(schedule): sql_routes = f""" SELECT route_id, route_short_name, route_long_name from routes @@ -397,7 +401,7 @@ def get_datasources(hass, path) -> dict[str]: datasources = [] for file in files: if file.endswith(".sqlite"): - datasources.append(file.split(".")[0]) + datasources.append(file.split(".")[0]) _LOGGER.debug(f"datasources: {datasources}") return datasources diff --git a/custom_components/gtfs2/strings.json b/custom_components/gtfs2/strings.json index 2426849..d24a0e0 100644 --- a/custom_components/gtfs2/strings.json +++ b/custom_components/gtfs2/strings.json @@ -50,7 +50,8 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "files_deleted": "Datasource deleted, this may affect existing routes", "stop_incorrect": "Start and/or End destination incorrect, possibly not in same direction, check logs", - "no_zip_file": "Data collection issue: ZIP file not in the correct folder" + "no_zip_file": "Data collection issue: ZIP file not in the correct folder", + "extracting": "Extracting data, this can take a while" } }, "options": { diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 66d1da6..8a8c42d 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -45,7 +45,8 @@ "files_deleted": "Datasource deleted, this may affect existing routes", "stop_incorrect": "Start and/or End destination incorrect, possibly no transport 'today' or not in same direction, check logs", "no_data_file": "Data collection issue: URL incorrect or filename not in the correct folder", - "no_zip_file": "Data collection issue: ZIP file not existing in the correct folder, note that it is capital-sensitive" + "no_zip_file": "Data collection issue: ZIP file not existing in the correct folder, note that it is capital-sensitive", + "extracting": "Extracting data, this can take a while" } }, "options": { diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index 72f02bc..cbd7b72 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -45,7 +45,8 @@ "files_deleted": "Source de données supprimée, cela peut affecter les itinéraires existants", "stop_incorrect": "Arrêt de départ et/ou de fin incorrecte, éventuellement pas de transport « aujourd'hui » ou pas dans la même direction, vérifiez logs d'érreur", "no_data_file": "Problème de collecte de données : URL incorrecte ou nom de fichier ne se trouve pas dans le bon dossier", - "no_zip_file": "Problème de collecte de données : fichier ZIP ne se trouve pas dans le bon dossier, note: sensible à la casse" + "no_zip_file": "Problème de collecte de données : fichier ZIP ne se trouve pas dans le bon dossier, note: sensible à la casse", + "extracting": "Extraction des données, cela prend du temps" } }, "options": { From ce16b55c8a9855f00648e85989fea1d67ec8f3fb Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 11 Nov 2023 18:51:46 +0100 Subject: [PATCH 06/42] Fix localisation in config_flow --- custom_components/gtfs2/config_flow.py | 26 ++++++++-------- custom_components/gtfs2/translations/en.json | 20 +++++++++++-- 
custom_components/gtfs2/translations/fr.json | 31 +++++++++++++++++--- 3 files changed, 57 insertions(+), 20 deletions(-) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 6fde206..0dd2c89 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -9,6 +9,7 @@ from homeassistant.data_entry_flow import FlowResult import homeassistant.helpers.config_validation as cv from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import selector from .const import DEFAULT_PATH, DOMAIN, DEFAULT_REFRESH_INTERVAL @@ -81,8 +82,8 @@ async def async_step_source(self, user_input: dict | None = None) -> FlowResult: step_id="source", data_schema=vol.Schema( { + vol.Required("extract_from"): selector.SelectSelector(selector.SelectSelectorConfig(options=["zip", "url"], translation_key="extract_from")), vol.Required("file"): str, - vol.Required("extract_from"): vol.In({"zip": "Use gtfs2/zipfile with above name, without extension", "url": "Use URL below, leave 'na' if using zip"}), vol.Required("url", default="na"): str, }, ), @@ -141,9 +142,7 @@ async def async_step_route(self, user_input: dict | None = None) -> FlowResult: data_schema=vol.Schema( { vol.Required("route"): vol.In(get_route_list(self._pygtfs)), - vol.Required("direction"): vol.In( - {"0": "Outward", "1": "Return"} - ), + vol.Required("direction"): selector.SelectSelector(selector.SelectSelectorConfig(options=["0", "1"], translation_key="direction")), }, ), errors=errors, @@ -173,9 +172,7 @@ async def async_step_stops(self, user_input: dict | None = None) -> FlowResult: vol.Required("destination", default=last_stop): vol.In(stops), vol.Required("name"): str, vol.Optional("offset", default=0): int, - vol.Required("include_tomorrow"): vol.In( - {False: "No", True: "Yes"} - ), + vol.Optional("include_tomorrow", default = False): selector.BooleanSelector(), }, ), errors=errors, @@ -262,15 +259,16 @@ async def async_step_init( self._user_inputs.update(user_input) _LOGGER.debug(f"GTFS Options without realtime: {self._user_inputs}") return self.async_create_entry(title="", data=self._user_inputs) - - return self.async_show_form( - step_id="init", - data_schema=vol.Schema( - { + + opt1_schema = { vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, - vol.Required("real_time", default=self.config_entry.options.get("real_time")): vol.In({False: "No", True: "Yes"}), + vol.Optional("real_time", default=self.config_entry.options.get("real_time")): selector.BooleanSelector() } - ), + + + return self.async_show_form( + step_id="init", + data_schema=vol.Schema(opt1_schema) ) async def async_step_real_time( diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 8a8c42d..136f371 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -10,7 +10,8 @@ "source": { "data": { "file": "New datasource name", - "url": "external url to gtfs data (zip) file" + "url": "external url to gtfs data (zip) file", + "extract_from": "Extract data from:" }, "description": "NOTE: with a new url/zip, this may take quite a bit of time, \n depending on file-size and system performance" }, @@ -54,7 +55,8 @@ "init": { "description": "Customize the way the integration works", "data": { - "refresh_interval": "Data refresh interval (in minutes)" + "refresh_interval": "Data refresh interval (in 
minutes)", + "real_time": "Setup Realtime integration? \n (needs data from the same source)" } }, "real_time": { @@ -65,5 +67,19 @@ } } } + }, + "selector": { + "extract_from": { + "options": { + "zip": "ZIP: expects a file in gtfs2-folder with below name, without extension .zip", + "url": "URL: uses your URL below, leave 'na' if using zip" + } + }, + "direction": { + "options": { + "0": "Outward", + "1": "Return" + } + } } } diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index cbd7b72..f6dde6e 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -10,9 +10,10 @@ "source": { "data": { "file": "Nom de la nouvelle source de données", - "url": "URL externe vers le fichier (zip) des données GTFS" + "url": "URL externe vers le fichier (zip) des données GTFS", + "extract_from": "Collecte données de:" }, - "description": "REMARQUE: avec une nouvelle URL/zip, cela peut prendre du temps après la soumission, \n selon la taille du fichier et performance du serveur" + "description": "REMARQUE: avec une nouvelle URL/zip, cela peut prendre du temps après la soumission, selon la taille du fichier et performance du serveur" }, "route": { "data": { @@ -54,9 +55,31 @@ "init": { "description": "Personnalisez le fonctionnement de l'intégration", "data": { - "refresh_interval": "Personnalisez le fonctionnement de l'intégration" + "refresh_interval": "Intervalle d'actualisation en minutes", + "real_time": "Ajoute intégration temps réel? \n (nécessite données de la même source)" } + }, + "real_time": { + "description": "URL vers données temps réel", + "data": { + "trip_update_url": "URL vers: trip data", + "vehicle_position_url": "URL vers: position véhicule (ou trip data)" + } + } + } + }, + "selector": { + "extract_from": { + "options": { + "zip": "ZIP: attend un fichier dans dossier 'gtfs2' avec le même nom, sans extension .zip", + "url": "URL: utilise l'URL below, laisse 'na' si zip" + } + }, + "direction": { + "options": { + "0": "Aller", + "1": "Retour" } } } -} \ No newline at end of file +} From 460c79df54f03a29c5b7601ddd5e07c49ccd9f1e Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 12 Nov 2023 17:03:06 +0100 Subject: [PATCH 07/42] Improving realtime stability, adding lat/long --- custom_components/gtfs2/__init__.py | 19 ++- custom_components/gtfs2/config_flow.py | 18 ++- custom_components/gtfs2/const.py | 34 +++++ custom_components/gtfs2/coordinator.py | 45 +++--- custom_components/gtfs2/gtfs_helper.py | 1 + custom_components/gtfs2/gtfs_rt_helper.py | 145 ++++++++++--------- custom_components/gtfs2/sensor.py | 30 ++-- custom_components/gtfs2/strings.json | 2 + custom_components/gtfs2/translations/en.json | 6 +- custom_components/gtfs2/translations/fr.json | 4 +- 10 files changed, 189 insertions(+), 115 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index 4e71276..d578282 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -27,8 +27,10 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options = {**config_entry.options} new_options['real_time'] = False new_options['refresh_interval'] = 15 + new_options['api_key'] = "" + new_options['x_api_key'] = "" - config_entry.version = 3 + config_entry.version = 4 hass.config_entries.async_update_entry(config_entry, data=new) hass.config_entries.async_update_entry(config_entry, 
options=new_options) @@ -36,9 +38,20 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options = {**config_entry.options} new_options['real_time'] = False + new_options['api_key'] = "" + new_options['x_api_key'] = "" - config_entry.version = 3 - hass.config_entries.async_update_entry(config_entry, options=new_options) + config_entry.version = 4 + hass.config_entries.async_update_entry(config_entry, options=new_options) + + if config_entry.version == 3: + + new_options = {**config_entry.options} + new_options['api_key'] = "" + new_options['x_api_key'] = "" + + config_entry.version = 4 + hass.config_entries.async_update_entry(config_entry, options=new_options) _LOGGER.debug("Migration to version %s successful", config_entry.version) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 0dd2c89..8eab881 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -11,7 +11,15 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import selector -from .const import DEFAULT_PATH, DOMAIN, DEFAULT_REFRESH_INTERVAL +from .const import ( + DEFAULT_PATH, + DOMAIN, + DEFAULT_REFRESH_INTERVAL, + CONF_API_KEY, + CONF_X_API_KEY, + CONF_VEHICLE_POSITION_URL, + CONF_TRIP_UPDATE_URL +) from .gtfs_helper import ( get_gtfs, @@ -35,7 +43,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for GTFS.""" - VERSION = 3 + VERSION = 4 def __init__(self) -> None: """Init ConfigFlow.""" @@ -284,8 +292,10 @@ async def async_step_real_time( step_id="real_time", data_schema=vol.Schema( { - vol.Required("trip_update_url", default=self.config_entry.options.get("trip_update_url")): str, - vol.Required("vehicle_position_url", default=self.config_entry.options.get("vehicle_position_url")): str, + vol.Required(CONF_TRIP_UPDATE_URL, default=self.config_entry.options.get(CONF_TRIP_UPDATE_URL)): str, + vol.Required(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL)): str, + vol.Optional(CONF_API_KEY, default="na"): str, + vol.Optional(CONF_X_API_KEY,default="na"): str }, ), errors=errors, diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index 865b814..bae3ae7 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -236,3 +236,37 @@ WHEELCHAIR_BOARDING_DEFAULT = STATE_UNKNOWN WHEELCHAIR_BOARDING_OPTIONS = {1: True, 2: False} +#gtfs_rt +ATTR_STOP_ID = "Stop ID" +ATTR_ROUTE = "Route" +ATTR_DIRECTION_ID = "Direction ID" +ATTR_DUE_IN = "Due in" +ATTR_DUE_AT = "Due at" +ATTR_NEXT_UP = "Next Service" +ATTR_ICON = "Icon" +ATTR_UNIT_OF_MEASUREMENT = "unit_of_measurement" +ATTR_DEVICE_CLASS = "device_class" +ATTR_LATITUDE = "latitude" +ATTR_LONGITUDE = "longitude" +ATTR_RT_UPDATED_AT = "gtfs_rt_updated_at" + +CONF_API_KEY = "api_key" +CONF_X_API_KEY = "x_api_key" +CONF_STOP_ID = "stopid" +CONF_ROUTE = "route" +CONF_DIRECTION_ID = "directionid" +CONF_DEPARTURES = "departures" +CONF_TRIP_UPDATE_URL = "trip_update_url" +CONF_VEHICLE_POSITION_URL = "vehicle_position_url" +CONF_ROUTE_DELIMITER = "route_delimiter" +CONF_ICON = "icon" +CONF_SERVICE_TYPE = "service_type" +CONF_RELATIVE_TIME = "show_relative_time" + +DEFAULT_SERVICE = "Service" +DEFAULT_ICON = "mdi:bus" +DEFAULT_DIRECTION = "0" + +TIME_STR_FORMAT = "%H:%M" + + diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 8614b10..3c3fcdd 100644 --- 
a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -10,7 +10,16 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator import homeassistant.util.dt as dt_util -from .const import DEFAULT_PATH, DEFAULT_REFRESH_INTERVAL +from .const import ( + DEFAULT_PATH, + DEFAULT_REFRESH_INTERVAL, + CONF_API_KEY, + CONF_X_API_KEY, + ATTR_DUE_IN, + ATTR_LATITUDE, + ATTR_LONGITUDE, + ATTR_RT_UPDATED_AT +) from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index from .gtfs_rt_helper import get_rt_route_statuses, get_next_services @@ -40,14 +49,14 @@ async def _async_update_data(self) -> dict[str, str]: """Get the latest data from GTFS and GTFS relatime, depending refresh interval""" data = self.config_entry.data options = self.config_entry.options + self._pygtfs = get_gtfs( self.hass, DEFAULT_PATH, data, False ) previous_data = None if self.data is None else self.data.copy() - _LOGGER.debug("Previous data: %s", previous_data) - + if previous_data is not None and (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.now().replace(tzinfo=None): - _LOGGER.debug("Do nothing") + # do nothing awaiting refresh interval self._data = previous_data if previous_data is None or (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) < dt_util.now().replace(tzinfo=None): @@ -74,41 +83,39 @@ async def _async_update_data(self) -> dict[str, str]: return None _LOGGER.debug("GTFS coordinator data from helper: %s", self._data["next_departure"]) - + # collect and return rt attributes + # STILL REQUIRES A SOLUTION IF TIMING OUT + self._data["next_departure"]["next_departure_realtime_attr"] = {ATTR_RT_UPDATED_AT: "", ATTR_DUE_IN: "-", ATTR_LATITUDE: "", ATTR_LONGITUDE: ""} if "real_time" in options: if options["real_time"]: - + self._get_next_service = {} """Initialize the info object.""" self._trip_update_url = options["trip_update_url"] self._vehicle_position_url = options["vehicle_position_url"] self._route_delimiter = "-" - # if options["CONF_API_KEY"] is not None: - # self._headers = {"Authorization": options["CONF_API_KEY"]} - # elif options["CONF_X_API_KEY"] is not None: - # self._headers = {"x-api-key": options["CONF_X_API_KEY"]} - # else: - # self._headers = None + if CONF_API_KEY in options: + self._headers = {"Authorization": options[CONF_API_KEY]} + elif CONF_X_API_KEY in options: + self._headers = {"x-api-key": options[CONF_X_API_KEY]} + else: + self._headers = None self._headers = None self.info = {} self._route_id = self._data["next_departure"]["route_id"] self._stop_id = data["origin"].split(": ")[0] self._direction = data["direction"] self._relative = False - #_LOGGER.debug("GTFS RT: Realtime data: %s", self._data) try: self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs realtime data: %s", ex) self._get_next_service = "error" + self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service + self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) else: - _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") - self._get_next_service = "n.a." 
+ _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") else: _LOGGER.debug("GTFS RT: RealTime not selected in entity options") - self._get_next_service = "n.a." - self._data["next_departure"]["next_departure_realtime"] = self._get_next_service - self._data["next_departure"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) - return self._data diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 60fa7f8..ec42e13 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -300,6 +300,7 @@ def get_next_departure(self): "next_departures_lines": timetable_remaining_line, "next_departures_headsign": timetable_remaining_headsign, "gtfs_updated_at": dt_util.now().replace(tzinfo=None), + "next_departure_realtime_attr": {}, } diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 5d9940d..62efd93 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -13,63 +13,36 @@ _LOGGER = logging.getLogger(__name__) -ATTR_STOP_ID = "Stop ID" -ATTR_ROUTE = "Route" -ATTR_DIRECTION_ID = "Direction ID" -ATTR_DUE_IN = "Due in" -ATTR_DUE_AT = "Due at" -ATTR_NEXT_UP = "Next Service" -ATTR_ICON = "Icon" -ATTR_UNIT_OF_MEASUREMENT = "unit_of_measurement" -ATTR_DEVICE_CLASS = "device_class" - -CONF_API_KEY = "api_key" -CONF_X_API_KEY = "x_api_key" -CONF_STOP_ID = "stopid" -CONF_ROUTE = "route" -CONF_DIRECTION_ID = "directionid" -CONF_DEPARTURES = "departures" -CONF_TRIP_UPDATE_URL = "trip_update_url" -CONF_VEHICLE_POSITION_URL = "vehicle_position_url" -CONF_ROUTE_DELIMITER = "route_delimiter" -CONF_ICON = "icon" -CONF_SERVICE_TYPE = "service_type" -CONF_RELATIVE_TIME = "show_relative_time" - -DEFAULT_SERVICE = "Service" -DEFAULT_ICON = "mdi:bus" -DEFAULT_DIRECTION = "0" - -MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60) -TIME_STR_FORMAT = "%H:%M" - -PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( - { - vol.Required(CONF_TRIP_UPDATE_URL): cv.string, - vol.Optional(CONF_API_KEY): cv.string, - vol.Optional(CONF_X_API_KEY): cv.string, - vol.Optional(CONF_VEHICLE_POSITION_URL): cv.string, - vol.Optional(CONF_ROUTE_DELIMITER): cv.string, - - vol.Optional(CONF_DEPARTURES): [ - { - vol.Required(CONF_NAME): cv.string, - vol.Required(CONF_STOP_ID): cv.string, - vol.Required(CONF_ROUTE): cv.string, - vol.Optional(CONF_RELATIVE_TIME, default=True): cv.boolean, - vol.Optional( - CONF_DIRECTION_ID, - default=DEFAULT_DIRECTION, # type: ignore - ): str, - vol.Optional( - CONF_ICON, default=DEFAULT_ICON # type: ignore - ): cv.string, - vol.Optional( - CONF_SERVICE_TYPE, default=DEFAULT_SERVICE # type: ignore - ): cv.string, - } - ], - } +from .const import ( + + ATTR_STOP_ID, + ATTR_ROUTE, + ATTR_DIRECTION_ID, + ATTR_DUE_IN, + ATTR_DUE_AT, + ATTR_NEXT_UP, + ATTR_ICON, + ATTR_UNIT_OF_MEASUREMENT, + ATTR_DEVICE_CLASS, + + CONF_API_KEY, + CONF_X_API_KEY, + CONF_STOP_ID, + CONF_ROUTE, + CONF_DIRECTION_ID, + CONF_DEPARTURES, + CONF_TRIP_UPDATE_URL, + CONF_VEHICLE_POSITION_URL, + CONF_ROUTE_DELIMITER, + CONF_ICON, + CONF_SERVICE_TYPE, + CONF_RELATIVE_TIME, + + DEFAULT_SERVICE, + DEFAULT_ICON, + DEFAULT_DIRECTION, + + TIME_STR_FORMAT ) def due_in_minutes(timestamp): @@ -98,6 +71,7 @@ def log_debug(data: list, indent_level: int) -> None: def get_gtfs_feed_entities(url: str, headers, label: str): + _LOGGER.debug(f"GTFS RT get_feed_entities for url: {url} , headers: {headers}, label: {label}") feed = gtfs_realtime_pb2.FeedMessage() # type: 
ignore # TODO add timeout to requests call @@ -114,21 +88,17 @@ def get_gtfs_feed_entities(url: str, headers, label: str): 0, ) - feed.ParseFromString(response.content) + feed.ParseFromString(response.content) + return feed.entity - - -## reworked for gtfs2 - def get_next_services(self): self.data = self._get_rt_route_statuses self._stop = self._stop_id self._route = self._route_id self._direction = self._direction - _LOGGER.debug("Get Next Services, route/direction/stop: %s", self.data.get(self._route, {}).get(self._direction, {}).get(self._stop, [])) - next_services = self.data.get(self._route, {}).get(self._direction, {}).get(self._stop, []) + if self.hass.config.time_zone is None: _LOGGER.error("Timezone is not set in Home Assistant configuration") timezone = "UTC" @@ -136,23 +106,61 @@ def get_next_services(self): timezone=dt_util.get_time_zone(self.hass.config.time_zone) if self._relative : - return ( + due_in = ( due_in_minutes(next_services[0].arrival_time) if len(next_services) > 0 else "-" ) else: - return ( + due_in = ( next_services[0].arrival_time.replace(tzinfo=timezone) if len(next_services) > 0 else "-" ) + attrs = { + ATTR_DUE_IN: due_in, + ATTR_STOP_ID: self._stop, + ATTR_ROUTE: self._route, + ATTR_DIRECTION_ID: self._direction, + } + if len(next_services) > 0: + attrs[ATTR_DUE_AT] = ( + next_services[0].arrival_time.strftime(TIME_STR_FORMAT) + if len(next_services) > 0 + else "-" + ) + attrs[ATTR_LATITUDE] = "" + attrs[ATTR_LONGITUDE] = "" + if next_services[0].position: + attrs[ATTR_LATITUDE] = next_services[0].position.latitude + attrs[ATTR_LONGITUDE] = next_services[0].position.longitude + if len(next_services) > 1: + attrs[ATTR_NEXT_UP] = ( + next_services[1].arrival_time.strftime(TIME_STR_FORMAT) + if len(next_services) > 1 + else "-" + ) + if self._relative : + attrs[ATTR_UNIT_OF_MEASUREMENT] = "min" + else : + attrs[ATTR_DEVICE_CLASS] = ( + "timestamp" + if len(next_services) > 0 + else "" + ) + + _LOGGER.debug("GTFS RT next services attributes: %s", attrs) + return attrs + def get_rt_route_statuses(self): - + vehicle_positions = {} - + if self._vehicle_position_url != "" : + vehicle_positions = get_rt_vehicle_positions(self) + + class StopDetails: def __init__(self, arrival_time, position): self.arrival_time = arrival_time @@ -283,7 +291,6 @@ def get_rt_vehicle_positions(self): headers=self._headers, label="vehicle positions", ) - for entity in feed_entities: vehicle = entity.vehicle diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 102120c..df42d4d 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -361,36 +361,32 @@ def _update_attrs(self): # noqa: C901 PLR0911 prefix = "next_departures" self._attributes["next_departures"] = [] if self._next_departures: - self._attributes["next_departures"] = self._departure["next_departures"][ - :10 - ] + self._attributes["next_departures"] = self._departure[ + "next_departures"][:10] # Add next departures with their lines prefix = "next_departures_lines" self._attributes["next_departures_lines"] = [] if self._next_departures: self._attributes["next_departures_lines"] = self._departure[ - "next_departures_lines" - ][:10] + "next_departures_lines"][:10] # Add next departures with their headsign prefix = "next_departures_headsign" self._attributes["next_departures_headsign"] = [] if self._next_departures: self._attributes["next_departures_headsign"] = self._departure[ - "next_departures_headsign" - ][:10] + "next_departures_headsign"][:10] + + 
self._attributes["gtfs_updated_at"] = self._departure[ + "gtfs_updated_at"] - # Add next departure realtime - self._attributes["next_departure_realtime"] = self._departure[ - "next_departure_realtime" - ] - self._attributes["gtfs_rt_updated_at"] = self._departure[ - "gtfs_rt_updated_at" - ] + _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) + # Add next departure realtime + self._attributes["gtfs_rt_updated_at"] = self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] + self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] + self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] + self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] - self._attributes["gtfs_updated_at"] = self._departure[ - "gtfs_updated_at" - ] self._attr_extra_state_attributes = self._attributes return self._attr_extra_state_attributes diff --git a/custom_components/gtfs2/strings.json b/custom_components/gtfs2/strings.json index d24a0e0..24a2b8b 100644 --- a/custom_components/gtfs2/strings.json +++ b/custom_components/gtfs2/strings.json @@ -67,6 +67,8 @@ "data": { "trip_update_url": "URL to trip data", "vehicle_position_url": "URL to vehicle position (can be the same as trip data)" + "api_key": "API key, if required", + "x_api_key": "X_API key, if required" } } } diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 136f371..ab713f3 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -63,7 +63,9 @@ "description": "Provide url to real time API", "data": { "trip_update_url": "URL to trip data", - "vehicle_position_url": "URL to vehicle position (can be the same as trip data)" + "vehicle_position_url": "URL to vehicle position (or same as trip data)", + "api_key": "API key, if required", + "x_api_key": "X_API key, if required" } } } @@ -77,7 +79,7 @@ }, "direction": { "options": { - "0": "Outward", + "0": "Outbound", "1": "Return" } } diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index f6dde6e..06b1e4b 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -63,7 +63,9 @@ "description": "URL vers données temps réel", "data": { "trip_update_url": "URL vers: trip data", - "vehicle_position_url": "URL vers: position véhicule (ou trip data)" + "vehicle_position_url": "URL vers: position véhicule (ou trip data)", + "api_key": "API key, si nécessaire", + "x_api_key": "X_API key, si nécessaire" } } } From e86d616b56e2707ed33e4f5b29b3723a4a7c65d2 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 12 Nov 2023 17:09:16 +0100 Subject: [PATCH 08/42] Fix strings. 
json --- custom_components/gtfs2/strings.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/gtfs2/strings.json b/custom_components/gtfs2/strings.json index 24a2b8b..d8110e1 100644 --- a/custom_components/gtfs2/strings.json +++ b/custom_components/gtfs2/strings.json @@ -66,7 +66,7 @@ "description": "Provide url to real time API", "data": { "trip_update_url": "URL to trip data", - "vehicle_position_url": "URL to vehicle position (can be the same as trip data)" + "vehicle_position_url": "URL to vehicle position (can be the same as trip data)", "api_key": "API key, if required", "x_api_key": "X_API key, if required" } From 99aa94e650dff81bd552f7db9e5f98674002303c Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Thu, 16 Nov 2023 12:40:48 +0100 Subject: [PATCH 09/42] Various - improve constants - datetime attributes to UTC / ISO --- custom_components/gtfs2/config_flow.py | 4 ++-- custom_components/gtfs2/coordinator.py | 10 ++++----- custom_components/gtfs2/gtfs_helper.py | 27 +++++++++++++++-------- custom_components/gtfs2/gtfs_rt_helper.py | 12 +++++++--- custom_components/gtfs2/sensor.py | 7 +++--- 5 files changed, 37 insertions(+), 23 deletions(-) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 8eab881..45fb28a 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -294,8 +294,8 @@ async def async_step_real_time( { vol.Required(CONF_TRIP_UPDATE_URL, default=self.config_entry.options.get(CONF_TRIP_UPDATE_URL)): str, vol.Required(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL)): str, - vol.Optional(CONF_API_KEY, default="na"): str, - vol.Optional(CONF_X_API_KEY,default="na"): str + vol.Optional(CONF_API_KEY, default=self.config_entry.options.get(CONF_API_KEY, "na")): str, + vol.Optional(CONF_X_API_KEY,default=self.config_entry.options.get(CONF_X_API_KEY, "na")): str }, ), errors=errors, diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 3c3fcdd..dbcf5ea 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -54,7 +54,7 @@ async def _async_update_data(self) -> dict[str, str]: self.hass, DEFAULT_PATH, data, False ) previous_data = None if self.data is None else self.data.copy() - + if previous_data is not None and (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.now().replace(tzinfo=None): # do nothing awaiting refresh interval self._data = previous_data @@ -85,7 +85,6 @@ async def _async_update_data(self) -> dict[str, str]: # collect and return rt attributes # STILL REQUIRES A SOLUTION IF TIMING OUT - self._data["next_departure"]["next_departure_realtime_attr"] = {ATTR_RT_UPDATED_AT: "", ATTR_DUE_IN: "-", ATTR_LATITUDE: "", ATTR_LONGITUDE: ""} if "real_time" in options: if options["real_time"]: self._get_next_service = {} @@ -108,13 +107,12 @@ async def _async_update_data(self) -> dict[str, str]: try: self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) + self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service + self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) except 
Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs realtime data: %s", ex) - self._get_next_service = "error" - self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service - self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) else: - _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") + _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") else: _LOGGER.debug("GTFS RT: RealTime not selected in entity options") return self._data diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index ec42e13..6f40667 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -208,16 +208,16 @@ def get_next_departure(self): timetable_remaining = [] for key in sorted(timetable.keys()): if datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S") > now: - timetable_remaining.append(key) + timetable_remaining.append(dt_util.as_utc(datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S")).isoformat()) _LOGGER.debug( "Timetable Remaining Departures on this Start/Stop: %s", timetable_remaining ) # create upcoming timetable with line info timetable_remaining_line = [] - for key2, value in sorted(timetable.items()): - if datetime.datetime.strptime(key2, "%Y-%m-%d %H:%M:%S") > now: + for key, value in sorted(timetable.items()): + if datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S") > now: timetable_remaining_line.append( - str(key2) + " (" + str(value["route_long_name"]) + ")" + str(dt_util.as_utc(datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S")).isoformat()) + " (" + str(value["route_long_name"]) + ")" ) _LOGGER.debug( "Timetable Remaining Departures on this Start/Stop, per line: %s", @@ -225,10 +225,10 @@ def get_next_departure(self): ) # create upcoming timetable with headsign timetable_remaining_headsign = [] - for key2, value in sorted(timetable.items()): - if datetime.datetime.strptime(key2, "%Y-%m-%d %H:%M:%S") > now: + for key, value in sorted(timetable.items()): + if datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S") > now: timetable_remaining_headsign.append( - str(key2) + " (" + str(value["trip_headsign"]) + ")" + str(dt_util.as_utc(datetime.datetime.strptime(key, "%Y-%m-%d %H:%M:%S")).isoformat()) + " (" + str(value["trip_headsign"]) + ")" ) _LOGGER.debug( "Timetable Remaining Departures on this Start/Stop, with headsign: %s", @@ -260,9 +260,18 @@ def get_next_departure(self): dest_depart_time = ( f"{dest_depart.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_depart_time']}" ) - - depart_time = dt_util.parse_datetime(origin_depart_time).replace(tzinfo=timezone) + # align on timezone + #_LOGGER.error("dest_depart NEW: %s", dt_util.as_utc(datetime.datetime.strptime(dest_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat()) + depart_time = dt_util.parse_datetime(origin_depart_time).replace(tzinfo=timezone) arrival_time = dt_util.parse_datetime(dest_arrival_time).replace(tzinfo=timezone) + #_LOGGER.error("dest_depart: %s", dest_depart) + #_LOGGER.error("depart_time: %s", depart_time) + origin_arrival_time = dt_util.as_utc(datetime.datetime.strptime(origin_arrival_time, "%Y-%m-%d %H:%M:%S")).isoformat() + origin_depart_time = dt_util.as_utc(datetime.datetime.strptime(origin_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat() + dest_arrival_time = dt_util.as_utc(datetime.datetime.strptime(dest_arrival_time, "%Y-%m-%d %H:%M:%S")).isoformat() + dest_depart_time = 
dt_util.as_utc(datetime.datetime.strptime(dest_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat() + + #_LOGGER.error("dtutil now: %s", dt_util.now()) origin_stop_time = { "Arrival Time": origin_arrival_time, diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 62efd93..a07f565 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -24,6 +24,8 @@ ATTR_ICON, ATTR_UNIT_OF_MEASUREMENT, ATTR_DEVICE_CLASS, + ATTR_LATITUDE, + ATTR_LONGITUDE, CONF_API_KEY, CONF_X_API_KEY, @@ -113,7 +115,7 @@ def get_next_services(self): ) else: due_in = ( - next_services[0].arrival_time.replace(tzinfo=timezone) + dt_util.as_utc(next_services[0].arrival_time) if len(next_services) > 0 else "-" ) @@ -123,6 +125,8 @@ def get_next_services(self): ATTR_STOP_ID: self._stop, ATTR_ROUTE: self._route, ATTR_DIRECTION_ID: self._direction, + ATTR_LATITUDE: "", + ATTR_LONGITUDE: "" } if len(next_services) > 0: attrs[ATTR_DUE_AT] = ( @@ -130,8 +134,6 @@ def get_next_services(self): if len(next_services) > 0 else "-" ) - attrs[ATTR_LATITUDE] = "" - attrs[ATTR_LONGITUDE] = "" if next_services[0].position: attrs[ATTR_LATITUDE] = next_services[0].position.latitude attrs[ATTR_LONGITUDE] = next_services[0].position.longitude @@ -301,6 +303,10 @@ def get_rt_vehicle_positions(self): [ "Adding position for trip ID", vehicle.trip.trip_id, + "route ID", + vehicle.trip.route_id, + "direction ID", + vehicle.trip.direction_id, "position latitude", vehicle.position.latitude, "longitude", diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index df42d4d..668e0fd 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -381,11 +381,12 @@ def _update_attrs(self): # noqa: C901 PLR0911 "gtfs_updated_at"] _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) - # Add next departure realtime + # Add next departure realtime to the right level self._attributes["gtfs_rt_updated_at"] = self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] - self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] - self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] + self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] + self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] + self._attr_extra_state_attributes = self._attributes return self._attr_extra_state_attributes From 94e850e2005517bbed452816627717a17c29a389 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Thu, 16 Nov 2023 17:12:41 +0100 Subject: [PATCH 10/42] TZ update on attributes and bugfix for new setup --- custom_components/gtfs2/coordinator.py | 9 +++++---- custom_components/gtfs2/gtfs_helper.py | 2 +- custom_components/gtfs2/sensor.py | 11 ++++++----- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index dbcf5ea..4466574 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -1,6 +1,7 @@ """Data Update coordinator for the GTFS integration.""" from __future__ import annotations +import datetime from datetime import timedelta import logging @@ -54,12 +55,12 @@ async 
def _async_update_data(self) -> dict[str, str]: self.hass, DEFAULT_PATH, data, False ) previous_data = None if self.data is None else self.data.copy() - - if previous_data is not None and (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.now().replace(tzinfo=None): + + if previous_data is not None and (datetime.datetime.strptime(previous_data["next_departure"]["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) : # do nothing awaiting refresh interval self._data = previous_data - if previous_data is None or (previous_data["next_departure"]["gtfs_updated_at"] + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) < dt_util.now().replace(tzinfo=None): + if previous_data is None or (datetime.datetime.strptime(previous_data["next_departure"]["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) < dt_util.utcnow() + timedelta(seconds=1): self._data = { "schedule": self._pygtfs, "origin": data["origin"].split(": ")[0], @@ -108,7 +109,7 @@ async def _async_update_data(self) -> dict[str, str]: self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service - self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.now().replace(tzinfo=None) + self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.utcnow() except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs realtime data: %s", ex) else: diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 6f40667..afea166 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -308,7 +308,7 @@ def get_next_departure(self): "next_departures": timetable_remaining, "next_departures_lines": timetable_remaining_line, "next_departures_headsign": timetable_remaining_headsign, - "gtfs_updated_at": dt_util.now().replace(tzinfo=None), + "gtfs_updated_at": dt_util.utcnow().isoformat(), "next_departure_realtime_attr": {}, } diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 668e0fd..e172b23 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -381,11 +381,12 @@ def _update_attrs(self): # noqa: C901 PLR0911 "gtfs_updated_at"] _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) - # Add next departure realtime to the right level - self._attributes["gtfs_rt_updated_at"] = self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] - self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] - self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] - self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] + # Add next departure realtime to the right level, only if populated + if "gtfs_rt_updated_at" in self._departure["next_departure_realtime_attr"]: + self._attributes["gtfs_rt_updated_at"] = 
self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] + self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] + self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] + self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] self._attr_extra_state_attributes = self._attributes From b4837ccb8c23063acc6324655821bed99bfdc592 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Fri, 17 Nov 2023 17:22:01 +0100 Subject: [PATCH 11/42] Initial version of geojson data creates a geojson file in config/www for use in map-card --- custom_components/gtfs2/const.py | 1 + custom_components/gtfs2/coordinator.py | 6 ++- custom_components/gtfs2/gtfs_rt_helper.py | 45 +++++++++++++++++++++-- 3 files changed, 48 insertions(+), 4 deletions(-) diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index bae3ae7..5f5db6d 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -8,6 +8,7 @@ DEFAULT_NAME = "GTFS Sensor2" DEFAULT_PATH = "gtfs2" +DEFAULT_PATH_GEOJSON = "www" CONF_DATA = "data" CONF_DESTINATION = "destination" diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 4466574..689e57b 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -101,7 +101,11 @@ async def _async_update_data(self) -> dict[str, str]: self._headers = None self._headers = None self.info = {} - self._route_id = self._data["next_departure"]["route_id"] + try: + self._route_id = self._data["next_departure"]["route_id"] + except Exception as ex: # pylint: disable=broad-except + _LOGGER.error("Error getting entity route_id for realtime data, for origin: %s with error: %s", data["origin"], ex) + self._route_id = data["route"].split(": ")[0] self._stop_id = data["origin"].split(": ")[0] self._direction = data["direction"] self._relative = False diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index a07f565..f4b04c4 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -1,5 +1,7 @@ import logging from datetime import datetime, timedelta +import json +import os import homeassistant.helpers.config_validation as cv import homeassistant.util.dt as dt_util @@ -43,6 +45,8 @@ DEFAULT_SERVICE, DEFAULT_ICON, DEFAULT_DIRECTION, + DEFAULT_PATH, + DEFAULT_PATH_GEOJSON, TIME_STR_FORMAT ) @@ -293,9 +297,12 @@ def get_rt_vehicle_positions(self): headers=self._headers, label="vehicle positions", ) + #_LOGGER.error("GTFS RT feed entities: %s", feed_entities) + geojson_body = [] + geojson_element = {"geometry": {"coordinates":[],"type": "Point"}, "properties": {"id": "", "title": "", "trip_id": "", "route_id": "", "direction_id": "", "vehicle_id": "", "vehicle_label": ""}, "type": "Feature"} for entity in feed_entities: vehicle = entity.vehicle - + if not vehicle.trip.trip_id: # Vehicle is not in service continue @@ -314,8 +321,40 @@ def get_rt_vehicle_positions(self): ], 2, ) + positions[vehicle.trip.trip_id] = vehicle.position + + #construct geojson only for configured rout/direction + if str(self._route_id) == str(vehicle.trip.route_id) and str(self._direction) == str(vehicle.trip.direction_id): + geojson_element = {"geometry": {"coordinates":[],"type": "Point"}, "properties": {"id": "", "title": "", "trip_id": "", "route_id": "", 
"direction_id": "", "vehicle_id": "", "vehicle_label": ""}, "type": "Feature"} + geojson_element["geometry"]["coordinates"] = [] + geojson_element["geometry"]["coordinates"].append(vehicle.position.longitude) + geojson_element["geometry"]["coordinates"].append(vehicle.position.latitude) + geojson_element["properties"]["id"] = vehicle.trip.trip_id + geojson_element["properties"]["title"] = vehicle.trip.trip_id + geojson_element["properties"]["trip_id"] = vehicle.trip.trip_id + geojson_element["properties"]["route_id"] = vehicle.trip.route_id + geojson_element["properties"]["direction_id"] = vehicle.trip.direction_id + geojson_element["properties"]["vehicle_id"] = "tbd" + geojson_element["properties"]["vehicle_label"] = "tbd" + geojson_body.append(geojson_element) + + self.geojson = {"features": geojson_body, "type": "FeatureCollection"} + - positions[vehicle.trip.trip_id] = vehicle.position - + #_LOGGER.error("GTFS RT Positions: %s", positions) + _LOGGER.error("GTFS RT geojson body: %s", json.dumps(self.geojson)) + self._route_dir = self._route_id + "_" + self._direction + update_geojson(self) return positions + + +def update_geojson(self): + geojson_dir = self.hass.config.path(DEFAULT_PATH_GEOJSON) + os.makedirs(geojson_dir, exist_ok=True) + file = os.path.join(geojson_dir, self._route_dir + ".json") + #_LOGGER.error("gtfs geojson file: %s", file) + with open(file, "w") as outfile: + json.dump(self.geojson, outfile) + + From 9e78a23e268cb2d25a5b382c6143211d0dc82684 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 10:12:18 +0100 Subject: [PATCH 12/42] Fix service and align vehicle position to geojson --- custom_components/gtfs2/__init__.py | 2 +- custom_components/gtfs2/gtfs_rt_helper.py | 31 +++++++++-------------- custom_components/gtfs2/manifest.json | 2 +- custom_components/gtfs2/services.yaml | 21 ++++++++++++--- 4 files changed, 31 insertions(+), 25 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index d578282..0fec6b4 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -94,7 +94,7 @@ def setup(hass, config): def update_gtfs(call): """My GTFS service.""" _LOGGER.debug("Updating GTFS with: %s", call.data) - get_gtfs(hass, DEFAULT_PATH, call.data["name"], call.data["url"], True) + get_gtfs(hass, DEFAULT_PATH, call.data, True) return True hass.services.register( diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index f4b04c4..6c295ad 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -138,9 +138,9 @@ def get_next_services(self): if len(next_services) > 0 else "-" ) - if next_services[0].position: - attrs[ATTR_LATITUDE] = next_services[0].position.latitude - attrs[ATTR_LONGITUDE] = next_services[0].position.longitude + if next_services[0].position[0]: + attrs[ATTR_LATITUDE] = next_services[0].position[0][1] + attrs[ATTR_LONGITUDE] = next_services[0].position[0][0] if len(next_services) > 1: attrs[ATTR_NEXT_UP] = ( next_services[1].arrival_time.strftime(TIME_STR_FORMAT) @@ -165,8 +165,7 @@ def get_rt_route_statuses(self): if self._vehicle_position_url != "" : vehicle_positions = get_rt_vehicle_positions(self) - - + class StopDetails: def __init__(self, arrival_time, position): self.arrival_time = arrival_time @@ -270,9 +269,7 @@ def __init__(self, arrival_time, position): details = StopDetails( datetime.fromtimestamp(stop_time), - 
vehicle_positions.get( - entity.trip_update.trip.trip_id - ), + [d["properties"].get(entity.trip_update.trip.trip_id) for d in vehicle_positions], ) departure_times[route_id][direction_id][ stop_id @@ -291,13 +288,11 @@ def __init__(self, arrival_time, position): return departure_times def get_rt_vehicle_positions(self): - positions = {} feed_entities = get_gtfs_feed_entities( url=self._vehicle_position_url, headers=self._headers, label="vehicle positions", ) - #_LOGGER.error("GTFS RT feed entities: %s", feed_entities) geojson_body = [] geojson_element = {"geometry": {"coordinates":[],"type": "Point"}, "properties": {"id": "", "title": "", "trip_id": "", "route_id": "", "direction_id": "", "vehicle_id": "", "vehicle_label": ""}, "type": "Feature"} for entity in feed_entities: @@ -320,8 +315,7 @@ def get_rt_vehicle_positions(self): vehicle.position.longitude, ], 2, - ) - positions[vehicle.trip.trip_id] = vehicle.position + ) #construct geojson only for configured rout/direction if str(self._route_id) == str(vehicle.trip.route_id) and str(self._direction) == str(vehicle.trip.direction_id): @@ -329,30 +323,29 @@ def get_rt_vehicle_positions(self): geojson_element["geometry"]["coordinates"] = [] geojson_element["geometry"]["coordinates"].append(vehicle.position.longitude) geojson_element["geometry"]["coordinates"].append(vehicle.position.latitude) - geojson_element["properties"]["id"] = vehicle.trip.trip_id - geojson_element["properties"]["title"] = vehicle.trip.trip_id + geojson_element["properties"]["id"] = str(vehicle.trip.route_id) + "(" + str(vehicle.trip.direction_id) + ")" + geojson_element["properties"]["title"] = str(vehicle.trip.route_id) + "(" + str(vehicle.trip.direction_id) + ")" geojson_element["properties"]["trip_id"] = vehicle.trip.trip_id geojson_element["properties"]["route_id"] = vehicle.trip.route_id geojson_element["properties"]["direction_id"] = vehicle.trip.direction_id geojson_element["properties"]["vehicle_id"] = "tbd" geojson_element["properties"]["vehicle_label"] = "tbd" + geojson_element["properties"][vehicle.trip.trip_id] = geojson_element["geometry"]["coordinates"] geojson_body.append(geojson_element) self.geojson = {"features": geojson_body, "type": "FeatureCollection"} - - #_LOGGER.error("GTFS RT Positions: %s", positions) - _LOGGER.error("GTFS RT geojson body: %s", json.dumps(self.geojson)) + _LOGGER.debug("GTFS RT geojson: %s", json.dumps(self.geojson)) self._route_dir = self._route_id + "_" + self._direction update_geojson(self) - return positions + return geojson_body def update_geojson(self): geojson_dir = self.hass.config.path(DEFAULT_PATH_GEOJSON) os.makedirs(geojson_dir, exist_ok=True) file = os.path.join(geojson_dir, self._route_dir + ".json") - #_LOGGER.error("gtfs geojson file: %s", file) + _LOGGER.debug("gtfs geojson file: %s", file) with open(file, "w") as outfile: json.dump(self.geojson, outfile) diff --git a/custom_components/gtfs2/manifest.json b/custom_components/gtfs2/manifest.json index 8dbe4c3..63fac40 100644 --- a/custom_components/gtfs2/manifest.json +++ b/custom_components/gtfs2/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "issue_tracker": "https://github.com/vingerha/gtfs2/issues", "requirements": ["pygtfs==0.1.9","gtfs-realtime-bindings==1.0.0"], - "version": "0.1.5" + "version": "0.1.6" } diff --git a/custom_components/gtfs2/services.yaml b/custom_components/gtfs2/services.yaml index 379675d..00d2629 100644 --- a/custom_components/gtfs2/services.yaml +++ b/custom_components/gtfs2/services.yaml @@ -1,19 +1,32 @@ # 
Describes the format for available ADS services update_gtfs: name: Update GTFS Data - description: Collects a new gtfs zip and unpacks it to sqlite + description: Unpacks source to gtfs-db fields: - name: + extract_from: + name: Indicate source of the data + description: Select if you update from url or zip + required: true + example: "url" + default: "url" + selector: + select: + translation_key: "extract_from" + options: + - "url" + - "zip" + file: name: Name of the transport service, without .zip description: If you use the same name as an existing one, the existing one will be overwritten required: true example: "mytransportservice" selector: - text: + text: url: name: URL description: provide the full path to the zip file itself required: true + default: "na" example: "https://path-to-my-zip-file-location/filename.zip" selector: - text: + text:
From c3c55b15d054282ab7d549d7851827b0e55a1348 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 10:47:44 +0100 Subject: [PATCH 13/42] Fix error in case no positions --- custom_components/gtfs2/gtfs_rt_helper.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 6c295ad..a903ea4 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -138,9 +138,10 @@ def get_next_services(self): if len(next_services) > 0 else "-" ) - if next_services[0].position[0]: - attrs[ATTR_LATITUDE] = next_services[0].position[0][1] - attrs[ATTR_LONGITUDE] = next_services[0].position[0][0] + if next_services[0].position: + if next_services[0].position[0]: + attrs[ATTR_LATITUDE] = next_services[0].position[0][1] + attrs[ATTR_LONGITUDE] = next_services[0].position[0][0] if len(next_services) > 1: attrs[ATTR_NEXT_UP] = ( next_services[1].arrival_time.strftime(TIME_STR_FORMAT)
From 9fdcdaa6348bdd439383230f0e7e994eef788c28 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 12:37:02 +0100 Subject: [PATCH 14/42] Update README.md --- README.md | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 83b098a..fdf3bf2 100644 --- a/README.md +++ b/README.md @@ -17,11 +17,21 @@ Core GTFS uses start + stop, it then determines every option between them and pr ***Solution/workaround in GTFS2***: attribute added: next_departure_line shows all next departures with their line/means-of-transport. So even if you select a route first and then two stops, the attributes will still show alternatives between those 2 stops, if applicable. ## Updates -- 20231104: initial version - -## ToDo's +202311DD +- realtime vehicle tracking with geojson output +- workflow tweaks +- extend update service call +20231110: adding features: +- new attribute: next_departure_headsigns +- adding route shortname in selection/list to overcome data discrepancies between short name and long name +- for new datasource, allow to use a self-placed zip file in the gtfs2 folder. This is for zips that are not available via URL, or zips whose data may need modification to comply with the extraction conditions imposed by pygtfs +- timezone for next_departure is now used in order: agency (delivering data), if not > HA system, if not > UTC (a minimal sketch of this lookup order follows below). 
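As a side note for readers of this changelog, here is a minimal, hypothetical sketch of that timezone lookup order (agency timezone, then the Home Assistant timezone, then UTC); the function and argument names are illustrative assumptions only and are not the integration's actual code.

```
# Illustrative sketch only: resolve_timezone() and its arguments are assumptions,
# not part of the gtfs2 code base.
from datetime import datetime, timezone
from zoneinfo import ZoneInfo


def resolve_timezone(agency_tz: str | None, ha_tz: str | None):
    """Return the first usable timezone: agency > HA system > UTC."""
    for name in (agency_tz, ha_tz):
        if name:
            try:
                return ZoneInfo(name)
            except Exception:
                # Unknown or malformed timezone string: fall through to the next option.
                continue
    return timezone.utc


# A dataset without an agency timezone falls back to the HA timezone:
tz = resolve_timezone(None, "Europe/Paris")
print(datetime(2023, 11, 18, 12, 9, 5, tzinfo=tz).isoformat())
```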
This to resolve TZ issues for datasets without agency (timezone) + +20231104: initial version + +## ToDo's / In Development - Issue when updating the source db, it throws a db locked error. This when an existing entity for the same db starts polling it at the same time -- Icon for the integration (brands) +- (DONE) Icon for the integration (brands) - bypass setup control for routes that have no trips 'today'. The configuration does a spot-check if start/end actually return data with the idea to validate the setup. However, this only checks for 'today' so if your route actually has no transport running at the day of setup (say Sunday or Holiday) then it will reject it. ## Installation via HACS : @@ -49,11 +59,13 @@ Example: https://github.com/vingerha/gtfs2/blob/main/example.md Data can be updated at your own discretion by a service, e.g. you can have a weekly automation to run the service **Note:** for "update" to work, the name should be the ***same*** as the existing source. It will first remove the existing one and reload the one as per your URL -![image](https://github.com/vingerha/gtfs2/assets/44190435/2defc23d-a1a0-40be-b610-6c5360fbd464) +![image](https://github.com/vingerha/gtfs2/assets/44190435/2d639afa-376b-4956-8223-2c982dc537cb) + or via yaml -![image](https://github.com/vingerha/gtfs2/assets/44190435/2fea7926-a64d-43b6-a653-c95f1f01c66d) +![image](https://github.com/vingerha/gtfs2/assets/44190435/0d50bb87-c081-4cd6-8dc5-9603a44c21a4) + From 9dc8a4d274d0fd686035be28ffd0364faa067a69 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 12:40:38 +0100 Subject: [PATCH 15/42] Update example.md --- example.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/example.md b/example.md index c31fd08..858ffda 100644 --- a/example.md +++ b/example.md @@ -39,6 +39,20 @@ You can add a optional area ![image](https://github.com/vingerha/gtfs2/assets/44190435/f2f855f9-bc07-405d-8b0b-09b3da7e4f79) +## CONFIGURE Options + +After setup you can change the refresh interval and add real-time source(s) + +![image](https://github.com/vingerha/gtfs2/assets/44190435/03135ba3-e9ff-4fe6-a23b-bb1f0a44c6ea) + +![image](https://github.com/vingerha/gtfs2/assets/44190435/11de0f3c-ac1b-4b4d-8712-38764dfc5bd4) + +![image](https://github.com/vingerha/gtfs2/assets/44190435/5895e947-882d-444e-9259-e56d7d5e426a) + + + + + Sample of the entity and its attributes ``` arrival: "2023-11-04T09:42:29+00:00" From 546dcb008ca699d8351302450002d459ba095298 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 12:42:17 +0100 Subject: [PATCH 16/42] Update example.md --- example.md | 141 +++++++++++++++++++++++------------------------------ 1 file changed, 62 insertions(+), 79 deletions(-) diff --git a/example.md b/example.md index 858ffda..ee1305c 100644 --- a/example.md +++ b/example.md @@ -55,114 +55,97 @@ After setup you can change the refresh interval and add real-time source(s) Sample of the entity and its attributes ``` -arrival: "2023-11-04T09:42:29+00:00" +arrival: 2023-11-18T12:18:00+00:00 day: today first: false last: false offset: 0 -agency_agency_id: LE MET -agency_agency_name: LE MET' -agency_agency_url: https://lemet.fr +agency_agency_id: None +agency_agency_name: TAO (Orléans) +agency_agency_url: http://reseau-tao.fr/ agency_agency_timezone: Europe/Paris -agency_agency_lang: FR -agency_agency_phone: 0.800.00.29.38 -agency_agency_fare_url: https://services.lemet.fr/fr/billetterie 
-agency_agency_email: contact@lemet.fr -origin_station_stop_id: "6010" +agency_agency_lang: fr +agency_agency_phone: 0800012000 +agency_agency_fare_url: None +agency_agency_email: None +origin_station_stop_id: ORLEANS:StopArea:00026500 origin_station_stop_code: None -origin_station_stop_name: P+R WOIPPY +origin_station_stop_name: Gaston Galloux origin_station_stop_desc: None -origin_station_stop_lat: "49.150349" -origin_station_stop_lon: "6.173323" +origin_station_stop_lat: 47.884827 +origin_station_stop_lon: 1.924645 origin_station_zone_id: None -origin_station_stop_url: https://services.lemet.fr/fr/biv/arret/1627 -origin_station_location_type: "0" +origin_station_stop_url: None +origin_station_location_type: 0 origin_station_parent_station: None -origin_station_stop_timezone: None -origin_station_wheelchair_boarding: "1" +origin_station_stop_timezone: Europe/Paris +origin_station_wheelchair_boarding: 0 origin_station_platform_code: None origin_station_location_type_name: Station -origin_station_wheelchair_boarding_available: true -destination_station_stop_id: "6180" +origin_station_wheelchair_boarding_available: unknown +destination_station_stop_id: ORLEANS:StopArea:01001712 destination_station_stop_code: None -destination_station_stop_name: FELIX ALCAN +destination_station_stop_name: Gare d'Orléans - Quai E destination_station_stop_desc: None -destination_station_stop_lat: "49.112572" -destination_station_stop_lon: "6.199158" +destination_station_stop_lat: 47.907085 +destination_station_stop_lon: 1.90578 destination_station_zone_id: None -destination_station_stop_url: https://services.lemet.fr/fr/biv/arret/7324 -destination_station_location_type: "0" +destination_station_stop_url: None +destination_station_location_type: 0 destination_station_parent_station: None -destination_station_stop_timezone: None -destination_station_wheelchair_boarding: "1" +destination_station_stop_timezone: Europe/Paris +destination_station_wheelchair_boarding: 0 destination_station_platform_code: None destination_station_location_type_name: Station -destination_station_wheelchair_boarding_available: true -route_route_id: A-98 -route_agency_id: LE MET -route_route_short_name: MA -route_route_long_name: METTIS A +destination_station_wheelchair_boarding_available: unknown +route_route_id: ORLEANS:Line:40 +route_agency_id: None +route_route_short_name: 40 +route_route_long_name: GARE ORLEANS - PETITE MERIE route_route_desc: None -route_route_type: "3" +route_route_type: 3 route_route_url: None -route_route_color: F0980C -route_route_text_color: FFFFFF +route_route_color: 24A472 +route_route_text_color: 000000 route_type_name: Bus -trip_route_id: A-98 -trip_service_id: HIV2324-Sam_Sp23-Samedi-21 -trip_trip_id: 1281546-HIV2324-Sam_Sp23-Samedi-21 -trip_trip_headsign: MA - BORNY +trip_route_id: ORLEANS:Line:40 +trip_service_id: chouette:TimeTable:4f12e6e5-93ca-4af2-b493-0858f5c73e39:LOC +trip_trip_id: ORLEANS:VehicleJourney:40_A_56_16_4002_6_124300 +trip_trip_headsign: None trip_trip_short_name: None -trip_direction_id: "0" -trip_block_id: "196205" -trip_shape_id: A0014 -trip_wheelchair_accessible: "1" -trip_bikes_allowed: "2" -trip_bikes_allowed_state: false -trip_wheelchair_access_available: true -origin_stop_arrival_time: "2023-11-04 10:16:00" -origin_stop_departure_time: "2023-11-04 10:16:00" -origin_stop_drop_off_type: 0 +trip_direction_id: 0 +trip_block_id: None +trip_shape_id: PME-CNY-POSC-GARE +trip_wheelchair_accessible: None +trip_bikes_allowed: None +trip_bikes_allowed_state: unknown 
+trip_wheelchair_access_available: unknown +origin_stop_arrival_time: 2023-11-18T12:09:05+00:00 +origin_stop_departure_time: 2023-11-18T12:09:05+00:00 origin_stop_pickup_type: 0 -origin_stop_sequence: 1 -origin_stop_drop_off_type_state: Regular +origin_stop_sequence: 17 +origin_stop_drop_off_type_state: unknown origin_stop_pickup_type_state: Regular origin_stop_timepoint_exact: true -destination_stop_arrival_time: "2023-11-04 10:42:29" -destination_stop_departure_time: "2023-11-04 10:42:29" -destination_stop_drop_off_type: 0 +destination_stop_arrival_time: 2023-11-18T12:18:00+00:00 +destination_stop_departure_time: 2023-11-18T12:18:00+00:00 destination_stop_pickup_type: 0 -destination_stop_sequence: 19 -destination_stop_drop_off_type_state: Regular +destination_stop_sequence: 23 +destination_stop_drop_off_type_state: unknown destination_stop_pickup_type_state: Regular destination_stop_timepoint_exact: true -next_departures: - - "2023-11-04 10:16:00" - - "2023-11-04 10:31:00" - - "2023-11-04 10:46:00" - - "2023-11-04 11:01:00" - - "2023-11-04 11:16:00" - - "2023-11-04 11:31:00" - - "2023-11-04 11:46:00" - - "2023-11-04 12:01:00" - - "2023-11-04 12:16:00" - - "2023-11-04 12:31:00" -next_departures_lines: - - 2023-11-04 10:16:00 (METTIS A) - - 2023-11-04 10:31:00 (METTIS A) - - 2023-11-04 10:46:00 (METTIS A) - - 2023-11-04 11:01:00 (METTIS A) - - 2023-11-04 11:16:00 (METTIS A) - - 2023-11-04 11:31:00 (METTIS A) - - 2023-11-04 11:46:00 (METTIS A) - - 2023-11-04 12:01:00 (METTIS A) - - 2023-11-04 12:16:00 (METTIS A) - - 2023-11-04 12:31:00 (METTIS A) -updated_at: "2023-11-04T10:07:07.085514" -attribution: LE MET' +next_departures: 2023-11-18T12:09:05+00:00, 2023-11-18T12:39:05+00:00, 2023-11-18T13:10:05+00:00, 2023-11-18T13:40:05+00:00, 2023-11-18T14:10:05+00:00, 2023-11-18T14:40:05+00:00, 2023-11-18T15:11:05+00:00, 2023-11-18T15:41:05+00:00, 2023-11-18T16:12:05+00:00, 2023-11-18T16:42:05+00:00 +next_departures_lines: 2023-11-18T12:09:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T12:39:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T13:10:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T13:40:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T14:10:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T14:40:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T15:11:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T15:41:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T16:12:05+00:00 (GARE ORLEANS - PETITE MERIE), 2023-11-18T16:42:05+00:00 (GARE ORLEANS - PETITE MERIE) +next_departures_headsign: 2023-11-18T12:09:05+00:00 (None), 2023-11-18T12:39:05+00:00 (None), 2023-11-18T13:10:05+00:00 (None), 2023-11-18T13:40:05+00:00 (None), 2023-11-18T14:10:05+00:00 (None), 2023-11-18T14:40:05+00:00 (None), 2023-11-18T15:11:05+00:00 (None), 2023-11-18T15:41:05+00:00 (None), 2023-11-18T16:12:05+00:00 (None), 2023-11-18T16:42:05+00:00 (None) +gtfs_updated_at: 2023-11-18T11:38:52.654949+00:00 +gtfs_rt_updated_at: 2023-11-18T11:40:59.832457+00:00 +next_departure_realtime: 2023-11-18T12:09:30+00:00 +latitude: +longitude: +attribution: TAO (Orléans) device_class: timestamp icon: mdi:bus -friendly_name: MyRouteInMetz +friendly_name: Orleans 40 outbound ``` From 07659f4bb73c78dd222d97a4fcf0cbdaa0d4f7af Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 12:52:36 +0100 Subject: [PATCH 17/42] Update README.md --- README.md | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index fdf3bf2..98e077d 100644 --- 
a/README.md +++ b/README.md @@ -49,6 +49,12 @@ Use the workflow Example: https://github.com/vingerha/gtfs2/blob/main/example.md +## Real Time vehicle tracking + +As per v1.6, the vehicle tracking outputs coordinates to a geojson file in your www folder, which in turn can then be consumed by the geojson integration and map card https://www.home-assistant.io/integrations/geo_json_events/ +![image](https://github.com/vingerha/gtfs2/assets/44190435/a3cbea60-46f1-40e9-88c5-4b9a0519c782) + + **IMPORTANT** - certain providers publish large zip-files which in turn will result in much larger db files. Unpacking may take a long time (depending on HA server perf.). Example for a 117Mb zip: ~2hrs to unpack to a 7Gb sqlite @@ -67,12 +73,7 @@ or via yaml ![image](https://github.com/vingerha/gtfs2/assets/44190435/0d50bb87-c081-4cd6-8dc5-9603a44c21a4) - - - ## Thank you - @joostlek ... massive thanks to help me through many (!) tech aspects and getting this to the initial version - @mxbssn for initiating, bringing ideas, helping with testing - - - +- @mark1foley for his gtfs real time integration, which was enhanced and integrated into GTFS2
From 377af8c382b7f51db43c5870b9a645961072b365 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 12:55:17 +0100 Subject: [PATCH 18/42] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 98e077d..45643e8 100644 --- a/README.md +++ b/README.md @@ -55,11 +55,11 @@ As per v1.6, the vehicle tracking outputs coordinates to a geojson file in your www ![image](https://github.com/vingerha/gtfs2/assets/44190435/a3cbea60-46f1-40e9-88c5-4b9a0519c782) -**IMPORTANT** - +## **IMPORTANT** +- sources need to adhere to GTFS standards both for static data (zip/sqlite) as well as for real-time data (binary). - certain providers publish large zip-files which in turn will result in much larger db files. Unpacking may take a long time (depending on HA server perf.). Example for a 117Mb zip: ~2hrs to unpack to a 7Gb sqlite -- for these large db, performance may be slow too, there is a PR to improve this by adding indexes to the stop_times table - the integration uses folder /config/gtfs2 to store the datafiles (zip and sqlite) +- the integration uses folder /config/www for geojson files, only available when using vehicle tracking sources ## Data add / update Data can be updated at your own discretion by a service, e.g. 
you can have a weekly automation to run the service From 0215fd71395202f21a9f89eaeb1d43dc0bc9e593 Mon Sep 17 00:00:00 2001 From: vingerha <44190435+vingerha@users.noreply.github.com> Date: Sat, 18 Nov 2023 13:30:55 +0100 Subject: [PATCH 19/42] Fix issue with route collection If route name or short name is 'null' then the concatenation failed --- custom_components/gtfs2/gtfs_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index afea166..21e913c 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -368,7 +368,7 @@ def get_route_list(schedule): row = row_cursor._asdict() routes_list.append(list(row_cursor)) for x in routes_list: - val = x[0] + ": " + x[1] + " (" + x[2] + ")" + val = str(x[0]) + ": " + str(x[1]) + " (" + str(x[2]) + ")" routes.append(val) _LOGGER.debug(f"routes: {routes}") return routes From 3ff0797e0f3091322a2ce9999e8c6eb4a013afcd Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 19 Nov 2023 19:04:53 +0100 Subject: [PATCH 20/42] More tuning Realtime: - stop making vehicle position url mandatory - service call: fix and improve translation - gtfs_rt: outcomment debug that makes the log too log - gtfs: add geojson for route, temporarily stopped as no way to visualize in HA --- custom_components/gtfs2/config_flow.py | 2 +- custom_components/gtfs2/coordinator.py | 7 ++- custom_components/gtfs2/gtfs_helper.py | 62 +++++++++++++++++++- custom_components/gtfs2/gtfs_rt_helper.py | 52 ++++++++-------- custom_components/gtfs2/translations/en.json | 19 ++++++ custom_components/gtfs2/translations/fr.json | 22 ++++++- 6 files changed, 131 insertions(+), 33 deletions(-) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 45fb28a..c828543 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -293,7 +293,7 @@ async def async_step_real_time( data_schema=vol.Schema( { vol.Required(CONF_TRIP_UPDATE_URL, default=self.config_entry.options.get(CONF_TRIP_UPDATE_URL)): str, - vol.Required(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL)): str, + vol.Optional(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL)): str, vol.Optional(CONF_API_KEY, default=self.config_entry.options.get(CONF_API_KEY, "na")): str, vol.Optional(CONF_X_API_KEY,default=self.config_entry.options.get(CONF_X_API_KEY, "na")): str }, diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 689e57b..6ec070e 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -21,7 +21,7 @@ ATTR_LONGITUDE, ATTR_RT_UPDATED_AT ) -from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index +from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson from .gtfs_rt_helper import get_rt_route_statuses, get_next_services _LOGGER = logging.getLogger(__name__) @@ -79,6 +79,9 @@ async def _async_update_data(self) -> dict[str, str]: self._data["next_departure"] = await self.hass.async_add_executor_job( get_next_departure, self ) + trip_shape = await self.hass.async_add_executor_job( + create_trip_geojson, self + ) except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs data from generic helper: %s", ex) return None @@ 
-115,7 +118,7 @@ async def _async_update_data(self) -> dict[str, str]: self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.utcnow() except Exception as ex: # pylint: disable=broad-except - _LOGGER.error("Error getting gtfs realtime data: %s", ex) + _LOGGER.error("Error getting gtfs realtime data, for origin: %s with error: %s", data["origin"], ex) else: _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") else: diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 21e913c..e999b19 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -4,6 +4,7 @@ import datetime import logging import os +import json import requests import pygtfs from sqlalchemy.sql import text @@ -12,6 +13,8 @@ import homeassistant.util.dt as dt_util from homeassistant.core import HomeAssistant +from .const import DEFAULT_PATH_GEOJSON + _LOGGER = logging.getLogger(__name__) @@ -329,8 +332,10 @@ def get_gtfs(hass, path, data, update=False): if os.path.exists(journal) : _LOGGER.debug("Still unpacking %s", filename) return "extracting" - if update and os.path.exists(os.path.join(gtfs_dir, file)): + if update and data["extract_from"] == "url" and os.path.exists(os.path.join(gtfs_dir, file)): remove_datasource(hass, path, filename) + if update and data["extract_from"] == "zip" and os.path.exists(os.path.join(gtfs_dir, file)): + os.remove(os.path.join(gtfs_dir, sqlite)) if data["extract_from"] == "zip": if not os.path.exists(os.path.join(gtfs_dir, file)): _LOGGER.error("The given GTFS zipfile was not found") @@ -437,12 +442,21 @@ def check_datasource_index(schedule): WHERE type= 'index' and tbl_name = 'stop_times' and name like '%stop_id%'; """ + sql_index_3 = f""" + SELECT count(*) as checkidx + FROM sqlite_master + WHERE + type= 'index' and tbl_name = 'shapes' and name like '%shape_id%'; + """ sql_add_index_1 = f""" create index gtfs2_stop_times_trip_id on stop_times(trip_id) """ sql_add_index_2 = f""" create index gtfs2_stop_times_stop_id on stop_times(stop_id) """ + sql_add_index_3 = f""" + create index gtfs2_shapes_shape_id on shapes(shape_id) + """ result_1a = schedule.engine.connect().execute( text(sql_index_1), {"q": "q"}, @@ -467,4 +481,48 @@ def check_datasource_index(schedule): result_2b = schedule.engine.connect().execute( text(sql_add_index_2), {"q": "q"}, - ) \ No newline at end of file + ) + + result_3a = schedule.engine.connect().execute( + text(sql_index_3), + {"q": "q"}, + ) + for row_cursor in result_3a: + _LOGGER.debug("IDX result3: %s", row_cursor._asdict()) + if row_cursor._asdict()['checkidx'] == 0: + _LOGGER.info("Adding index 3 to improve performance") + result_3b = schedule.engine.connect().execute( + text(sql_add_index_3), + {"q": "q"}, + ) + +def create_trip_geojson(self): + #_LOGGER.debug("GTFS Helper, create geojson with data: %s", self._data) + schedule = self._data["schedule"] + self._trip_id = self._data["next_departure"]["trip_id"] + sql_shape = f""" + SELECT t.trip_id, s.shape_pt_lat, s.shape_pt_lon + FROM trips t, shapes s + WHERE + t.shape_id = s.shape_id + and t.trip_id = '{self._trip_id}' + order by s.shape_pt_sequence + """ + result = schedule.engine.connect().execute( + text(sql_shape), + {"q": "q"}, + ) + + shapes_list = [] + coordinates = [] + for row_cursor in result: + row = row_cursor._asdict() + shapes_list.append(list(row_cursor)) + for x in 
shapes_list: + coordinate = [] + coordinate.append(x[2]) + coordinate.append(x[1]) + coordinates.append(coordinate) + self.geojson = {"features": [{"geometry": {"coordinates": coordinates, "type": "LineString"}, "properties": {"id": self._trip_id, "title": self._trip_id}, "type": "Feature"}], "type": "FeatureCollection"} + #_LOGGER.error("Geojson: %s", json.dumps(self.geojson)) + return None \ No newline at end of file diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index a903ea4..3835d2b 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -239,34 +239,34 @@ def __init__(self, arrival_time, position): stop_time = stop.departure.time else: stop_time = stop.arrival.time - log_debug( - [ - "Stop:", - stop_id, - "Stop Sequence:", - stop.stop_sequence, - "Stop Time:", - stop_time, - ], - 2, - ) + #log_debug( + #[ + # "Stop:", + # stop_id, + # "Stop Sequence:", + # stop.stop_sequence, + # "Stop Time:", + # stop_time, + #], + #2, + #) # Ignore arrival times in the past if due_in_minutes(datetime.fromtimestamp(stop_time)) >= 0: - log_debug( - [ - "Adding route ID", - route_id, - "trip ID", - entity.trip_update.trip.trip_id, - "direction ID", - entity.trip_update.trip.direction_id, - "stop ID", - stop_id, - "stop time", - stop_time, - ], - 3, - ) + #log_debug( + # [ + # "Adding route ID", + # route_id, + # "trip ID", + # entity.trip_update.trip.trip_id, + # "direction ID", + # entity.trip_update.trip.direction_id, + # "stop ID", + # stop_id, + # "stop time", + # stop_time, + # ], + # 3, + #) details = StopDetails( datetime.fromtimestamp(stop_time), diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index ab713f3..0d8a673 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -83,5 +83,24 @@ "1": "Return" } } + }, + "services": { + "update_gtfs": { + "name": "Updates a GTFS2 datasource", + "description": "Either via Link or placing a Zip yourselves in gtfs2", + "fields": { + "extract_from": { + "name": "Indicate source to use zip or url" + }, + "file": { + "name": "Name of the transport service, without .zip", + "description": "If you use the same name as an existing one, the existing one will be overwitten" + }, + "url": { + "name": "URL", + "description": "provide the full path to the zip file itself" + } + } + } } } diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index 06b1e4b..daaa917 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -74,7 +74,7 @@ "extract_from": { "options": { "zip": "ZIP: attend un fichier dans dossier 'gtfs2' avec le même nom, sans extension .zip", - "url": "URL: utilise l'URL below, laisse 'na' si zip" + "url": "URL: utilise l'URL, laisse 'na' si zip" } }, "direction": { @@ -83,5 +83,23 @@ "1": "Retour" } } + }, + "services": { + "update_gtfs": { + "name": "MAJ d'un GTFS2 datasource", + "description": "Utiliser un lien ou placer votre fichier ZIP dans le dossier gtfs2", + "fields": { + "extract_from": { + "name": "Collecte données de:" + }, + "file": { + "name": "Nom du Service, sans ajouter .zip, " + }, + "url": { + "name": "URL externe vers le fichier (zip) des données GTFS, laisse 'na' si zip", + "description": "A noter: si déjà existant, l'ancien sera remplacé" + } + } + } } -} +} \ No newline at end of file From 
405b3a12663e2176fe3d0fd56614f76b10be2f29 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Mon, 20 Nov 2023 08:26:06 +0100 Subject: [PATCH 21/42] Update en.json fix hacsfest finding --- custom_components/gtfs2/translations/en.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 0d8a673..49d486a 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -90,7 +90,8 @@ "description": "Either via Link or placing a Zip yourselves in gtfs2", "fields": { "extract_from": { - "name": "Indicate source to use zip or url" + "name": "Indicate source to use zip or url", + "description": "" }, "file": { "name": "Name of the transport service, without .zip", From 33daf6ef348c063f3951c72d45b5cab414fb5d90 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Fri, 24 Nov 2023 14:22:41 +0100 Subject: [PATCH 22/42] Increase stability Move 'offset' top options for easier control Outcommented some bugging debug-statements to keep debug readable, not sure yet keep or not --- custom_components/gtfs2/__init__.py | 13 ++++- custom_components/gtfs2/config_flow.py | 11 +++-- custom_components/gtfs2/const.py | 1 + custom_components/gtfs2/coordinator.py | 28 +++++++---- custom_components/gtfs2/gtfs_helper.py | 15 +++--- custom_components/gtfs2/gtfs_rt_helper.py | 50 ++++++++++---------- custom_components/gtfs2/sensor.py | 33 +++++++------ custom_components/gtfs2/translations/en.json | 2 +- custom_components/gtfs2/translations/fr.json | 2 +- 9 files changed, 94 insertions(+), 61 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index 0fec6b4..065391e 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -31,7 +31,7 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options['x_api_key'] = "" config_entry.version = 4 - hass.config_entries.async_update_entry(config_entry, data=new) + hass.config_entries.async_update_entry(config_entry, data=new_data) hass.config_entries.async_update_entry(config_entry, options=new_options) if config_entry.version == 2: @@ -52,6 +52,17 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: config_entry.version = 4 hass.config_entries.async_update_entry(config_entry, options=new_options) + + if config_entry.version == 4: + + new_options = {**config_entry.options} + new_data = {**config_entry.data} + new_options['offset'] = 0 + new_data.pop('offset') + + config_entry.version = 5 + hass.config_entries.async_update_entry(config_entry, data=new_data) + hass.config_entries.async_update_entry(config_entry, options=new_options) _LOGGER.debug("Migration to version %s successful", config_entry.version) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index c828543..3b545f4 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -15,6 +15,7 @@ DEFAULT_PATH, DOMAIN, DEFAULT_REFRESH_INTERVAL, + DEFAULT_OFFSET, CONF_API_KEY, CONF_X_API_KEY, CONF_VEHICLE_POSITION_URL, @@ -43,7 +44,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): """Handle a config flow for GTFS.""" - VERSION = 4 + VERSION = 5 def __init__(self) -> None: """Init ConfigFlow.""" @@ -179,7 +180,6 @@ async def async_step_stops(self, user_input: dict | None = None) -> 
FlowResult: vol.Required("origin"): vol.In(stops), vol.Required("destination", default=last_stop): vol.In(stops), vol.Required("name"): str, - vol.Optional("offset", default=0): int, vol.Optional("include_tomorrow", default = False): selector.BooleanSelector(), }, ), @@ -216,7 +216,7 @@ async def _check_config(self, data): "schedule": self._pygtfs, "origin": data["origin"].split(": ")[0], "destination": data["destination"].split(": ")[0], - "offset": data["offset"], + "offset": 0, "include_tomorrow": data["include_tomorrow"], "gtfs_dir": DEFAULT_PATH, "name": data["name"], @@ -261,7 +261,6 @@ async def async_step_init( if user_input is not None: if user_input['real_time']: self._user_inputs.update(user_input) - _LOGGER.debug(f"GTFS Options with realtime: {self._user_inputs}") return await self.async_step_real_time() else: self._user_inputs.update(user_input) @@ -270,6 +269,7 @@ async def async_step_init( opt1_schema = { vol.Optional("refresh_interval", default=self.config_entry.options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL)): int, + vol.Optional("offset", default=self.config_entry.options.get("offset", DEFAULT_OFFSET)): int, vol.Optional("real_time", default=self.config_entry.options.get("real_time")): selector.BooleanSelector() } @@ -286,6 +286,7 @@ async def async_step_real_time( errors: dict[str, str] = {} if user_input is not None: self._user_inputs.update(user_input) + _LOGGER.debug(f"GTFS Options with realtime: {self._user_inputs}") return self.async_create_entry(title="", data=self._user_inputs) return self.async_show_form( @@ -293,7 +294,7 @@ async def async_step_real_time( data_schema=vol.Schema( { vol.Required(CONF_TRIP_UPDATE_URL, default=self.config_entry.options.get(CONF_TRIP_UPDATE_URL)): str, - vol.Optional(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL)): str, + vol.Optional(CONF_VEHICLE_POSITION_URL, default=self.config_entry.options.get(CONF_VEHICLE_POSITION_URL,"")): str, vol.Optional(CONF_API_KEY, default=self.config_entry.options.get(CONF_API_KEY, "na")): str, vol.Optional(CONF_X_API_KEY,default=self.config_entry.options.get(CONF_X_API_KEY, "na")): str }, diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index 5f5db6d..46ec701 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -5,6 +5,7 @@ # default values for options DEFAULT_REFRESH_INTERVAL = 15 +DEFAULT_OFFSET = 0 DEFAULT_NAME = "GTFS Sensor2" DEFAULT_PATH = "gtfs2" diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 6ec070e..0e38856 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -55,17 +55,30 @@ async def _async_update_data(self) -> dict[str, str]: self.hass, DEFAULT_PATH, data, False ) previous_data = None if self.data is None else self.data.copy() - - if previous_data is not None and (datetime.datetime.strptime(previous_data["next_departure"]["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) : + _LOGGER.debug("Previous data: %s", previous_data) + # determin static + rt or only static (refresh schedule depending) + #1. sensor exists but no gtfs data, e.g. 
after reboot after last transport + #if previous_data is not None and (not "gtfs_update_at" in previous_data["next_departure"]): + # run_static = True + # _LOGGER.debug("Run static refresh 1: sensor with gtfs data but incomplete for name: %s", data["name"]) + #1. sensor exists with data but refresh interval not yet reached, use existing data + if previous_data is not None and (datetime.datetime.strptime(previous_data["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) : + run_static = False + _LOGGER.debug("Do not Run static refresh: sensor exists but not yet refresh for name: %s", data["name"]) + #2. sensor exists and refresh interval reached, get static data + else: + run_static = True + _LOGGER.debug("Run static refresh 2: sensor without gtfs data OR refresh for name: %s", data["name"]) + + if not run_static: # do nothing awaiting refresh interval self._data = previous_data - - if previous_data is None or (datetime.datetime.strptime(previous_data["next_departure"]["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) < dt_util.utcnow() + timedelta(seconds=1): + else: self._data = { "schedule": self._pygtfs, "origin": data["origin"].split(": ")[0], "destination": data["destination"].split(": ")[0], - "offset": data["offset"], + "offset": options["offset"] if "offset" in options else 0, "include_tomorrow": data["include_tomorrow"], "gtfs_dir": DEFAULT_PATH, "name": data["name"], @@ -79,9 +92,7 @@ async def _async_update_data(self) -> dict[str, str]: self._data["next_departure"] = await self.hass.async_add_executor_job( get_next_departure, self ) - trip_shape = await self.hass.async_add_executor_job( - create_trip_geojson, self - ) + self._data["gtfs_updated_at"] = dt_util.utcnow().isoformat() except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs data from generic helper: %s", ex) return None @@ -123,5 +134,6 @@ async def _async_update_data(self) -> dict[str, str]: _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") else: _LOGGER.debug("GTFS RT: RealTime not selected in entity options") + return self._data diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index e999b19..2d96cd6 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -205,6 +205,10 @@ def get_next_departure(self): break if item == {}: + data_returned = { + "gtfs_updated_at": dt_util.utcnow().isoformat(), + } + _LOGGER.debug("No items found in gtfs") return {} # create upcoming timetable @@ -264,11 +268,8 @@ def get_next_departure(self): f"{dest_depart.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_depart_time']}" ) # align on timezone - #_LOGGER.error("dest_depart NEW: %s", dt_util.as_utc(datetime.datetime.strptime(dest_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat()) depart_time = dt_util.parse_datetime(origin_depart_time).replace(tzinfo=timezone) arrival_time = dt_util.parse_datetime(dest_arrival_time).replace(tzinfo=timezone) - #_LOGGER.error("dest_depart: %s", dest_depart) - #_LOGGER.error("depart_time: %s", depart_time) origin_arrival_time = dt_util.as_utc(datetime.datetime.strptime(origin_arrival_time, "%Y-%m-%d %H:%M:%S")).isoformat() origin_depart_time = dt_util.as_utc(datetime.datetime.strptime(origin_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat() dest_arrival_time = 
dt_util.as_utc(datetime.datetime.strptime(dest_arrival_time, "%Y-%m-%d %H:%M:%S")).isoformat() @@ -297,8 +298,8 @@ def get_next_departure(self): "Sequence": item["dest_stop_sequence"], "Timepoint": item["dest_stop_timepoint"], } - - return { + + data_returned = { "trip_id": item["trip_id"], "route_id": item["route_id"], "day": item["day"], @@ -314,7 +315,9 @@ def get_next_departure(self): "gtfs_updated_at": dt_util.utcnow().isoformat(), "next_departure_realtime_attr": {}, } - + _LOGGER.debug("Data returned: %s", data_returned) + + return data_returned def get_gtfs(hass, path, data, update=False): """Get gtfs file.""" diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 3835d2b..6722f8f 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -180,22 +180,23 @@ def __init__(self, arrival_time, position): for entity in feed_entities: if entity.HasField("trip_update"): + # OUTCOMMENTED as spamming even debig log # If delimiter specified split the route ID in the gtfs rt feed - log_debug( - [ - "Received Trip ID", - entity.trip_update.trip.trip_id, - "Route ID:", - entity.trip_update.trip.route_id, - "direction ID", - entity.trip_update.trip.direction_id, - "Start Time:", - entity.trip_update.trip.start_time, - "Start Date:", - entity.trip_update.trip.start_date, - ], - 1, - ) + #log_debug( + # [ + # "Received Trip ID", + # entity.trip_update.trip.trip_id, + # "Route ID:", + # entity.trip_update.trip.route_id, + # "direction ID", + # entity.trip_update.trip.direction_id, + # "Start Time:", + # entity.trip_update.trip.start_time, + # "Start Date:", + # entity.trip_update.trip.start_date, + # ], + # 1, + #) if self._route_delimiter is not None: route_id_split = entity.trip_update.trip.route_id.split( self._route_delimiter @@ -204,15 +205,16 @@ def __init__(self, arrival_time, position): route_id = entity.trip_update.trip.route_id else: route_id = route_id_split[0] - log_debug( - [ - "Feed Route ID", - entity.trip_update.trip.route_id, - "changed to", - route_id, - ], - 1, - ) + # OUTCOMMENTED as spamming even debig log + #log_debug( + # [ + # "Feed Route ID", + # entity.trip_update.trip.route_id, + # "changed to", + # route_id, + # ], + # 1, + #) else: route_id = entity.trip_update.trip.route_id diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index e172b23..8eabb84 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -336,10 +336,11 @@ def _update_attrs(self): # noqa: C901 PLR0911 ) else: self.remove_keys(prefix) - - _LOGGER.debug( - "Destination_stop_time %s", self._departure["destination_stop_time"] - ) + + if "destination_stop_time" in self._departure: + _LOGGER.debug("Destination_stop_time %s", self._departure["destination_stop_time"]) + else: + _LOGGER.debug("No destination_stop_time") prefix = "destination_stop" if self._departure: self.append_keys(self._departure["destination_stop_time"], prefix) @@ -377,18 +378,20 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._attributes["next_departures_headsign"] = self._departure[ "next_departures_headsign"][:10] - self._attributes["gtfs_updated_at"] = self._departure[ - "gtfs_updated_at"] - - _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) - # Add next departure realtime to the right level, only if populated - if "gtfs_rt_updated_at" in self._departure["next_departure_realtime_attr"]: - self._attributes["gtfs_rt_updated_at"] = 
self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] - self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] - self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] - self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] - + self._attributes["gtfs_updated_at"] = self.coordinator.data[ + "gtfs_updated_at"] + if "next_departure_realtime_attr" in self._departure: + _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) + # Add next departure realtime to the right level, only if populated + if "gtfs_rt_updated_at" in self._departure["next_departure_realtime_attr"]: + self._attributes["gtfs_rt_updated_at"] = self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] + self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] + self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] + self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] + else: + _LOGGER.debug("No next departure realtime attributes") + self._attr_extra_state_attributes = self._attributes return self._attr_extra_state_attributes diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 0d8a673..2fb9af7 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -26,7 +26,6 @@ "origin": "Origin Stop", "destination": "Destination Stop", "name": "Name of the route", - "offset": "Offset in minutes", "refresh_interval": "Refresh interval in minutes", "include_tomorrow": "Include tomorrow" } @@ -56,6 +55,7 @@ "description": "Customize the way the integration works", "data": { "refresh_interval": "Data refresh interval (in minutes)", + "offset": "Offset in minutes", "real_time": "Setup Realtime integration? \n (needs data from the same source)" } }, diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index daaa917..fc609da 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -26,7 +26,6 @@ "origin": "Arrêt d'origine", "destination": "Arrêt de destination", "name": "Nom de la ligne", - "offset": "Décalage en minutes", "refresh_interval": "Intervalle d'actualisation en minutes", "include_tomorrow": "Inclure le lendemain?" } @@ -56,6 +55,7 @@ "description": "Personnalisez le fonctionnement de l'intégration", "data": { "refresh_interval": "Intervalle d'actualisation en minutes", + "offset": "Décalage en minutes", "real_time": "Ajoute intégration temps réel? \n (nécessite données de la même source)" } }, From 8d1f6ecc49c8f93f24f4609e05a1236f41dc2451 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Fri, 24 Nov 2023 18:20:42 +0100 Subject: [PATCH 23/42] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 45643e8..54db8ea 100644 --- a/README.md +++ b/README.md @@ -29,9 +29,9 @@ Core GTFS uses start + stop, it then determines every option between them and pr 20231104: initial version -## ToDo's / In Development -- Issue when updating the source db, it throws a db locked error. 
This when an existing entity for the same db starts polling it at the same time -- (DONE) Icon for the integration (brands) +## ToDo's / In Development / Known Issues +- Issue when updating the source db, it throws a db locked error or a pygtfs error. This happens when an existing entity for the same db starts polling it at the same time +- Issue when updating the source db: pygtfs error: at the moment unclear as errors fluctuate, possibly a lack of resources (mem/cpu) - bypass setup control for routes that have no trips 'today'. The configuration does a spot-check if start/end actually return data with the idea to validate the setup. However, this only checks for 'today' so if your route actually has no transport running at the day of setup (say Sunday or Holiday) then it will reject it. ## Installation via HACS :
From 9ea26eae8ec38879ed751facf33921186b2f116d Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 07:56:52 +0100 Subject: [PATCH 24/42] Further stabilizing Avoid error while collecting data from source being extracted Textual updates in error/warning/info handling --- custom_components/gtfs2/__init__.py | 4 +- custom_components/gtfs2/config_flow.py | 7 ++-- custom_components/gtfs2/coordinator.py | 21 ++++------ custom_components/gtfs2/gtfs_helper.py | 44 ++++++++++++-------- custom_components/gtfs2/gtfs_rt_helper.py | 2 +- custom_components/gtfs2/sensor.py | 12 +++--- custom_components/gtfs2/translations/en.json | 2 +- custom_components/gtfs2/translations/fr.json | 8 ++-- 8 files changed, 54 insertions(+), 46 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index 065391e..ab83e0a 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -16,7 +16,7 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: """Migrate old entry.""" - _LOGGER.debug("Migrating from version %s", config_entry.version) + _LOGGER.warning("Migrating from version %s", config_entry.version) if config_entry.version == 1: @@ -64,7 +64,7 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: hass.config_entries.async_update_entry(config_entry, data=new_data) hass.config_entries.async_update_entry(config_entry, options=new_options) - _LOGGER.debug("Migration to version %s successful", config_entry.version) + _LOGGER.warning("Migration to version %s successful", config_entry.version) return True diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index 3b545f4..b3d4eaf 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -124,9 +124,9 @@ async def async_step_remove(self, user_input: dict | None = None) -> FlowResult: ) try: removed = remove_datasource(self.hass, DEFAULT_PATH, user_input["file"]) - _LOGGER.debug(f"removed value: {removed}") + _LOGGER.debug(f"Removed gtfs data source: {removed}") except Exception as ex: - _LOGGER.info("Error while deleting : %s", {ex}) + _LOGGER.error("Error while deleting : %s", {ex}) return "generic_failure" return self.async_abort(reason="files_deleted") @@ -221,6 +221,7 @@ async def _check_config(self, data): "gtfs_dir": DEFAULT_PATH, "name": data["name"], "next_departure": None, + "file": data["file"], } try: @@ -228,7 +229,7 @@ async def _check_config(self, data): get_next_departure, self ) except Exception as ex: # pylint: disable=broad-except - _LOGGER.info( + _LOGGER.error( "Config: error getting gtfs data from generic helper: 
%s", {ex}, exc_info=1, diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 0e38856..5295962 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -51,29 +51,25 @@ async def _async_update_data(self) -> dict[str, str]: data = self.config_entry.data options = self.config_entry.options - self._pygtfs = get_gtfs( - self.hass, DEFAULT_PATH, data, False - ) previous_data = None if self.data is None else self.data.copy() _LOGGER.debug("Previous data: %s", previous_data) # determin static + rt or only static (refresh schedule depending) - #1. sensor exists but no gtfs data, e.g. after reboot after last transport - #if previous_data is not None and (not "gtfs_update_at" in previous_data["next_departure"]): - # run_static = True - # _LOGGER.debug("Run static refresh 1: sensor with gtfs data but incomplete for name: %s", data["name"]) #1. sensor exists with data but refresh interval not yet reached, use existing data if previous_data is not None and (datetime.datetime.strptime(previous_data["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) : run_static = False - _LOGGER.debug("Do not Run static refresh: sensor exists but not yet refresh for name: %s", data["name"]) + _LOGGER.debug("No run static refresh: sensor exists but not yet refresh for name: %s", data["name"]) #2. sensor exists and refresh interval reached, get static data else: run_static = True - _LOGGER.debug("Run static refresh 2: sensor without gtfs data OR refresh for name: %s", data["name"]) + _LOGGER.debug("Run static refresh: sensor without gtfs data OR refresh for name: %s", data["name"]) if not run_static: - # do nothing awaiting refresh interval + # do nothing awaiting refresh interval and use existing data self._data = previous_data else: + self._pygtfs = get_gtfs( + self.hass, DEFAULT_PATH, data, False + ) self._data = { "schedule": self._pygtfs, "origin": data["origin"].split(": ")[0], @@ -82,10 +78,11 @@ async def _async_update_data(self) -> dict[str, str]: "include_tomorrow": data["include_tomorrow"], "gtfs_dir": DEFAULT_PATH, "name": data["name"], + "file": data["file"], } check_index = await self.hass.async_add_executor_job( - check_datasource_index, self._pygtfs + check_datasource_index, self ) try: @@ -131,7 +128,7 @@ async def _async_update_data(self) -> dict[str, str]: except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs realtime data, for origin: %s with error: %s", data["origin"], ex) else: - _LOGGER.info("GTFS RT: RealTime = false, selected in entity options") + _LOGGER.debug("GTFS RT: RealTime = false, selected in entity options") else: _LOGGER.debug("GTFS RT: RealTime not selected in entity options") diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 2d96cd6..b932df7 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -20,6 +20,13 @@ def get_next_departure(self): _LOGGER.debug("Get next departure with data: %s", self._data) + gtfs_dir = self.hass.config.path(self._data["gtfs_dir"]) + filename = self._data["file"] + journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") + if os.path.exists(journal) : + _LOGGER.error("Cannot use this datasource as still unpacking: %s", filename) + return {} + """Get next departures from data.""" if self.hass.config.time_zone is None: 
_LOGGER.error("Timezone is not set in Home Assistant configuration") @@ -208,7 +215,7 @@ def get_next_departure(self): data_returned = { "gtfs_updated_at": dt_util.utcnow().isoformat(), } - _LOGGER.debug("No items found in gtfs") + _LOGGER.info("No items found in gtfs") return {} # create upcoming timetable @@ -275,8 +282,6 @@ def get_next_departure(self): dest_arrival_time = dt_util.as_utc(datetime.datetime.strptime(dest_arrival_time, "%Y-%m-%d %H:%M:%S")).isoformat() dest_depart_time = dt_util.as_utc(datetime.datetime.strptime(dest_depart_time, "%Y-%m-%d %H:%M:%S")).isoformat() - #_LOGGER.error("dtutil now: %s", dt_util.now()) - origin_stop_time = { "Arrival Time": origin_arrival_time, "Departure Time": origin_depart_time, @@ -329,15 +334,12 @@ def get_gtfs(hass, path, data, update=False): file = data["file"] + ".zip" sqlite = data["file"] + ".sqlite" journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") - _LOGGER.debug("filename__: %s", filename[-2:]) - _LOGGER.debug("journal: %s", journal) - _LOGGER.debug("journal exist: %s", os.path.exists(journal)) - if os.path.exists(journal) : - _LOGGER.debug("Still unpacking %s", filename) + if os.path.exists(journal) and not update : + _LOGGER.warning("Cannot use this datasource as still unpacking %s", filename) return "extracting" if update and data["extract_from"] == "url" and os.path.exists(os.path.join(gtfs_dir, file)): remove_datasource(hass, path, filename) - if update and data["extract_from"] == "zip" and os.path.exists(os.path.join(gtfs_dir, file)): + if update and data["extract_from"] == "zip" and os.path.exists(os.path.join(gtfs_dir, file)) and os.path.exists(os.path.join(gtfs_dir, sqlite)): os.remove(os.path.join(gtfs_dir, sqlite)) if data["extract_from"] == "zip": if not os.path.exists(os.path.join(gtfs_dir, file)): @@ -413,14 +415,12 @@ def get_datasources(hass, path) -> dict[str]: _LOGGER.debug(f"Datasources path: {path}") gtfs_dir = hass.config.path(path) os.makedirs(gtfs_dir, exist_ok=True) - _LOGGER.debug(f"Datasources folder: {gtfs_dir}") files = os.listdir(gtfs_dir) - _LOGGER.debug(f"Datasources files: {files}") datasources = [] for file in files: if file.endswith(".sqlite"): datasources.append(file.split(".")[0]) - _LOGGER.debug(f"datasources: {datasources}") + _LOGGER.debug(f"Datasources in folder: {datasources}") return datasources @@ -432,7 +432,15 @@ def remove_datasource(hass, path, filename): return "removed" -def check_datasource_index(schedule): +def check_datasource_index(self): + _LOGGER.debug("Check datasource with data: %s", self._data) + gtfs_dir = self.hass.config.path(self._data["gtfs_dir"]) + filename = self._data["file"] + journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") + if os.path.exists(journal) : + _LOGGER.warning("Cannot check indexes on this datasource as still unpacking: %s", filename) + return + schedule=self._pygtfs sql_index_1 = f""" SELECT count(*) as checkidx FROM sqlite_master @@ -467,7 +475,7 @@ def check_datasource_index(schedule): for row_cursor in result_1a: _LOGGER.debug("IDX result1: %s", row_cursor._asdict()) if row_cursor._asdict()['checkidx'] == 0: - _LOGGER.info("Adding index 1 to improve performance") + _LOGGER.debug("Adding index 1 to improve performance") result_1b = schedule.engine.connect().execute( text(sql_add_index_1), {"q": "q"}, @@ -480,7 +488,7 @@ def check_datasource_index(schedule): for row_cursor in result_2a: _LOGGER.debug("IDX result2: %s", row_cursor._asdict()) if row_cursor._asdict()['checkidx'] == 0: - _LOGGER.info("Adding index 2 to improve 
performance") + _LOGGER.debug("Adding index 2 to improve performance") result_2b = schedule.engine.connect().execute( text(sql_add_index_2), {"q": "q"}, @@ -493,14 +501,14 @@ def check_datasource_index(schedule): for row_cursor in result_3a: _LOGGER.debug("IDX result3: %s", row_cursor._asdict()) if row_cursor._asdict()['checkidx'] == 0: - _LOGGER.info("Adding index 3 to improve performance") + _LOGGER.debug("Adding index 3 to improve performance") result_3b = schedule.engine.connect().execute( text(sql_add_index_3), {"q": "q"}, ) def create_trip_geojson(self): - #_LOGGER.debug("GTFS Helper, create geojson with data: %s", self._data) + _LOGGER.debug("GTFS Helper, create geojson with data: %s", self._data) schedule = self._data["schedule"] self._trip_id = self._data["next_departure"]["trip_id"] sql_shape = f""" @@ -527,5 +535,5 @@ def create_trip_geojson(self): coordinate.append(x[1]) coordinates.append(coordinate) self.geojson = {"features": [{"geometry": {"coordinates": coordinates, "type": "LineString"}, "properties": {"id": self._trip_id, "title": self._trip_id}, "type": "Feature"}], "type": "FeatureCollection"} - #_LOGGER.error("Geojson: %s", json.dumps(self.geojson)) + _LOGGER.debug("Geojson: %s", json.dumps(self.geojson)) return None \ No newline at end of file diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 6722f8f..9465cb2 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -348,7 +348,7 @@ def update_geojson(self): geojson_dir = self.hass.config.path(DEFAULT_PATH_GEOJSON) os.makedirs(geojson_dir, exist_ok=True) file = os.path.join(geojson_dir, self._route_dir + ".json") - _LOGGER.debug("gtfs geojson file: %s", file) + _LOGGER.debug("GTFS RT geojson file: %s", file) with open(file, "w") as outfile: json.dump(self.geojson, outfile) diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 8eabb84..f718961 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -133,15 +133,15 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._route = None self._agency = None # Fetch valid stop information once - if not self._origin: + if not self._origin and self._departure: stops = self._pygtfs.stops_by_id(self.origin) if not stops: self._available = False _LOGGER.warning("Origin stop ID %s not found", self.origin) return - self._origin = stops[0] + self._origin = stops[0] - if not self._destination: + if not self._destination and self._departure: stops = self._pygtfs.stops_by_id(self.destination) if not stops: self._available = False @@ -257,9 +257,9 @@ def _update_attrs(self): # noqa: C901 PLR0911 if self._state is None: self._attributes[ATTR_INFO] = ( - "No more departures" + "No more departures or extracting new data" if self._include_tomorrow - else "No more departures today" + else "No more departures today or extracting new data" ) elif ATTR_INFO in self._attributes: del self._attributes[ATTR_INFO] @@ -340,7 +340,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 if "destination_stop_time" in self._departure: _LOGGER.debug("Destination_stop_time %s", self._departure["destination_stop_time"]) else: - _LOGGER.debug("No destination_stop_time") + _LOGGER.warning("No destination_stop_time") prefix = "destination_stop" if self._departure: self.append_keys(self._departure["destination_stop_time"], prefix) diff --git a/custom_components/gtfs2/translations/en.json b/custom_components/gtfs2/translations/en.json index 
b25925c..66a9e03 100644 --- a/custom_components/gtfs2/translations/en.json +++ b/custom_components/gtfs2/translations/en.json @@ -91,7 +91,7 @@ "fields": { "extract_from": { "name": "Indicate source to use zip or url", - "description": "" + "description": "" }, "file": { "name": "Name of the transport service, without .zip", diff --git a/custom_components/gtfs2/translations/fr.json b/custom_components/gtfs2/translations/fr.json index fc609da..5625565 100644 --- a/custom_components/gtfs2/translations/fr.json +++ b/custom_components/gtfs2/translations/fr.json @@ -90,13 +90,15 @@ "description": "Utiliser un lien ou placer votre fichier ZIP dans le dossier gtfs2", "fields": { "extract_from": { - "name": "Collecte données de:" + "name": "Collecte données de:", + "description": "" }, "file": { - "name": "Nom du Service, sans ajouter .zip, " + "name": "Nom du Service, sans ajouter .zip", + "description": "A noter: si déjà existant, l'ancien sera remplacé" }, "url": { - "name": "URL externe vers le fichier (zip) des données GTFS, laisse 'na' si zip", + "name": "URL externe vers le fichier (zip) des données GTFS, laissez le 'na' si zip", "description": "A noter: si déjà existant, l'ancien sera remplacé" } } From f0fa371c60836f0c0643b5a6f750a5fc613edf53 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 08:02:16 +0100 Subject: [PATCH 25/42] Adjust migration for older versions --- custom_components/gtfs2/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index ab83e0a..c9e7142 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -37,21 +37,29 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: if config_entry.version == 2: new_options = {**config_entry.options} + new_data = {**config_entry.data} new_options['real_time'] = False new_options['api_key'] = "" new_options['x_api_key'] = "" + new_options['offset'] = 0 + new_data.pop('offset') config_entry.version = 4 hass.config_entries.async_update_entry(config_entry, options=new_options) + hass.config_entries.async_update_entry(config_entry, data=new_data) if config_entry.version == 3: new_options = {**config_entry.options} + new_data = {**config_entry.data} new_options['api_key'] = "" new_options['x_api_key'] = "" + new_options['offset'] = 0 + new_data.pop('offset') config_entry.version = 4 hass.config_entries.async_update_entry(config_entry, options=new_options) + hass.config_entries.async_update_entry(config_entry, data=new_data) if config_entry.version == 4: From 39a4e8afe963b2a950d9eab37439e12d2dddca70 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 08:15:30 +0100 Subject: [PATCH 26/42] Fix migration error --- custom_components/gtfs2/__init__.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/custom_components/gtfs2/__init__.py b/custom_components/gtfs2/__init__.py index c9e7142..7aee183 100644 --- a/custom_components/gtfs2/__init__.py +++ b/custom_components/gtfs2/__init__.py @@ -29,8 +29,10 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options['refresh_interval'] = 15 new_options['api_key'] = "" new_options['x_api_key'] = "" + new_options['offset'] = 0 + new_data.pop('offset') - config_entry.version = 4 + config_entry.version = 5 hass.config_entries.async_update_entry(config_entry, data=new_data) 
hass.config_entries.async_update_entry(config_entry, options=new_options) @@ -44,7 +46,7 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options['offset'] = 0 new_data.pop('offset') - config_entry.version = 4 + config_entry.version = 5 hass.config_entries.async_update_entry(config_entry, options=new_options) hass.config_entries.async_update_entry(config_entry, data=new_data) @@ -57,7 +59,7 @@ async def async_migrate_entry(hass, config_entry: ConfigEntry) -> bool: new_options['offset'] = 0 new_data.pop('offset') - config_entry.version = 4 + config_entry.version = 5 hass.config_entries.async_update_entry(config_entry, options=new_options) hass.config_entries.async_update_entry(config_entry, data=new_data) From 614c97a74a794d3e1e474ac9437f07ed367c6069 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 10:20:49 +0100 Subject: [PATCH 27/42] Fix bug with collecting realtime data whilst still extracting --- custom_components/gtfs2/coordinator.py | 42 +++++++++++++++----------- custom_components/gtfs2/gtfs_helper.py | 25 ++++++++------- custom_components/gtfs2/sensor.py | 21 ++----------- 3 files changed, 42 insertions(+), 46 deletions(-) diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index 5295962..fab4116 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -21,7 +21,7 @@ ATTR_LONGITUDE, ATTR_RT_UPDATED_AT ) -from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson +from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson, check_extracting from .gtfs_rt_helper import get_rt_route_statuses, get_next_services _LOGGER = logging.getLogger(__name__) @@ -50,9 +50,31 @@ async def _async_update_data(self) -> dict[str, str]: """Get the latest data from GTFS and GTFS relatime, depending refresh interval""" data = self.config_entry.data options = self.config_entry.options - previous_data = None if self.data is None else self.data.copy() - _LOGGER.debug("Previous data: %s", previous_data) + _LOGGER.debug("Previous data: %s", previous_data) + + self._pygtfs = get_gtfs( + self.hass, DEFAULT_PATH, data, False + ) + self._data = { + "schedule": self._pygtfs, + "origin": data["origin"].split(": ")[0], + "destination": data["destination"].split(": ")[0], + "offset": options["offset"] if "offset" in options else 0, + "include_tomorrow": data["include_tomorrow"], + "gtfs_dir": DEFAULT_PATH, + "name": data["name"], + "file": data["file"], + "extracting": False, + "next_departure": {} + } + + if check_extracting(self): + _LOGGER.warning("Cannot update this sensor as still unpacking: %s", self._data["file"]) + previous_data["extracting"] = True + return previous_data + + # determin static + rt or only static (refresh schedule depending) #1. 
sensor exists with data but refresh interval not yet reached, use existing data if previous_data is not None and (datetime.datetime.strptime(previous_data["gtfs_updated_at"],'%Y-%m-%dT%H:%M:%S.%f%z') + timedelta(minutes=options.get("refresh_interval", DEFAULT_REFRESH_INTERVAL))) > dt_util.utcnow() + timedelta(seconds=1) : @@ -67,20 +89,6 @@ async def _async_update_data(self) -> dict[str, str]: # do nothing awaiting refresh interval and use existing data self._data = previous_data else: - self._pygtfs = get_gtfs( - self.hass, DEFAULT_PATH, data, False - ) - self._data = { - "schedule": self._pygtfs, - "origin": data["origin"].split(": ")[0], - "destination": data["destination"].split(": ")[0], - "offset": options["offset"] if "offset" in options else 0, - "include_tomorrow": data["include_tomorrow"], - "gtfs_dir": DEFAULT_PATH, - "name": data["name"], - "file": data["file"], - } - check_index = await self.hass.async_add_executor_job( check_datasource_index, self ) diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index b932df7..95edbfd 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -20,11 +20,8 @@ def get_next_departure(self): _LOGGER.debug("Get next departure with data: %s", self._data) - gtfs_dir = self.hass.config.path(self._data["gtfs_dir"]) - filename = self._data["file"] - journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") - if os.path.exists(journal) : - _LOGGER.error("Cannot use this datasource as still unpacking: %s", filename) + if check_extracting(self): + _LOGGER.warning("Cannot get next departures on this datasource as still unpacking: %s", self._data["file"]) return {} """Get next departures from data.""" @@ -335,7 +332,7 @@ def get_gtfs(hass, path, data, update=False): sqlite = data["file"] + ".sqlite" journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") if os.path.exists(journal) and not update : - _LOGGER.warning("Cannot use this datasource as still unpacking %s", filename) + _LOGGER.warning("Cannot use this datasource as still unpacking: %s", filename) return "extracting" if update and data["extract_from"] == "url" and os.path.exists(os.path.join(gtfs_dir, file)): remove_datasource(hass, path, filename) @@ -430,15 +427,21 @@ def remove_datasource(hass, path, filename): os.remove(os.path.join(gtfs_dir, filename + ".zip")) os.remove(os.path.join(gtfs_dir, filename + ".sqlite")) return "removed" - - -def check_datasource_index(self): - _LOGGER.debug("Check datasource with data: %s", self._data) + +def check_extracting(self): gtfs_dir = self.hass.config.path(self._data["gtfs_dir"]) filename = self._data["file"] journal = os.path.join(gtfs_dir, filename + ".sqlite-journal") if os.path.exists(journal) : - _LOGGER.warning("Cannot check indexes on this datasource as still unpacking: %s", filename) + _LOGGER.debug("check extracting: yes") + return True + return False + + +def check_datasource_index(self): + _LOGGER.debug("Check datasource with data: %s", self._data) + if check_extracting(self): + _LOGGER.warning("Cannot check indexes on this datasource as still unpacking: %s", self._data["file"]) return schedule=self._pygtfs sql_index_1 = f""" diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index f718961..2a151b1 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -119,6 +119,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 _LOGGER.debug(f"SENSOR update attr DATA: {self.coordinator.data}")
self._pygtfs = self.coordinator.data["schedule"] self.origin = self.coordinator.data["origin"].split(": ")[0] + self.extracting = self.coordinator.data["extracting"] self.destination = self.coordinator.data["destination"].split(": ")[0] self._include_tomorrow = self.coordinator.data["include_tomorrow"] self._offset = self.coordinator.data["offset"] @@ -132,34 +133,18 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._trip = None self._route = None self._agency = None - # Fetch valid stop information once - if not self._origin and self._departure: - stops = self._pygtfs.stops_by_id(self.origin) - if not stops: - self._available = False - _LOGGER.warning("Origin stop ID %s not found", self.origin) - return - self._origin = stops[0] - - if not self._destination and self._departure: - stops = self._pygtfs.stops_by_id(self.destination) - if not stops: - self._available = False - _LOGGER.warning("Destination stop ID %s not found", self.destination) - return - self._destination = stops[0] # Fetch trip and route details once, unless updated if not self._departure: self._trip = None else: trip_id = self._departure["trip_id"] - if not self._trip or self._trip.trip_id != trip_id: + if not self.extracting and (not self._trip or self._trip.trip_id != trip_id): _LOGGER.debug("Fetching trip details for %s", trip_id) self._trip = self._pygtfs.trips_by_id(trip_id)[0] route_id = self._departure["route_id"] - if not self._route or self._route.route_id != route_id: + if not self.extracting and (not self._route or self._route.route_id != route_id): _LOGGER.debug("Fetching route details for %s", route_id) self._route = self._pygtfs.routes_by_id(route_id)[0] From bdd3dae5da5078c0149f82ece87a14ef517e7e54 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 10:44:28 +0100 Subject: [PATCH 28/42] Update hacs.json --- hacs.json | 1 + 1 file changed, 1 insertion(+) diff --git a/hacs.json b/hacs.json index b07636f..fb6c2e8 100644 --- a/hacs.json +++ b/hacs.json @@ -1,4 +1,5 @@ { "name": "GTFS2 for HomeAssistant", + "render_readme": true, "homeassistant": "2023.10.1" } From 104b732186114468a8da51768db6366a7f427993 Mon Sep 17 00:00:00 2001 From: vingerha <44190435+vingerha@users.noreply.github.com> Date: Sat, 25 Nov 2023 14:16:35 +0100 Subject: [PATCH 29/42] Fix issue with missing stop/dest ID --- custom_components/gtfs2/gtfs_rt_helper.py | 30 +++++++++++------------ custom_components/gtfs2/sensor.py | 27 +++++++++++++++++--- 2 files changed, 38 insertions(+), 19 deletions(-) diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 9465cb2..808fbd5 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -254,21 +254,21 @@ def __init__(self, arrival_time, position): #) # Ignore arrival times in the past if due_in_minutes(datetime.fromtimestamp(stop_time)) >= 0: - #log_debug( - # [ - # "Adding route ID", - # route_id, - # "trip ID", - # entity.trip_update.trip.trip_id, - # "direction ID", - # entity.trip_update.trip.direction_id, - # "stop ID", - # stop_id, - # "stop time", - # stop_time, - # ], - # 3, - #) + log_debug( + [ + "Adding route ID", + route_id, + "trip ID", + entity.trip_update.trip.trip_id, + "direction ID", + entity.trip_update.trip.direction_id, + "stop ID", + stop_id, + "stop time", + stop_time, + ], + 3, + ) details = StopDetails( datetime.fromtimestamp(stop_time), diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 
2a151b1..5073162 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -118,8 +118,8 @@ def icon(self) -> str: def _update_attrs(self): # noqa: C901 PLR0911 _LOGGER.debug(f"SENSOR update attr DATA: {self.coordinator.data}") self._pygtfs = self.coordinator.data["schedule"] - self.origin = self.coordinator.data["origin"].split(": ")[0] self.extracting = self.coordinator.data["extracting"] + self.origin = self.coordinator.data["origin"].split(": ")[0] self.destination = self.coordinator.data["destination"].split(": ")[0] self._include_tomorrow = self.coordinator.data["include_tomorrow"] self._offset = self.coordinator.data["offset"] @@ -128,11 +128,30 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._icon = ICON self._state: datetime.datetime | None = None self._attr_device_class = SensorDeviceClass.TIMESTAMP - self._origin = None - self._destination = None self._trip = None self._route = None self._agency = None + self._origin = None + self._destination = None + # Fetch valid stop information once + if not self._origin and not self.extracting: + stops = self._pygtfs.stops_by_id(self.origin) + if not stops: + self._available = False + _LOGGER.warning("Origin stop ID %s not found", self.origin) + return + self._origin = stops[0] + + if not self._destination and not self.extracting: + stops = self._pygtfs.stops_by_id(self.destination) + if not stops: + self._available = False + _LOGGER.warning( + "Destination stop ID %s not found", self.destination + ) + return + self._destination = stops[0] + # Fetch trip and route details once, unless updated if not self._departure: @@ -204,7 +223,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 name = ( f"{getattr(self._agency, 'agency_name', DEFAULT_NAME)} " - f"{self.origin} to {self.destination} next departure" + f"{self._origin} to {self._destination} next departure" ) if not self._departure: name = f"{DEFAULT_NAME}" From ea971abd5495a75da3a21a99178bded1fc8967c2 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 11:28:26 +0100 Subject: [PATCH 30/42] Fix issues with tomorrow dates not properly displayed Both state and attributes fixed Initial work on extracting real-time via trip_id (e.g. 
for TER/France, the realtime data does not contain route_id) --- custom_components/gtfs2/const.py | 1 + custom_components/gtfs2/coordinator.py | 4 +- custom_components/gtfs2/gtfs_helper.py | 20 ++- custom_components/gtfs2/gtfs_rt_helper.py | 193 +++++++++++++++++----- 4 files changed, 170 insertions(+), 48 deletions(-) diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index 46ec701..c148e92 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -241,6 +241,7 @@ #gtfs_rt ATTR_STOP_ID = "Stop ID" ATTR_ROUTE = "Route" +ATTR_TRIP = "Trip" ATTR_DIRECTION_ID = "Direction ID" ATTR_DUE_IN = "Due in" ATTR_DUE_AT = "Due at" diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index fab4116..ffae24b 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -22,7 +22,7 @@ ATTR_RT_UPDATED_AT ) from .gtfs_helper import get_gtfs, get_next_departure, check_datasource_index, create_trip_geojson, check_extracting -from .gtfs_rt_helper import get_rt_route_statuses, get_next_services +from .gtfs_rt_helper import get_rt_route_statuses, get_rt_trip_statuses, get_next_services _LOGGER = logging.getLogger(__name__) @@ -126,10 +126,12 @@ async def _async_update_data(self) -> dict[str, str]: _LOGGER.error("Error getting entity route_id for realtime data, for origin: %s with error: %s", data["origin"], ex) self._route_id = data["route"].split(": ")[0] self._stop_id = data["origin"].split(": ")[0] + self._trip_id = self._data["next_departure"]["trip_id"] self._direction = data["direction"] self._relative = False try: self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) + self._get_rt_trip_statuses = await self.hass.async_add_executor_job(get_rt_trip_statuses, self) self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.utcnow() diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index 95edbfd..f7d4835 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -47,6 +47,7 @@ def get_next_departure(self): # days. 
limit = 24 * 60 * 60 * 2 tomorrow_select = tomorrow_where = tomorrow_order = "" + tomorrow_calendar_date_where = f"AND (calendar_date_today.date = :today)" if include_tomorrow: _LOGGER.debug("Include Tomorrow") limit = int(limit / 2 * 3) @@ -54,6 +55,7 @@ def get_next_departure(self): tomorrow_select = f"calendar.{tomorrow_name} AS tomorrow," tomorrow_where = f"OR calendar.{tomorrow_name} = 1" tomorrow_order = f"calendar.{tomorrow_name} DESC," + tomorrow_calendar_date_where = f"AND (calendar_date_today.date = :today or calendar_date_today.date = :tomorrow)" sql_query = f""" SELECT trip.trip_id, trip.route_id,trip.trip_headsign,route.route_long_name, @@ -143,7 +145,7 @@ def get_next_departure(self): WHERE start_station.stop_id = :origin_station_id AND end_station.stop_id = :end_station_id AND origin_stop_sequence < dest_stop_sequence - AND (calendar_date_today.date = :today or calendar_date_today.date = :tomorrow) + {tomorrow_calendar_date_where} ORDER BY today_cd, origin_depart_time """ # noqa: S608 result = schedule.engine.connect().execute( @@ -207,7 +209,8 @@ def get_next_departure(self): "Departure found for station %s @ %s -> %s", start_station_id, key, item ) break - + _LOGGER.debug("item: %s", item) + if item == {}: data_returned = { "gtfs_updated_at": dt_util.utcnow().isoformat(), @@ -248,7 +251,15 @@ def get_next_departure(self): # Format arrival and departure dates and times, accounting for the # possibility of times crossing over midnight. + _tomorrow = item.get("tomorrow") origin_arrival = now + dest_arrival = now + origin_depart_time = f"{now_date} {item['origin_depart_time']}" + if _tomorrow == 1: + origin_arrival = tomorrow + dest_arrival = tomorrow + origin_depart_time = f"{tomorrow_date} {item['origin_depart_time']}" + if item["origin_arrival_time"] > item["origin_depart_time"]: origin_arrival -= datetime.timedelta(days=1) origin_arrival_time = ( @@ -256,11 +267,8 @@ def get_next_departure(self): f"{item['origin_arrival_time']}" ) - origin_depart_time = f"{now_date} {item['origin_depart_time']}" - - dest_arrival = now if item["dest_arrival_time"] < item["origin_depart_time"]: - dest_arrival += datetime.timedelta(days=1) + dest_arrival += datetime.timedelta(days=1) dest_arrival_time = ( f"{dest_arrival.strftime(dt_util.DATE_STR_FORMAT)} {item['dest_arrival_time']}" ) diff --git a/custom_components/gtfs2/gtfs_rt_helper.py b/custom_components/gtfs2/gtfs_rt_helper.py index 808fbd5..3a3853b 100644 --- a/custom_components/gtfs2/gtfs_rt_helper.py +++ b/custom_components/gtfs2/gtfs_rt_helper.py @@ -19,6 +19,7 @@ ATTR_STOP_ID, ATTR_ROUTE, + ATTR_TRIP, ATTR_DIRECTION_ID, ATTR_DUE_IN, ATTR_DUE_AT, @@ -95,16 +96,29 @@ def get_gtfs_feed_entities(url: str, headers, label: str): ) feed.ParseFromString(response.content) - + #_LOGGER.debug("Feed entity: %s", feed.entity) return feed.entity def get_next_services(self): self.data = self._get_rt_route_statuses self._stop = self._stop_id self._route = self._route_id + self._trip = self._trip_id self._direction = self._direction + _LOGGER.debug("RT route: %s", self._route) + _LOGGER.debug("RT trip: %s", self._trip) + _LOGGER.debug("RT stop: %s", self._stop) + _LOGGER.debug("RT direction: %s", self._direction) next_services = self.data.get(self._route, {}).get(self._direction, {}).get(self._stop, []) - + _LOGGER.debug("Next services route_id: %s", next_services) + if not next_services: + self._direction = 0 + self.data2 = self._get_rt_trip_statuses + next_services = self.data2.get(self._trip, {}).get(self._direction, {}).get(self._stop, []) + 
_LOGGER.debug("Next services trip_id: %s", next_services) + if next_services: + _LOGGER.debug("Next services trip_id[0].arrival_time: %s", next_services[0].arrival_time) + if self.hass.config.time_zone is None: _LOGGER.error("Timezone is not set in Home Assistant configuration") timezone = "UTC" @@ -128,6 +142,7 @@ def get_next_services(self): ATTR_DUE_IN: due_in, ATTR_STOP_ID: self._stop, ATTR_ROUTE: self._route, + ATTR_TRIP: self._trip, ATTR_DIRECTION_ID: self._direction, ATTR_LATITUDE: "", ATTR_LONGITUDE: "" @@ -183,19 +198,19 @@ def __init__(self, arrival_time, position): # OUTCOMMENTED as spamming even debig log # If delimiter specified split the route ID in the gtfs rt feed #log_debug( - # [ - # "Received Trip ID", - # entity.trip_update.trip.trip_id, - # "Route ID:", - # entity.trip_update.trip.route_id, - # "direction ID", - # entity.trip_update.trip.direction_id, - # "Start Time:", - # entity.trip_update.trip.start_time, - # "Start Date:", - # entity.trip_update.trip.start_date, - # ], - # 1, + #[ + # "Received Trip ID", + # entity.trip_update.trip.trip_id, + # "Route ID:", + # entity.trip_update.trip.route_id, + # "direction ID", + # entity.trip_update.trip.direction_id, + # "Start Time:", + # entity.trip_update.trip.start_time, + # "Start Date:", + # entity.trip_update.trip.start_date, + #], + #1, #) if self._route_delimiter is not None: route_id_split = entity.trip_update.trip.route_id.split( @@ -221,7 +236,7 @@ def __init__(self, arrival_time, position): if route_id not in departure_times: departure_times[route_id] = {} - + if entity.trip_update.trip.direction_id is not None: direction_id = str(entity.trip_update.trip.direction_id) else: @@ -242,33 +257,33 @@ def __init__(self, arrival_time, position): else: stop_time = stop.arrival.time #log_debug( - #[ - # "Stop:", - # stop_id, - # "Stop Sequence:", - # stop.stop_sequence, - # "Stop Time:", - # stop_time, - #], - #2, + # [ + # "Stop:", + # stop_id, + # "Stop Sequence:", + # stop.stop_sequence, + # "Stop Time:", + # stop_time, + # ], + # 2, #) # Ignore arrival times in the past if due_in_minutes(datetime.fromtimestamp(stop_time)) >= 0: - log_debug( - [ - "Adding route ID", - route_id, - "trip ID", - entity.trip_update.trip.trip_id, - "direction ID", - entity.trip_update.trip.direction_id, - "stop ID", - stop_id, - "stop time", - stop_time, - ], - 3, - ) + #log_debug( + # [ + # "Adding route ID", + # route_id, + # "trip ID", + # entity.trip_update.trip.trip_id, + # "direction ID", + # entity.trip_update.trip.direction_id, + # "stop ID", + # stop_id, + # "stop time", + # stop_time, + # ], + # 3, + #) details = StopDetails( datetime.fromtimestamp(stop_time), @@ -287,8 +302,104 @@ def __init__(self, arrival_time, position): ) self.info = departure_times - + #_LOGGER.debug("Departure times: %s", departure_times) return departure_times + +def get_rt_trip_statuses(self): + + vehicle_positions = {} + + if self._vehicle_position_url != "" : + vehicle_positions = get_rt_vehicle_positions(self) + + class StopDetails: + def __init__(self, arrival_time, position): + self.arrival_time = arrival_time + self.position = position + + departure_times = {} + + feed_entities = get_gtfs_feed_entities( + url=self._trip_update_url, headers=self._headers, label="trip data" + ) + + for entity in feed_entities: + if entity.HasField("trip_update"): + trip_id = entity.trip_update.trip.trip_id + #_LOGGER.debug("RT Trip, trip: %s", trip) + #_LOGGER.debug("RT Trip, trip_id: %s", self._trip_id) + + if trip_id == self._trip_id: + _LOGGER.debug("RT Trip, found 
trip: %s", trip_id) + + if trip_id not in departure_times: + departure_times[trip_id] = {} + + if entity.trip_update.trip.direction_id is not None: + direction_id = str(entity.trip_update.trip.direction_id) + else: + direction_id = DEFAULT_DIRECTION + if direction_id not in departure_times[trip_id]: + departure_times[trip_id][direction_id] = {} + + for stop in entity.trip_update.stop_time_update: + stop_id = stop.stop_id + if not departure_times[trip_id][direction_id].get( + stop_id + ): + departure_times[trip_id][direction_id][stop_id] = [] + # Use stop arrival time; + # fall back on departure time if not available + if stop.arrival.time == 0: + stop_time = stop.departure.time + else: + stop_time = stop.arrival.time + #log_debug( + # [ + # "Stop:", + # stop_id, + # "Stop Sequence:", + # stop.stop_sequence, + # "Stop Time:", + # stop_time, + # ], + # 2, + #) + # Ignore arrival times in the past + if due_in_minutes(datetime.fromtimestamp(stop_time)) >= 0: + #log_debug( + # [ + # "Adding trip ID", + # entity.trip_update.trip.trip_id, + # "direction ID", + # entity.trip_update.trip.direction_id, + # "stop ID", + # stop_id, + # "stop time", + # stop_time, + # ], + # 3, + #) + + details = StopDetails( + datetime.fromtimestamp(stop_time), + [d["properties"].get(entity.trip_update.trip.trip_id) for d in vehicle_positions], + ) + departure_times[trip_id][direction_id][ + stop_id + ].append(details) + + # Sort by arrival time + for trip in departure_times: + for direction in departure_times[trip]: + for stop in departure_times[trip][direction]: + departure_times[trip][direction][stop].sort( + key=lambda t: t.arrival_time + ) + + self.info = departure_times + #_LOGGER.debug("Departure times Trip: %s", departure_times) + return departure_times def get_rt_vehicle_positions(self): feed_entities = get_gtfs_feed_entities( From 17722b79bc786c2a34349350258e30f4c3e880b6 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 16:33:52 +0100 Subject: [PATCH 31/42] Fix instability if no data Fixes sensor crash if no data and restart on day with no data --- custom_components/gtfs2/const.py | 1 + custom_components/gtfs2/coordinator.py | 9 +++--- custom_components/gtfs2/gtfs_helper.py | 2 -- custom_components/gtfs2/sensor.py | 39 +++++++++++++++----------- 4 files changed, 29 insertions(+), 22 deletions(-) diff --git a/custom_components/gtfs2/const.py b/custom_components/gtfs2/const.py index c148e92..2f0f914 100644 --- a/custom_components/gtfs2/const.py +++ b/custom_components/gtfs2/const.py @@ -26,6 +26,7 @@ ATTR_DROP_OFF_DESTINATION = "destination_stop_drop_off_type_state" ATTR_DROP_OFF_ORIGIN = "origin_stop_drop_off_type_state" ATTR_INFO = "info" +ATTR_INFO_RT = "info_realtime" ATTR_OFFSET = CONF_OFFSET ATTR_LAST = "last" ATTR_LOCATION_DESTINATION = "destination_station_location_type_name" diff --git a/custom_components/gtfs2/coordinator.py b/custom_components/gtfs2/coordinator.py index ffae24b..eda229c 100644 --- a/custom_components/gtfs2/coordinator.py +++ b/custom_components/gtfs2/coordinator.py @@ -66,7 +66,8 @@ async def _async_update_data(self) -> dict[str, str]: "name": data["name"], "file": data["file"], "extracting": False, - "next_departure": {} + "next_departure": {}, + "next_departure_realtime_attr": {} } if check_extracting(self): @@ -126,15 +127,15 @@ async def _async_update_data(self) -> dict[str, str]: _LOGGER.error("Error getting entity route_id for realtime data, for origin: %s with error: %s", data["origin"], ex) self._route_id = 
data["route"].split(": ")[0] self._stop_id = data["origin"].split(": ")[0] - self._trip_id = self._data["next_departure"]["trip_id"] + self._trip_id = self._data.get('next_departure', {}).get('trip_id', None) self._direction = data["direction"] self._relative = False try: self._get_rt_route_statuses = await self.hass.async_add_executor_job(get_rt_route_statuses, self) self._get_rt_trip_statuses = await self.hass.async_add_executor_job(get_rt_trip_statuses, self) self._get_next_service = await self.hass.async_add_executor_job(get_next_services, self) - self._data["next_departure"]["next_departure_realtime_attr"] = self._get_next_service - self._data["next_departure"]["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.utcnow() + self._data["next_departure_realtime_attr"] = self._get_next_service + self._data["next_departure_realtime_attr"]["gtfs_rt_updated_at"] = dt_util.utcnow() except Exception as ex: # pylint: disable=broad-except _LOGGER.error("Error getting gtfs realtime data, for origin: %s with error: %s", data["origin"], ex) else: diff --git a/custom_components/gtfs2/gtfs_helper.py b/custom_components/gtfs2/gtfs_helper.py index f7d4835..b7eaa7b 100644 --- a/custom_components/gtfs2/gtfs_helper.py +++ b/custom_components/gtfs2/gtfs_helper.py @@ -322,8 +322,6 @@ def get_next_departure(self): "next_departures": timetable_remaining, "next_departures_lines": timetable_remaining_line, "next_departures_headsign": timetable_remaining_headsign, - "gtfs_updated_at": dt_util.utcnow().isoformat(), - "next_departure_realtime_attr": {}, } _LOGGER.debug("Data returned: %s", data_returned) diff --git a/custom_components/gtfs2/sensor.py b/custom_components/gtfs2/sensor.py index 5073162..a2ddd2d 100644 --- a/custom_components/gtfs2/sensor.py +++ b/custom_components/gtfs2/sensor.py @@ -20,6 +20,7 @@ ATTR_DROP_OFF_ORIGIN, ATTR_FIRST, ATTR_INFO, + ATTR_INFO_RT, ATTR_LAST, ATTR_LOCATION_DESTINATION, ATTR_LOCATION_ORIGIN, @@ -123,7 +124,8 @@ def _update_attrs(self): # noqa: C901 PLR0911 self.destination = self.coordinator.data["destination"].split(": ")[0] self._include_tomorrow = self.coordinator.data["include_tomorrow"] self._offset = self.coordinator.data["offset"] - self._departure = self.coordinator.data["next_departure"] + self._departure = self.coordinator.data.get("next_departure",None) + self._departure_rt = self.coordinator.data.get("next_departure_realtime_attr",None) self._available = False self._icon = ICON self._state: datetime.datetime | None = None @@ -157,12 +159,12 @@ def _update_attrs(self): # noqa: C901 PLR0911 if not self._departure: self._trip = None else: - trip_id = self._departure["trip_id"] + trip_id = self._departure.get("trip_id") if not self.extracting and (not self._trip or self._trip.trip_id != trip_id): _LOGGER.debug("Fetching trip details for %s", trip_id) self._trip = self._pygtfs.trips_by_id(trip_id)[0] - route_id = self._departure["route_id"] + route_id = self._departure.get("route_id") if not self.extracting and (not self._route or self._route.route_id != route_id): _LOGGER.debug("Fetching route details for %s", route_id) self._route = self._pygtfs.routes_by_id(route_id)[0] @@ -172,7 +174,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 if not self._departure: self._next_departures = None else: - self._next_departures = self._departure["next_departures"] + self._next_departures = self._departure.get("next_departures",None) # Fetch agency details exactly once if self._agency is None and self._route: @@ -196,7 +198,7 @@ def _update_attrs(self): # noqa: C901 
PLR0911 elif self._agency: _LOGGER.debug( "Self._departure time for state value TZ: %s ", - {self._departure["departure_time"]}, + {self._departure.get("departure_time")}, ) self._state = self._departure["departure_time"].replace( tzinfo=dt_util.get_time_zone(self._agency.agency_timezone) @@ -204,9 +206,9 @@ def _update_attrs(self): # noqa: C901 PLR0911 else: _LOGGER.debug( "Self._departure time from helper: %s", - {self._departure["departure_time"]}, + {self._departure.get("departure_time")}, ) - self._state = self._departure["departure_time"] + self._state = self._departure.get("departure_time") # settin state value self._attr_native_value = self._state @@ -232,7 +234,7 @@ def _update_attrs(self): # noqa: C901 PLR0911 # Add departure information if self._departure: self._attributes[ATTR_ARRIVAL] = dt_util.as_utc( - self._departure["arrival_time"] + self._departure.get("arrival_time") ).isoformat() self._attributes[ATTR_DAY] = self._departure["day"] @@ -385,16 +387,21 @@ def _update_attrs(self): # noqa: C901 PLR0911 self._attributes["gtfs_updated_at"] = self.coordinator.data[ "gtfs_updated_at"] - if "next_departure_realtime_attr" in self._departure: - _LOGGER.debug("next dep realtime attr: %s", self._departure["next_departure_realtime_attr"]) + if self._departure_rt: + _LOGGER.debug("next dep realtime attr: %s", self._departure_rt) # Add next departure realtime to the right level, only if populated - if "gtfs_rt_updated_at" in self._departure["next_departure_realtime_attr"]: - self._attributes["gtfs_rt_updated_at"] = self._departure["next_departure_realtime_attr"]["gtfs_rt_updated_at"] - self._attributes["next_departure_realtime"] = self._departure["next_departure_realtime_attr"]["Due in"] - self._attributes["latitude"] = self._departure["next_departure_realtime_attr"]["latitude"] - self._attributes["longitude"] = self._departure["next_departure_realtime_attr"]["longitude"] + if "gtfs_rt_updated_at" in self._departure_rt: + self._attributes["gtfs_rt_updated_at"] = self._departure_rt["gtfs_rt_updated_at"] + self._attributes["next_departure_realtime"] = self._departure_rt["Due in"] + self._attributes["latitude"] = self._departure_rt["latitude"] + self._attributes["longitude"] = self._departure_rt["longitude"] + if ATTR_INFO_RT in self._attributes: + del self._attributes[ATTR_INFO_RT] else: - _LOGGER.debug("No next departure realtime attributes") + _LOGGER.debug("No next departure realtime attributes") + self._attributes[ATTR_INFO_RT] = ( + "No realtime information" + ) self._attr_extra_state_attributes = self._attributes return self._attr_extra_state_attributes From eeb9c97c237449b84a4486b5fa11c982afd39a8e Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 16:39:16 +0100 Subject: [PATCH 32/42] Reduce risk on failure of new route Check config procedure changed to include 'tomorrow' thereby increasing chance of finding the start/stop and complete the setup --- custom_components/gtfs2/config_flow.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/custom_components/gtfs2/config_flow.py b/custom_components/gtfs2/config_flow.py index b3d4eaf..47c3cfc 100644 --- a/custom_components/gtfs2/config_flow.py +++ b/custom_components/gtfs2/config_flow.py @@ -217,7 +217,7 @@ async def _check_config(self, data): "origin": data["origin"].split(": ")[0], "destination": data["destination"].split(": ")[0], "offset": 0, - "include_tomorrow": data["include_tomorrow"], + "include_tomorrow": True, "gtfs_dir": DEFAULT_PATH, "name": data["name"], 
"next_departure": None, From dd6712df5ca759da2b81635a4c1e8c84e72d8b9f Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:11:19 +0100 Subject: [PATCH 33/42] Update README.md --- README.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 54db8ea..d7387a8 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,15 @@ [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 -This is an adaptation of the GTFS integration in HA Core, enhancements: +This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: - configuration via the GUI, no configuration.yaml needed - Uses selected route to further select start/end stops - Shows next 10 departures on the same stretch start/end , including alternative transport lines if applicable - allows to load/update/delete datasources in gtfs2 folder -- added a sservice to update the GTFS datasource, e.g. calling the service via automation -- translations: at present only English and French +- Option to add gtfs realtime source/url +- Option to add gtfs realtime vehicle location source/url, generates geojson file which can be used for tracking vehicle on map card +- added a service to update the GTFS datasource, e.g. for calling the service via automation +- translations: English and French ## Difference with GTFS HA core (outside of GUI setup) Core GTFS uses start + stop, it then determines every option between them and provides the next best option, regardless of the line/route @@ -17,10 +19,11 @@ Core GTFS uses start + stop, it then determines every option between them and pr ***Solution/workaround in GTFS2***: attribute added: next_departure_line shows all next departues with their line/means-of-transport. So even if you select a route first and then two stops, the attibutes will still show alternatives between those 2 stops, if applicable. 
## Updates -202311DD +20231126 - realtime vehicle tracking with geojson output - workflow tweaks - extend update service call +- increase stability with reboots, loss of data(source) 20231110: adding features: - new attribute: next_departure_headsigns - adding route shortname in selection/list to overcome data discrepancies between short name and long name From 338aaa0a3085b2933d9f620395787b34d2eff521 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:12:59 +0100 Subject: [PATCH 34/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index d7387a8..e20fe84 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) +![GitHub release (with filter)](https://img.shields.io/github/v/release/aohzan/hass-prixcarburant) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant)[![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From 9d8ae0ff602cec0b32771c5712772d38fbc0135c Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:13:28 +0100 Subject: [PATCH 35/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e20fe84..a4962bc 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![GitHub release (with filter)](https://img.shields.io/github/v/release/aohzan/hass-prixcarburant) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant)[![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) +![GitHub release (with filter)](https://img.shields.io/github/v/release/aohzan/hass-prixcarburant) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From f888dc88d7cfeb4751e65bcab2ac82ef64bb409c Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:13:59 +0100 Subject: [PATCH 36/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a4962bc..15d646b 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![GitHub release (with filter)](https://img.shields.io/github/v/release/aohzan/hass-prixcarburant) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) +![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From 86232e8c51bc2023fda3d8f5c219b8160cff664b Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:14:20 +0100 Subject: [PATCH 37/42] Update README.md --- README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 15d646b..5ef37a6 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/aohzan/hass-prixcarburant) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) +![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From 6e2f49bf04f4b03f04b4831cf506dd5de86b6006 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:15:35 +0100 Subject: [PATCH 38/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5ef37a6..dfd6aa9 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) +![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) [![Donate](https://img.shields.io/badge/$-support-ff69b4.svg?style=flat)](https://github.com/sponsors/vingerha) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From 8f31cb6e2caf3dac13d186e887941b54ea1b925c Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:18:07 +0100 Subject: [PATCH 39/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index dfd6aa9..5ef37a6 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) [![Donate](https://img.shields.io/badge/$-support-ff69b4.svg?style=flat)](https://github.com/sponsors/vingerha) +![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) ![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) # GTFS2 This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: From c5b480a994d5ccafaebb2f865510c1cfec699540 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:19:43 +0100 Subject: [PATCH 40/42] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 5ef37a6..d85bb23 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,14 @@ ![GitHub release (with filter)](https://img.shields.io/github/v/release/vingerha/gtfs2) 
![GitHub](https://img.shields.io/github/license/vingerha/gtfs2) [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/custom-components/hacs) -# GTFS2 +# GTFS2 for Static and RealTime This is an adaptation of the GTFS integration in HomeAssistant Core, enhancements: - configuration via the GUI, no configuration.yaml needed - Uses selected route to further select start/end stops - Shows next 10 departures on the same stretch start/end , including alternative transport lines if applicable -- allows to load/update/delete datasources in gtfs2 folder +- allows to load/update/delete datasources in gtfs2 folder from the GUI - Option to add gtfs realtime source/url - Option to add gtfs realtime vehicle location source/url, generates geojson file which can be used for tracking vehicle on map card -- added a service to update the GTFS datasource, e.g. for calling the service via automation +- A service to update the GTFS datasource, e.g. for calling the service via automation - translations: English and French ## Difference with GTFS HA core (outside of GUI setup) From 44d7f66b6140dc95b09b4657c32c8100c1bc7a2c Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:21:12 +0100 Subject: [PATCH 41/42] Update README.md --- README.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index d85bb23..47437e7 100644 --- a/README.md +++ b/README.md @@ -33,9 +33,8 @@ Core GTFS uses start + stop, it then determines every option between them and pr 20231104: initial version ## ToDo's / In Development / Known Issues -- Issue when updating the source db, it throws a db locked error OR pygtfs. This when an existing entity for the same db starts polling it at the same time - Issue when updating the source db: pygtfs error: at the moment unclear as errors fluctuate, possibly a lack of resources (mem/cpu) -- bypass setup control for routes that have no trips 'today'. The configuration does a spot-check if start/end actually return data with the idea to validate the setup. However, this only checks for 'today' so if your route actually has no transport running at the day of setup (say Sunday or Holiday) then it will reject it. +- get realtime data for sources that do not base on routes, e.g. France's TER realtime source only uses trip_id ## Installation via HACS : From 30470de70ce9553a9cd36b0cc20b11be11c57ea5 Mon Sep 17 00:00:00 2001 From: Arjan <44190435+vingerha@users.noreply.github.com> Date: Sun, 26 Nov 2023 17:23:24 +0100 Subject: [PATCH 42/42] Update README.md --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 47437e7..db99fc2 100644 --- a/README.md +++ b/README.md @@ -38,12 +38,11 @@ Core GTFS uses start + stop, it then determines every option between them and pr ## Installation via HACS : -In HACS, select the 3-dots and then custom repositories -Add : +1. In HACS, select the 3-dots and then custom repositories, add : - URL : https://github.com/vingerha/gtfs2 - Category : Integration -In Settings > Devices & Sevices +2. In Settings > Devices & Services - add the integration, note that this is GTFS2 ## Configuration
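As an illustration of the datasource-update service mentioned in the README changes above, a minimal automation sketch is shown below. The service name follows the `update_gtfs` registration in this integration; the data fields (`file`, `url`, `extract_from`) are assumptions taken from the config-flow keys used in these patches, not a confirmed service schema.

```yaml
# Illustrative sketch only: refreshes a GTFS datasource nightly via the
# integration's update service. The data fields (file / url / extract_from)
# are assumed from the config-flow keys in this patch set and may differ
# from the final service schema.
automation:
  - alias: "Refresh GTFS datasource nightly"
    trigger:
      - platform: time
        at: "03:00:00"
    action:
      - service: gtfs2.update_gtfs
        data:
          file: "my_transport_service"         # datasource name, without .zip
          extract_from: "url"                   # or "zip" if the file is already in the gtfs2 folder
          url: "https://example.com/gtfs.zip"   # leave as "na" when extract_from is "zip"
```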