# Source: AutomationDataset/bacco007/bacco007_automations.yaml
# (4880 lines, 180 KiB, YAML)
---
# [Sonos Favourites] — generated boilerplate, one automation per station.
# Each entry: local-only webhook (POST/PUT) -> toggles the matching
# script.sonos_favourites_* and records the station name in
# input_text.sonos_playlist_playing.
- alias: '[Sonos Favourites] Play 102.7 KIIS-FM Los Angeles'
  description: Play 102.7 KIIS-FM Los Angeles on Sonos
  id: sonos_favourites_play_102_7_kiis_fm_los_angeles
  trigger:
    - platform: webhook
      webhook_id: sonos_102_7_kiis_fm_los_angeles
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_102_7_kiis_fm_los_angeles
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: '102.7 KIIS-FM Los Angeles'
- alias: '[Sonos Favourites] Play 1170 SEN'
  description: Play 1170 SEN on Sonos
  id: sonos_favourites_play_1170_sen
  trigger:
    - platform: webhook
      webhook_id: sonos_1170_sen
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_1170_sen
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: '1170 SEN'
- alias: '[Sonos Favourites] Play 93.5 Triple M Dubbo'
  description: Play 93.5 Triple M Dubbo on Sonos
  id: sonos_favourites_play_93_5_triple_m_dubbo
  trigger:
    - platform: webhook
      webhook_id: sonos_93_5_triple_m_dubbo
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_93_5_triple_m_dubbo
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: '93.5 Triple M Dubbo'
- alias: '[Sonos Favourites] Play ABC News'
  description: Play ABC News on Sonos
  id: sonos_favourites_play_abc_news
  trigger:
    - platform: webhook
      webhook_id: sonos_abc_news
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_abc_news
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'ABC News'
- alias: '[Sonos Favourites] Play ABC New England North West'
  description: Play ABC New England North West on Sonos
  id: sonos_favourites_play_abc_new_england_north_west
  trigger:
    - platform: webhook
      webhook_id: sonos_abc_new_england_north_west
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_abc_new_england_north_west
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'ABC New England North West'
- alias: '[Sonos Favourites] Play ABC Radio National (Sydney, NSW)'
  description: Play ABC Radio National (Sydney, NSW) on Sonos
  id: sonos_favourites_play_abc_radio_national_sydney_nsw
  trigger:
    - platform: webhook
      webhook_id: sonos_abc_radio_national_sydney_nsw
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_abc_radio_national_sydney_nsw
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'ABC Radio National (Sydney, NSW)'
- alias: '[Sonos Favourites] Play ABC Radio Sydney'
  description: Play ABC Radio Sydney on Sonos
  id: sonos_favourites_play_abc_radio_sydney
  trigger:
    - platform: webhook
      webhook_id: sonos_abc_radio_sydney
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_abc_radio_sydney
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'ABC Radio Sydney'
- alias: '[Sonos Favourites] Play ABC Sport'
  description: Play ABC Sport on Sonos
  id: sonos_favourites_play_abc_sport
  trigger:
    - platform: webhook
      webhook_id: sonos_abc_sport
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_abc_sport
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'ABC Sport'
- alias: '[Sonos Favourites] Play Absolute Radio 00s'
  description: Play Absolute Radio 00s on Sonos
  id: sonos_favourites_play_absolute_radio_00s
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_00s
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_00s
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio 00s'
- alias: '[Sonos Favourites] Play Absolute Radio 10s'
  description: Play Absolute Radio 10s on Sonos
  id: sonos_favourites_play_absolute_radio_10s
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_10s
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_10s
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio 10s'
- alias: '[Sonos Favourites] Play Absolute Radio 20s'
  description: Play Absolute Radio 20s on Sonos
  id: sonos_favourites_play_absolute_radio_20s
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_20s
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_20s
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio 20s'
- alias: '[Sonos Favourites] Play Absolute Radio 80s'
  description: Play Absolute Radio 80s on Sonos
  id: sonos_favourites_play_absolute_radio_80s
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_80s
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_80s
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio 80s'
- alias: '[Sonos Favourites] Play Absolute Radio 90s'
  description: Play Absolute Radio 90s on Sonos
  id: sonos_favourites_play_absolute_radio_90s
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_90s
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_90s
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio 90s'
- alias: '[Sonos Favourites] Play Absolute Radio Classic Rock'
  description: Play Absolute Radio Classic Rock on Sonos
  id: sonos_favourites_play_absolute_radio_classic_rock
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_classic_rock
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_classic_rock
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio Classic Rock'
- alias: '[Sonos Favourites] Play Absolute Radio UK (AAC)'
  description: Play Absolute Radio UK (AAC) on Sonos
  id: sonos_favourites_play_absolute_radio_uk_aac
  trigger:
    - platform: webhook
      webhook_id: sonos_absolute_radio_uk_aac
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_absolute_radio_uk_aac
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'Absolute Radio UK (AAC)'
# [Activity] Set Driving State: mirror the phone's activity sensor into
# input_boolean.activity_thomas_driving (on while 'Automotive').
- id: activity_driving
  alias: '[Activity] Set Driving State'
  initial_state: true
  trigger:
    # Fires both entering and leaving 'Automotive'.
    - platform: state
      entity_id: sensor.activity_thomas_current_activity
      to: Automotive
    - platform: state
      entity_id: sensor.activity_thomas_current_activity
      from: Automotive
  action:
    # Service name is templated: turn_on while driving, turn_off otherwise.
    - service: "input_boolean.turn_{{ 'on' if is_state('sensor.activity_thomas_current_activity', 'Automotive') else 'off' }}"
      entity_id: input_boolean.activity_thomas_driving
  mode: single
# [Adaptive Lighting] Turn off sleep mode on both adaptive-lighting switches
# at sunrise.
- id: '1653186204936'
  alias: '[Adaptive Lighting] - Turn off Sleep Mode at Sunrise'
  description: ''
  trigger:
    - platform: sun
      event: sunrise
      offset: 0
  condition: []
  action:
    - service: switch.turn_off
      data: {}
      target:
        entity_id:
          - switch.adaptive_lighting_sleep_mode_office_adaptive
          - switch.adaptive_lighting_sleep_mode_bedroom_adaptive
  mode: single
# [Alarm Clock] Main alarm: at the configured alarm time (or when the debug
# boolean is on), fade a light up and start music on the default Sonos; stop
# both when the one-hour timer finishes.
- alias: '[Alarm Clock] Alarm Clock'
  description: ''
  trigger:
    # Relies on sensor.time updating each minute (time_date integration) —
    # TODO confirm that sensor exists elsewhere in this config.
    - platform: template
      value_template: >-
        {{ is_state('binary_sensor.alarm_today', 'on') and
           states('sensor.time') == state_attr('binary_sensor.alarm_today', 'alarm_time') }}
      id: alarm_triggered
    - platform: event
      event_type: timer.finished
      event_data:
        entity_id: timer.alarm_length
      id: alarm_timer_finished
  condition: []
  action:
    # Defaults are read once per run and shared by both branches below.
    - variables:
        sonos_device: "{{ state_attr('sensor.alarm_defaults', 'default_sonos') }}"
        light_switchedon: "{{ state_attr('sensor.alarm_defaults', 'default_light') }}"
        music_selected: "{{ state_attr('sensor.alarm_defaults', 'default_music') }}"
    - choose:
        # Branch 1: alarm fired (or a manual debug run was requested).
        - conditions:
            - condition: or
              conditions:
                - condition: state
                  entity_id: input_boolean.alarm_debug_testing
                  state: 'on'
                - condition: trigger
                  id:
                    - alarm_triggered
          sequence:
            # Reset the debug flag so the test run is one-shot.
            - if:
                - condition: state
                  entity_id: input_boolean.alarm_debug_testing
                  state: 'on'
              then:
                - service: input_boolean.turn_off
                  data: {}
                  target:
                    entity_id: input_boolean.alarm_debug_testing
            - service: light.turn_on
              data:
                transition: 30
              target:
                entity_id: '{{ light_switchedon }}'
            - service: media_player.select_source
              data:
                entity_id: '{{ sonos_device }}'
                source: '{{ music_selected }}'
            - service: media_player.volume_set
              data:
                volume_level: 0.15
              target:
                entity_id: '{{ sonos_device }}'
            - service: timer.start
              data:
                # Quoted: a bare 01:00:00 is a YAML 1.1 sexagesimal integer.
                duration: '01:00:00'
              target:
                entity_id: timer.alarm_length
        # Branch 2: the one-hour timer elapsed — wind everything down.
        - conditions:
            - condition: trigger
              id: alarm_timer_finished
          sequence:
            - service: media_player.media_stop
              data: {}
              target:
                entity_id: '{{ sonos_device }}'
            - service: light.turn_off
              data:
                transition: 30
              target:
                entity_id: '{{ light_switchedon }}'
  mode: single
# [Alarm Clock] Keep input_select.alarm_music's option list in sync with the
# Sonos favourites sensor, preserving the currently selected option.
- alias: '[Alarm Clock] Set Wakeup Music Select'
  description: ''
  trigger:
    - platform: homeassistant
      event: start
    # NOTE(review): `to: null` appears intended to fire on every state change
    # of the favourites sensor — confirm this matches the installed HA
    # version's state-trigger semantics.
    - platform: state
      entity_id:
        - sensor.sonos_favorites
      to: null
  condition: []
  action:
    - variables:
        sonos_favourites: "{{ state_attr('sensor.sonos_favorites', 'items').values() | list }}"
        current_favourite: "{{ states('input_select.alarm_music') }}"
    - service: input_select.set_options
      data:
        options: '{{ sonos_favourites }}'
      target:
        entity_id: input_select.alarm_music
    # Re-apply the previous selection, since set_options resets it.
    - service: input_select.select_option
      data:
        option: '{{ current_favourite }}'
      target:
        entity_id: input_select.alarm_music
  mode: single
# [Presence] Fade two lights on when Thomas arrives home between ~15 min
# before sunset and sunrise.
- id: a9b997b8-84ac-4332-bf06-19957c0d4d19
  alias: '[Presence] Arrive Home > Lights On (Night)'
  description: ''
  trigger:
    - platform: zone
      entity_id: person.thomas_baxter
      zone: zone.home
      event: enter
  condition:
    - condition: or
      conditions:
        - condition: sun
          after: sunset
          # Quoted: bare -00:15:00 parses as a YAML 1.1 sexagesimal integer.
          after_offset: '-00:15:00'
        - condition: sun
          before: sunrise
  action:
    - service: light.turn_on
      metadata: {}
      data:
        transition: 20
        brightness_pct: 35
      target:
        device_id:
          - '126e91428c97177372f5ef7398eb537e'
          - '6623c3d12a6637f37fab2c6e24c41afc'
  mode: single
# [HA] Autogroup - Bulk: rebuild the dynamic groups (batteries, media
# players, docker containers, updates, alerts, ...) at startup, when startup
# completes, after a group reload, and every 15 minutes.
- id: ha_autogroup_bulk
  alias: '[HA] Autogroup - Bulk'
  mode: single
  max_exceeded: silent
  trigger:
    - platform: homeassistant
      id: startup
      event: start
    - platform: state
      entity_id: input_boolean.ha_startup_pending
      to: 'off'
    - platform: event
      event_type: call_service
      event_data:
        domain: group
        service: reload
    - platform: time_pattern
      minutes: /15
  action:
    # Give integrations time to create their entities after a restart.
    - if: "{{ trigger.id == 'startup' }}"
      then:
        - delay: 240
    - service: group.set
      data:
        object_id: battery_devices
        name: '[Group] Battery Devices'
        icon: mdi:battery
        entities: >-
          {%- for state in states.sensor
              if is_state_attr(state.entity_id, 'device_class', 'battery') and
                 (state.entity_id.endswith("_battery") or state.entity_id.endswith("_power"))
          %} {{ state.entity_id }}{%- if not loop.last -%}, {%- endif -%}
          {%- endfor %}
    - service: group.set
      data:
        object_id: media_players
        name: '[Group] Media Players'
        icon: mdi:media-network
        entities: "{{ states.media_player | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: lights
        name: '[Group] Lights'
        icon: mdi:lamps
        entities: "{{ states.light | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: automations
        name: '[Group] Automations'
        icon: mdi:home-automation
        entities: "{{ states.automation | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: remotes
        name: '[Group] Remotes'
        icon: mdi:remote
        entities: "{{ states.remote | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: scripts
        name: '[Group] Scripts'
        icon: mdi:script
        entities: "{{ states.script | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: switches
        name: '[Group] Switches'
        # Was mdi:script — copy-paste from the scripts group above.
        icon: mdi:toggle-switch
        entities: "{{ states.switch | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: calendars
        name: '[Group] Calendars'
        icon: mdi:calendar
        entities: "{{ states.calendar | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: calendars_sport
        name: '[Group] Calendars (Sport)'
        icon: mdi:calendar
        entities: >-
          {{ states.calendar
             | selectattr('entity_id', 'search', 'calendar.ical_*')
             | rejectattr('entity_id', 'search', 'home_assistant*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: geo_location
        name: '[Group] Geo Locations'
        icon: mdi:map-marker-alert
        entities: "{{ states.geo_location | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: weather
        name: '[Group] Weather'
        icon: mdi:weather-cloudy-clock
        entities: "{{ states.weather | map(attribute='entity_id') | list | sort }}"
    # NOTE(review): 'people' was previously set twice in this action (icons
    # mdi:account-group then mdi:account-multiple); the later call won, so
    # only that one is kept here.
    - service: group.set
      data:
        object_id: people
        name: '[Group] People'
        icon: mdi:account-multiple
        entities: "{{ states.person | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: cameras
        name: '[Group] Cameras'
        icon: mdi:cctv
        entities: "{{ states.camera | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: alert_controls
        name: '[Group] Alert Controls'
        icon: mdi:alert-decagram
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'input_boolean.alerts_*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: alert_monitors
        name: '[Group] Alert Monitors'
        icon: mdi:alert-decagram
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'binary_sensor.alerts_*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: alerts
        name: '[Group] Alerts'
        icon: mdi:alert-decagram
        entities: "{{ states.alert | map(attribute='entity_id') | list | sort }}"
    - service: group.set
      data:
        object_id: connectivity_monitors
        name: '[Group] Connectivity Monitors'
        icon: mdi:connection
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'binary_sensor.ha_connected_integration_*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: waqi
        name: '[Group] WAQI'
        icon: mdi:smog
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'waqi*')
             | rejectattr('entity_id', 'search', 'alert*')
             | rejectattr('entity_id', 'search', 'group*')
             | map(attribute='entity_id') | list | sort }}
    # One containers group and one switches group per MonitorDocker host
    # (deduplicated from five near-identical copies of the same two calls).
    - repeat:
        for_each:
          - { host: pi4srv, label: Pi4Srv }
          - { host: tbsh1, label: tbsh1 }
          - { host: tbsh2, label: tbsh2 }
          - { host: tbsh3, label: tbsh3 }
          - { host: tbsh5, label: tbsh5 }
        sequence:
          - service: group.set
            data:
              object_id: "docker_{{ repeat.item.host }}_containers"
              name: '[Group] Docker {{ repeat.item.label }} Containers'
              icon: mdi:docker
              entities: >-
                {{ states.sensor
                   | selectattr('entity_id', 'search', 'sensor.' ~ repeat.item.host ~ '_monitordocker_')
                   | rejectattr('entity_id', 'search', '_1cpu')
                   | rejectattr('entity_id', 'search', '_containers_')
                   | rejectattr('entity_id', 'search', '_cpu')
                   | rejectattr('entity_id', 'search', '_images')
                   | rejectattr('entity_id', 'search', '_memory')
                   | rejectattr('entity_id', 'search', '_version')
                   | map(attribute='entity_id') | list | sort }}
          - service: group.set
            data:
              object_id: "docker_{{ repeat.item.host }}_switches"
              name: '[Group] Docker {{ repeat.item.label }} Switches'
              icon: mdi:docker
              entities: >-
                {{ states.switch
                   | selectattr('entity_id', 'search', repeat.item.host ~ '_monitordocker_')
                   | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: updates_homeassistant
        name: '[Updates] Home Assistant'
        icon: mdi:home-assistant
        entities: >-
          {{ expand(integration_entities('Supervisor'))
             | selectattr('domain', 'eq', 'update')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: updates_hacs
        name: '[Updates] HA Community Store'
        icon: mdi:home-assistant
        entities: >-
          {{ expand(integration_entities('hacs'))
             | selectattr('domain', 'eq', 'update')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: updates_docker
        name: '[Updates] Docker'
        icon: mdi:docker
        entities: >-
          {{ states.update
             | selectattr('entity_id', 'search', 'wud')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: updates_unifi
        name: '[Updates] Unifi'
        icon: si:ubiquiti
        entities: >-
          {{ expand(integration_entities('unifi'))
             | selectattr('domain', 'eq', 'update')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: updates_zigbee2mqtt
        name: '[Updates] Zigbee2MQTT'
        icon: mdi:zigbee
        entities: >-
          {{ states.update
             | selectattr('entity_id', 'search', 'z2m')
             | map(attribute='entity_id') | list | sort }}
    # Everything in the update domain not already claimed by a group above.
    - service: group.set
      data:
        object_id: updates_other
        name: '[Updates] Other'
        icon: mdi:update
        entities: >-
          {{ states.update
             | rejectattr('entity_id', 'search', 'wud')
             | rejectattr('entity_id', 'search', 'z2m')
             | rejectattr('entity_id', 'in', expand(integration_entities('hacs')) | map(attribute='entity_id') | list)
             | rejectattr('entity_id', 'in', expand(integration_entities('Supervisor')) | map(attribute='entity_id') | list)
             | rejectattr('entity_id', 'in', expand(integration_entities('unifi')) | map(attribute='entity_id') | list)
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: qbittorrent
        name: '[Group] qbittorrent'
        icon: mdi:download-box
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'qbittorrent_')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: rssfeeds
        name: '[Group] RSS Feeds'
        icon: mdi:rss-box
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'rssfeed_')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: addon_switch
        name: '[Group] Add-on Switches'
        icon: mdi:toggle-switch
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'switch.addon_*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: upcomingsport_withevents
        name: '[Group] Upcoming Sport, With Events'
        icon: mdi:soccer
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'sensor.upcoming_sport_')
             | rejectattr('entity_id', 'search', 'this_week')
             | rejectattr('entity_id', 'search', 'next_24_hours')
             | rejectattr('attributes.remaining_events', 'eq', 0)
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: upcomingsport_withoutevents
        name: '[Group] Upcoming Sport, Without Events'
        icon: mdi:soccer
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'sensor.upcoming_sport_')
             | rejectattr('entity_id', 'search', 'this_week')
             | rejectattr('entity_id', 'search', 'next_24_hours')
             | selectattr('attributes.remaining_events', 'eq', 0)
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: upcomingsport_thisweek
        name: '[Group] Upcoming Sport, This Week'
        icon: mdi:soccer
        # Test names normalized: '==' -> eq, '<=' -> le (Jinja aliases).
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'sensor.upcoming_sport_')
             | rejectattr('entity_id', 'search', 'this_week')
             | rejectattr('entity_id', 'search', 'next_24_hours')
             | rejectattr('attributes.remaining_events', 'eq', 0)
             | selectattr('attributes.nextevent_timeto', 'defined')
             | selectattr('attributes.nextevent_timeto', 'le', 7)
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: livetrafficnsw
        name: '[Group] Live Traffic NSW'
        icon: mdi:dump-truck
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'sensor.getdata_livetrafficnsw_')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: automations_sonos_favourites
        name: '[Group] Automations, Sonos Favourites'
        icon: si:sonos
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'automation.sonos_favourites*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: script_sonos_playlists
        name: '[Group] Scripts, Sonos Playlists'
        icon: si:sonos
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'script.sonos_favourites*')
             | map(attribute='entity_id') | list | sort }}
    - service: group.set
      data:
        object_id: hibp_breaches
        name: '[Group] Have I Been Pwned (HIBP) Breach Data'
        icon: mdi:virus-outline
        entities: >-
          {{ states
             | selectattr('entity_id', 'search', 'sensor.breaches_*')
             | map(attribute='entity_id') | list | sort }}
# Restart the Zigbee2MQTT add-on if either Zigbee lamp has been unavailable
# for 5 minutes. (Uses the newer triggers/actions schema.)
- id: '1751697921200'
  alias: Restart Zigbee2MQTT if lights unavailable
  description: ''
  triggers:
    - trigger: state
      entity_id:
        - light.office_lamp
        - light.bedroom_lamp
      to: unavailable
      for:
        hours: 0
        minutes: 5
        seconds: 0
  conditions: []
  actions:
    - action: hassio.addon_restart
      metadata: {}
      data:
        addon: 45df7312_zigbee2mqtt
  mode: single
# [HA] Log (system log + browser_mod popup) whenever automations are
# reloaded.
- id: ha_automations_reloaded
  alias: '[HA] Log Automation Reload'
  trigger:
    - platform: event
      event_type: automation_reloaded
  action:
    - service: system_log.write
      data:
        logger: '{{ this.entity_id }}'
        level: critical
        message: '**** Automations have been reloaded. ****'
    - service: browser_mod.notification
      data:
        duration: 30000
        message: '**** Automations have been reloaded. ****'
# [Kiosk] Toggle the tablet screensaver from either the kiosk_off sensor or
# Thomas's presence (5-minute debounce on presence). Two triggers
# intentionally share each id so both branches have two entry points.
- id: kiosk_turnonoffauto
  alias: '[Kiosk] Turn On/Off Automatically'
  description: ''
  trigger:
    # NOTE(review): binary_sensor.kiosk_off going 'off' maps to id kiosk_off
    # (screensaver ON) — the inverted naming looks deliberate but confirm.
    - platform: state
      entity_id:
        - binary_sensor.kiosk_off
      to: 'off'
      id: kiosk_off
    - platform: state
      entity_id:
        - binary_sensor.kiosk_off
      to: 'on'
      id: kiosk_on
    - platform: state
      entity_id:
        - person.thomas_baxter
      to: not_home
      id: kiosk_off
      for:
        hours: 0
        minutes: 5
        seconds: 0
    - platform: state
      entity_id:
        - person.thomas_baxter
      to: home
      id: kiosk_on
      for:
        hours: 0
        minutes: 5
        seconds: 0
  condition: []
  action:
    - choose:
        - conditions:
            - condition: trigger
              id:
                - kiosk_off
          sequence:
            - service: switch.turn_on
              target:
                entity_id: switch.galaxy_tab_s5e_screensaver
        - conditions:
            - condition: trigger
              id:
                - kiosk_on
          sequence:
            - service: switch.turn_off
              target:
                entity_id: switch.galaxy_tab_s5e_screensaver
  mode: single
# [Sonos Favourites] BBC stations — same webhook -> script.toggle +
# input_text bookkeeping pattern as the earlier Sonos entries.
- alias: '[Sonos Favourites] Play BBC Radio 1'
  description: Play BBC Radio 1 on Sonos
  id: sonos_favourites_play_bbc_radio_1
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_1
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_1
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 1'
- alias: '[Sonos Favourites] Play BBC Radio 1Xtra'
  description: Play BBC Radio 1Xtra on Sonos
  id: sonos_favourites_play_bbc_radio_1xtra
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_1xtra
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_1xtra
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 1Xtra'
- alias: '[Sonos Favourites] Play BBC Radio 1 Dance'
  description: Play BBC Radio 1 Dance on Sonos
  id: sonos_favourites_play_bbc_radio_1_dance
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_1_dance
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_1_dance
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 1 Dance'
- alias: '[Sonos Favourites] Play BBC Radio 1 Relax'
  description: Play BBC Radio 1 Relax on Sonos
  id: sonos_favourites_play_bbc_radio_1_relax
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_1_relax
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_1_relax
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 1 Relax'
- alias: '[Sonos Favourites] Play BBC Radio 2'
  description: Play BBC Radio 2 on Sonos
  id: sonos_favourites_play_bbc_radio_2
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_2
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_2
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 2'
- alias: '[Sonos Favourites] Play BBC Radio 3'
  description: Play BBC Radio 3 on Sonos
  id: sonos_favourites_play_bbc_radio_3
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_3
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_3
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 3'
- alias: '[Sonos Favourites] Play BBC Radio 4 Extra (Comedy)'
  description: Play BBC Radio 4 Extra (Comedy) on Sonos
  id: sonos_favourites_play_bbc_radio_4_extra_comedy
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_4_extra_comedy
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_4_extra_comedy
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 4 Extra (Comedy)'
- alias: '[Sonos Favourites] Play BBC Radio 4 LW'
  description: Play BBC Radio 4 LW on Sonos
  id: sonos_favourites_play_bbc_radio_4_lw
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_4_lw
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_4_lw
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 4 LW'
- alias: '[Sonos Favourites] Play BBC Radio 5 live'
  description: Play BBC Radio 5 live on Sonos
  id: sonos_favourites_play_bbc_radio_5_live
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_5_live
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_5_live
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 5 live'
- alias: '[Sonos Favourites] Play BBC Radio 5 live sports extra'
  description: Play BBC Radio 5 live sports extra on Sonos
  id: sonos_favourites_play_bbc_radio_5_live_sports_extra
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_5_live_sports_extra
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_5_live_sports_extra
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 5 live sports extra'
- alias: '[Sonos Favourites] Play BBC Radio 6 Music'
  description: Play BBC Radio 6 Music on Sonos
  id: sonos_favourites_play_bbc_radio_6_music
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_radio_6_music
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_radio_6_music
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC Radio 6 Music'
- alias: '[Sonos Favourites] Play BBC World Service News'
  description: Play BBC World Service News on Sonos
  id: sonos_favourites_play_bbc_world_service_news
  trigger:
    - platform: webhook
      webhook_id: sonos_bbc_world_service_news
      allowed_methods:
        - POST
        - PUT
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_bbc_world_service_news
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: 'BBC World Service News'
# [Home] At 17:00, if any binday_* sensor reports 1 day remaining, send a
# time-sensitive mobile notification listing those bins.
- alias: '[Home] Bin Day Notification'
  description: ''
  trigger:
    - platform: time
      at: '17:00:00'
  condition:
    # Only fire when the joined list of tomorrow's bins is non-empty.
    - condition: template
      value_template: >-
        {{ states.sensor
           | selectattr('entity_id', 'search', 'binday_')
           | selectattr('attributes.days', 'equalto', 1)
           | map(attribute='attributes.friendly_name')
           | list | join(', ') | length > 0 }}
  action:
    - service: notify.alert
      data:
        title: Tomorrow is Bin Day!
        message: >-
          Don't forget to put the following bins out: {{ states.sensor
          | selectattr('entity_id', 'search', 'binday_')
          | selectattr('attributes.days', 'equalto', 1)
          | map(attribute='attributes.friendly_name')
          | list | join(', ') }}
        data:
          push:
            badge: 1
            interruption-level: time-sensitive
          presentation_options:
            - alert
            - badge
# Webhook-triggered Sonos favourite: toggles the "Capital Anthems" playback
# script and records the selection for the dashboard.
- id: sonos_favourites_play_capital_anthems
  alias: "[Sonos Favourites] Play Capital Anthems"
  description: "Play Capital Anthems on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_capital_anthems
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_capital_anthems
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "Capital Anthems"
# Webhook-triggered Sonos favourite: toggles the "Capital London" playback
# script and records the selection for the dashboard.
- id: sonos_favourites_play_capital_london
  alias: "[Sonos Favourites] Play Capital London"
  description: "Play Capital London on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_capital_london
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_capital_london
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "Capital London"
# [Notifications] Clear Alert — when any monitored alert entity returns to
# 'off' or 'idle', clear the matching mobile notification (tagged with the
# alert's object_id). Runs in parallel so several alerts can clear at once.
- id: clear_alert
  alias: '[Notifications] Clear Alert'
  mode: parallel
  max: 25
  trigger:
  - platform: state
    id: mobile
    entity_id:
    - alert.weatherflow_cloud
    - alert.weatherflow_local
    - alert.zigbee_network
    - alert.test_alert
    to:
    - 'off'
    - idle
  action:
  # Temporarily disable the "notification cleared" automation so the
  # clear_notification below does not re-trigger it; stop_actions: false
  # lets any in-flight run of that automation finish.
  - service: automation.turn_off
    target:
      entity_id: automation.notifications_mobile_notification_cleared
    data:
      stop_actions: false
  - service: notify.alert
    data:
      message: clear_notification
      # Inner `data` is the notify payload's data field; the tag equals the
      # alert entity's object_id so only its notification is removed.
      data:
        tag: '{{ trigger.entity_id.split(''.'')[1] }}'
    continue_on_error: true
  # NOTE(review): dead branch — the only trigger in this automation carries
  # id 'mobile', so `trigger.id == 'alert'` can never be true and the
  # persistent notification is never dismissed. Confirm whether a trigger
  # with id 'alert' was removed or the condition should read 'mobile'.
  - if: '{{ trigger.id == ''alert'' }}'
    then:
    - service: persistent_notification.dismiss
      data:
        notification_id: '{{ trigger.entity_id.split(''.'')[1] }}'
      continue_on_error: true
  # Re-enable the automation disabled above.
  - service: automation.turn_on
    target:
      entity_id: automation.notifications_mobile_notification_cleared
# Clear Offline Integration Notification — when group.connectivity_monitors
# turns 'on' and every member (other than the aggregate offline-integrations
# sensor) is back online, clear the mobile notification and reset the counter.
# Fixes: added a stable `id:` (every sibling automation has one) and moved the
# input_number entity under `target:` for consistency with the rest of the file.
- id: clear_offline_integration_notification
  alias: Clear Offline Integration Notification
  trigger:
  - platform: state
    entity_id: group.connectivity_monitors
    to: 'on'
  condition:
  # True when no member (excluding the aggregate sensor) is still 'off'.
  - condition: template
    value_template: >-
      {{ expand('group.connectivity_monitors')
         | rejectattr('entity_id', 'in', ['sensor.ha_connected_integration_offline_integrations'])
         | selectattr('state', 'eq', 'off')
         | list
         | count == 0 }}
  action:
  - service: notify.alert
    data:
      message: clear_notification
      # Inner `data` is the notify payload; clears only the tagged notification.
      data:
        tag: offline_integrations
  - service: input_number.set_value
    target:
      entity_id: input_number.offline_integration_count
    data:
      value: 0
# Webhook-triggered Sonos favourite: toggles the "CNN International" playback
# script and records the selection for the dashboard.
- id: sonos_favourites_play_cnn_international
  alias: "[Sonos Favourites] Play CNN International"
  description: "Play CNN International on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_cnn_international
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_cnn_international
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "CNN International"
# Combined weather template package: merges forecasts and current conditions
# from every available (non-template) weather entity into one "Combined"
# weather entity, plus derived sensors and binary sensors.
# NOTE(review): this list item uses triggers/actions/weather/sensor/
# binary_sensor keys — it is a trigger-based template-entity definition, not
# an automation; confirm it is loaded under the `template:` integration.
- triggers:
  # Refresh every 5 minutes, on startup, and on a manual event.
  - alias: Update combined weather forecast every 5 minutes at 30 seconds past the
      minute
    trigger: time_pattern
    minutes: /5
    seconds: '30'
  - alias: Update combined weather forecast on Home Assistant startup
    trigger: homeassistant
    event: start
  - alias: Update combined weather forecast on manual trigger event
    trigger: event
    event_type: update_combined_forecast
  actions:
  # Per-integration weights plus the attribute/rounding tables used by the
  # merge templates below.
  - alias: Define weights for different weather integrations
    variables:
      debug: false
      weight:
      - entity_id: weather.knmi
        current: 3
        daily: 3
        hourly: 4
      - entity_id: weather.buienradar
        current: 2
        daily: 2
      forecast_all:
      - attr: condition
      - attr: wind_bearing
        round: 2
      - attr: cloud_coverage
        round: 0
      - attr: temperature
        round: 1
      - attr: wind_speed
        round: 2
      - attr: precipitation
        round: 2
      - attr: precipitation_probability
        round: 0
      - attr: dew_point
        round: 1
      - attr: uv_index
        round: 2
      - attr: apparent_temperature
        round: 1
      - attr: pressure
        round: 2
      - attr: humidity
        round: 0
      forecast_daily:
      - attr: templow
        round: 2
      - attr: wind_gust_speed
        round: 0
      forecast_twice_daily:
      - attr: is_daytime
  # --- Hourly pass: supported_features bitmask values that include hourly.
  - alias: Set right forecast type
    variables:
      type: hourly
      supported:
      - 2
      - 3
      - 6
      - 7
  # Anchored sequence (&id001) is reused verbatim for the daily and
  # twice-daily passes below.
  - alias: Get the forecast data from the entities
    sequence: &id001
    - alias: Get all entities which support the forecast type
      variables:
        weather_entities: "{{\n states.weather\n | selectattr('attributes.supported_features',\
          \ 'defined')\n | selectattr('attributes.supported_features', 'in', supported)\n\
          \ | map(attribute='entity_id')\n | reject('in', integration_entities('template'))\n\
          \ | select('has_value')\n | list\n}}\n"
    - alias: Get forecasts in case there are entities
      if: '{{ weather_entities | count > 0 }}'
      then:
      - alias: Set forecast available flag when entities found
        variables:
          forecast_available: true
      - alias: Get hourly forecast data out of selected entities
        action: weather.get_forecasts
        data:
          type: '{{ type }}'
        target:
          entity_id: '{{ weather_entities }}'
        response_variable: forecast
      else:
      - alias: Set forecast unavailable flag when no entities found
        variables:
          forecast_available: false
  # Merge per-entity hourly forecasts into one weighted, median-based list.
  - alias: Combine the forecasts in one
    variables:
      hourly_entities: '{{ weather_entities }}'
      hourly: "{# set number of days to use for forecasts #}\n {% set forecast_days\
        \ = 3 if type == 'hourly' else 10 %}\n{# define valid forecast attributes\
        \ and precision for rounding #}\n {% set forecast_attr = forecast_all if\
        \ type == 'hourly' else forecast_all + forecast_daily if type == 'daily' else\
        \ forecast_all + forecast_twice_daily %}\n{# check if forecast is retreived\
        \ #}\n {% if forecast_available %}\n {# combine all forecasts in one list\
        \ #}\n {% if weight is defined\n and weight is list\n and\
        \ weight | count > 0\n and weight[0] is mapping\n %}\n {% set\
        \ ns = namespace(all=[]) %}\n {% for k, v in forecast.items() %}\n \
        \ {% set w = weight\n | selectattr('entity_id', 'eq',\
        \ k)\n | map(attribute=type)\n | first\n\
        \ | default(1) %}\n {% set ns.all = ns.all + v.forecast\
        \ * w %}\n {% endfor %}\n {% set all = ns.all %}\n {% else %}\n\
        \ {% set all = forecast.values() | map(attribute='forecast') | sum(start=[])\
        \ %}\n {% endif %}\n {# make sure no old forecasts are included, and only\
        \ 3 days for hourly forecasts, and 10 days for daily #}\n {% set compare_start\
        \ = today_at() if type == 'daily' else now() - timedelta(hours=1) %}\n \
        \ {% set compare_end = now() + timedelta(days=forecast_days) %}\n {# align\
        \ datetimes so they all use local timezone, and the same start time for daily\
        \ forecasts #}\n {% set ns = namespace(aligned=[], forecast=[]) %}\n \
        \ {% for item in all if compare_start <= as_datetime(item.datetime) | as_local\
        \ <= compare_end %}\n {% set new_dt = item.datetime | as_datetime(item.datetime)\
        \ | as_local %}\n {% set new_dt = new_dt.isoformat() if type in ['hourly',\
        \ 'twice_daily'] else new_dt.replace(hour=0, minute=0).isoformat() %}\n \
        \ {% set ns.aligned = ns.aligned + [dict(item, datetime=new_dt)] %}\n \
        \ {% endfor %}\n {# set list of unique datetime #}\n {% set dt_list\
        \ = ns.aligned | map(attribute='datetime') | unique | sort | list %}\n {#\
        \ create forecast list item for each datetime #}\n {% for dt in dt_list\
        \ %}\n {% set forecasts = ns.aligned | selectattr('datetime', 'eq', dt)\
        \ | list %}\n {% set dt_ns = namespace(keys=[], forecast=dict(datetime=dt))\
        \ %}\n {# find forecast available forecast items #}\n {% for item\
        \ in forecasts %}\n {% set dt_ns.keys = dt_ns.keys + item.keys()\
        \ | list %}\n {% endfor %}\n {# remove unsupported types for\
        \ template weather #}\n {% set allowed_keys = forecast_attr | map(attribute='attr')\
        \ | list %}\n {% set keys_list = dt_ns.keys | unique | select('in',\
        \ allowed_keys) %}\n {# find value for each forecast item #}\n \
        \ {% for key in keys_list %}\n {% set key_items = forecasts | selectattr(key,\
        \ 'defined') | map(attribute=key) | list %}\n {# find most frequent\
        \ item for condition #}\n {% if key == 'condition' %}\n \
        \ {% if 'clear-night' in key_items %}\n {% set key_items\
        \ = key_items | map('replace', 'sunny', 'clear-night') | list %}\n \
        \ {% endif %}\n {% set add_dict = dict(condition=statistical_mode(key_items,\
        \ none)) %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% elif key == 'is_daytime' %}\n {% set add_dict\
        \ = dict(is_daytime=key_items[0]) %}\n {% set dt_ns.forecast\
        \ = dict(dt_ns.forecast, **add_dict) %}\n {# find median for other\
        \ (numeric) forecast types #}\n {% else %}\n {# filter\
        \ out non numeric values #}\n {% set values = key_items |\
        \ map('replace', none, 0) | select('is_number') | map('float') | list | sort\
        \ %}\n {% if values | count > 0 %}\n {# add\
        \ forecast item to forecast #}\n {% set round = forecast_attr\
        \ | selectattr('attr', 'eq', key) | map(attribute='round') | list | first\
        \ %}\n {% set add_dict = {key: median(values)|round(round)}\
        \ %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% endif %}\n {% endif %}\n {% endfor\
        \ %}\n {# combine forecast for each datetime in one list #}\n \
        \ {% set ns.forecast = ns.forecast + [dt_ns.forecast] %}\n {% endfor %}\n\
        \ {# output the forecast #}\n {{ ns.forecast }}\n{# create twice daily\
        \ forecast based on hourly forecast if not provided normally #} {% elif type\
        \ == 'twice_daily' and hourly | count > 0 %}\n {% set times = hourly | selectattr('datetime',\
        \ 'search', 'T09:00|T18:00') | map(attribute='datetime') | list %}\n {% set\
        \ ns = namespace(forecast=[], item={}) %}\n {% for t in times %}\n {%\
        \ set day = t is search 'T09:00' %}\n {% set dates = hourly | map(attribute='datetime')\
        \ | list %}\n {% set index = dates.index(t) %}\n {% set data = hourly[index:index+(9\
        \ if day else 13)] %}\n {% set key_list = data[0].keys() | list %}\n \
        \ {% set ns.item = {'datetime': t, 'is_daytime': day} %}\n {% for key\
        \ in key_list %}\n {% set key_items = data | selectattr(key, 'defined')\
        \ | map(attribute=key) | list %}\n {% if key_items[0] is string %}\n\
        \ {% set ns.item = dict(ns.item, **{key: key_items | statistical_mode})\
        \ %}\n {% elif key == 'temperature' %}\n {% set ns.item = dict(ns.item,\
        \ **{key: key_items | max, 'templow': key_items | min}) %}\n {% elif\
        \ key_items[0] | is_number %}\n {% set r = forecast_attr | selectattr('attr',\
        \ 'eq', key) | map(attribute='round') | first | default(1) %}\n {%\
        \ set ns.item = dict(ns.item, **{key: key_items | average | round(r)}) %}\n\
        \ {% endif %}\n {% endfor %}\n {% set ns.forecast = ns.forecast\
        \ + [ns.item] %}\n {% endfor %}\n {{ ns.forecast }}\n{% else %}\n []\n\
        {% endif %}\n"
  # --- Daily pass: same anchored sequence, daily-capable feature values.
  - alias: Set right forecast type
    variables:
      type: daily
      supported:
      - 1
      - 3
      - 5
      - 7
  - alias: Get the forecast data from the entities
    sequence: *id001
  # Same merge template as the hourly pass, keyed on `type: daily`.
  - alias: Combine the forecasts in one
    variables:
      daily_entities: '{{ weather_entities }}'
      daily: "{# set number of days to use for forecasts #}\n {% set forecast_days\
        \ = 3 if type == 'hourly' else 10 %}\n{# define valid forecast attributes\
        \ and precision for rounding #}\n {% set forecast_attr = forecast_all if\
        \ type == 'hourly' else forecast_all + forecast_daily if type == 'daily' else\
        \ forecast_all + forecast_twice_daily %}\n{# check if forecast is retreived\
        \ #}\n {% if forecast_available %}\n {# combine all forecasts in one list\
        \ #}\n {% if weight is defined\n and weight is list\n and\
        \ weight | count > 0\n and weight[0] is mapping\n %}\n {% set\
        \ ns = namespace(all=[]) %}\n {% for k, v in forecast.items() %}\n \
        \ {% set w = weight\n | selectattr('entity_id', 'eq',\
        \ k)\n | map(attribute=type)\n | first\n\
        \ | default(1) %}\n {% set ns.all = ns.all + v.forecast\
        \ * w %}\n {% endfor %}\n {% set all = ns.all %}\n {% else %}\n\
        \ {% set all = forecast.values() | map(attribute='forecast') | sum(start=[])\
        \ %}\n {% endif %}\n {# make sure no old forecasts are included, and only\
        \ 3 days for hourly forecasts, and 10 days for daily #}\n {% set compare_start\
        \ = today_at() if type == 'daily' else now() - timedelta(hours=1) %}\n \
        \ {% set compare_end = now() + timedelta(days=forecast_days) %}\n {# align\
        \ datetimes so they all use local timezone, and the same start time for daily\
        \ forecasts #}\n {% set ns = namespace(aligned=[], forecast=[]) %}\n \
        \ {% for item in all if compare_start <= as_datetime(item.datetime) | as_local\
        \ <= compare_end %}\n {% set new_dt = item.datetime | as_datetime(item.datetime)\
        \ | as_local %}\n {% set new_dt = new_dt.isoformat() if type in ['hourly',\
        \ 'twice_daily'] else new_dt.replace(hour=0, minute=0).isoformat() %}\n \
        \ {% set ns.aligned = ns.aligned + [dict(item, datetime=new_dt)] %}\n \
        \ {% endfor %}\n {# set list of unique datetime #}\n {% set dt_list\
        \ = ns.aligned | map(attribute='datetime') | unique | sort | list %}\n {#\
        \ create forecast list item for each datetime #}\n {% for dt in dt_list\
        \ %}\n {% set forecasts = ns.aligned | selectattr('datetime', 'eq', dt)\
        \ | list %}\n {% set dt_ns = namespace(keys=[], forecast=dict(datetime=dt))\
        \ %}\n {# find forecast available forecast items #}\n {% for item\
        \ in forecasts %}\n {% set dt_ns.keys = dt_ns.keys + item.keys()\
        \ | list %}\n {% endfor %}\n {# remove unsupported types for\
        \ template weather #}\n {% set allowed_keys = forecast_attr | map(attribute='attr')\
        \ | list %}\n {% set keys_list = dt_ns.keys | unique | select('in',\
        \ allowed_keys) %}\n {# find value for each forecast item #}\n \
        \ {% for key in keys_list %}\n {% set key_items = forecasts | selectattr(key,\
        \ 'defined') | map(attribute=key) | list %}\n {# find most frequent\
        \ item for condition #}\n {% if key == 'condition' %}\n \
        \ {% if 'clear-night' in key_items %}\n {% set key_items\
        \ = key_items | map('replace', 'sunny', 'clear-night') | list %}\n \
        \ {% endif %}\n {% set add_dict = dict(condition=statistical_mode(key_items,\
        \ none)) %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% elif key == 'is_daytime' %}\n {% set add_dict\
        \ = dict(is_daytime=key_items[0]) %}\n {% set dt_ns.forecast\
        \ = dict(dt_ns.forecast, **add_dict) %}\n {# find median for other\
        \ (numeric) forecast types #}\n {% else %}\n {# filter\
        \ out non numeric values #}\n {% set values = key_items |\
        \ map('replace', none, 0) | select('is_number') | map('float') | list | sort\
        \ %}\n {% if values | count > 0 %}\n {# add\
        \ forecast item to forecast #}\n {% set round = forecast_attr\
        \ | selectattr('attr', 'eq', key) | map(attribute='round') | list | first\
        \ %}\n {% set add_dict = {key: median(values)|round(round)}\
        \ %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% endif %}\n {% endif %}\n {% endfor\
        \ %}\n {# combine forecast for each datetime in one list #}\n \
        \ {% set ns.forecast = ns.forecast + [dt_ns.forecast] %}\n {% endfor %}\n\
        \ {# output the forecast #}\n {{ ns.forecast }}\n{# create twice daily\
        \ forecast based on hourly forecast if not provided normally #} {% elif type\
        \ == 'twice_daily' and hourly | count > 0 %}\n {% set times = hourly | selectattr('datetime',\
        \ 'search', 'T09:00|T18:00') | map(attribute='datetime') | list %}\n {% set\
        \ ns = namespace(forecast=[], item={}) %}\n {% for t in times %}\n {%\
        \ set day = t is search 'T09:00' %}\n {% set dates = hourly | map(attribute='datetime')\
        \ | list %}\n {% set index = dates.index(t) %}\n {% set data = hourly[index:index+(9\
        \ if day else 13)] %}\n {% set key_list = data[0].keys() | list %}\n \
        \ {% set ns.item = {'datetime': t, 'is_daytime': day} %}\n {% for key\
        \ in key_list %}\n {% set key_items = data | selectattr(key, 'defined')\
        \ | map(attribute=key) | list %}\n {% if key_items[0] is string %}\n\
        \ {% set ns.item = dict(ns.item, **{key: key_items | statistical_mode})\
        \ %}\n {% elif key == 'temperature' %}\n {% set ns.item = dict(ns.item,\
        \ **{key: key_items | max, 'templow': key_items | min}) %}\n {% elif\
        \ key_items[0] | is_number %}\n {% set r = forecast_attr | selectattr('attr',\
        \ 'eq', key) | map(attribute='round') | first | default(1) %}\n {%\
        \ set ns.item = dict(ns.item, **{key: key_items | average | round(r)}) %}\n\
        \ {% endif %}\n {% endfor %}\n {% set ns.forecast = ns.forecast\
        \ + [ns.item] %}\n {% endfor %}\n {{ ns.forecast }}\n{% else %}\n []\n\
        {% endif %}\n"
  # --- Twice-daily pass: falls back to deriving from the hourly list when no
  # entity provides twice-daily forecasts (see template's elif branch).
  - alias: Set right forecast type
    variables:
      type: twice_daily
      supported:
      - 4
      - 5
      - 6
      - 7
  - alias: Get the forecast data from the entities
    sequence: *id001
  - alias: Combine the forecasts in one
    variables:
      twice_daily_entities: '{{ weather_entities }}'
      twice_daily: "{# set number of days to use for forecasts #}\n {% set forecast_days\
        \ = 3 if type == 'hourly' else 10 %}\n{# define valid forecast attributes\
        \ and precision for rounding #}\n {% set forecast_attr = forecast_all if\
        \ type == 'hourly' else forecast_all + forecast_daily if type == 'daily' else\
        \ forecast_all + forecast_twice_daily %}\n{# check if forecast is retreived\
        \ #}\n {% if forecast_available %}\n {# combine all forecasts in one list\
        \ #}\n {% if weight is defined\n and weight is list\n and\
        \ weight | count > 0\n and weight[0] is mapping\n %}\n {% set\
        \ ns = namespace(all=[]) %}\n {% for k, v in forecast.items() %}\n \
        \ {% set w = weight\n | selectattr('entity_id', 'eq',\
        \ k)\n | map(attribute=type)\n | first\n\
        \ | default(1) %}\n {% set ns.all = ns.all + v.forecast\
        \ * w %}\n {% endfor %}\n {% set all = ns.all %}\n {% else %}\n\
        \ {% set all = forecast.values() | map(attribute='forecast') | sum(start=[])\
        \ %}\n {% endif %}\n {# make sure no old forecasts are included, and only\
        \ 3 days for hourly forecasts, and 10 days for daily #}\n {% set compare_start\
        \ = today_at() if type == 'daily' else now() - timedelta(hours=1) %}\n \
        \ {% set compare_end = now() + timedelta(days=forecast_days) %}\n {# align\
        \ datetimes so they all use local timezone, and the same start time for daily\
        \ forecasts #}\n {% set ns = namespace(aligned=[], forecast=[]) %}\n \
        \ {% for item in all if compare_start <= as_datetime(item.datetime) | as_local\
        \ <= compare_end %}\n {% set new_dt = item.datetime | as_datetime(item.datetime)\
        \ | as_local %}\n {% set new_dt = new_dt.isoformat() if type in ['hourly',\
        \ 'twice_daily'] else new_dt.replace(hour=0, minute=0).isoformat() %}\n \
        \ {% set ns.aligned = ns.aligned + [dict(item, datetime=new_dt)] %}\n \
        \ {% endfor %}\n {# set list of unique datetime #}\n {% set dt_list\
        \ = ns.aligned | map(attribute='datetime') | unique | sort | list %}\n {#\
        \ create forecast list item for each datetime #}\n {% for dt in dt_list\
        \ %}\n {% set forecasts = ns.aligned | selectattr('datetime', 'eq', dt)\
        \ | list %}\n {% set dt_ns = namespace(keys=[], forecast=dict(datetime=dt))\
        \ %}\n {# find forecast available forecast items #}\n {% for item\
        \ in forecasts %}\n {% set dt_ns.keys = dt_ns.keys + item.keys()\
        \ | list %}\n {% endfor %}\n {# remove unsupported types for\
        \ template weather #}\n {% set allowed_keys = forecast_attr | map(attribute='attr')\
        \ | list %}\n {% set keys_list = dt_ns.keys | unique | select('in',\
        \ allowed_keys) %}\n {# find value for each forecast item #}\n \
        \ {% for key in keys_list %}\n {% set key_items = forecasts | selectattr(key,\
        \ 'defined') | map(attribute=key) | list %}\n {# find most frequent\
        \ item for condition #}\n {% if key == 'condition' %}\n \
        \ {% if 'clear-night' in key_items %}\n {% set key_items\
        \ = key_items | map('replace', 'sunny', 'clear-night') | list %}\n \
        \ {% endif %}\n {% set add_dict = dict(condition=statistical_mode(key_items,\
        \ none)) %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% elif key == 'is_daytime' %}\n {% set add_dict\
        \ = dict(is_daytime=key_items[0]) %}\n {% set dt_ns.forecast\
        \ = dict(dt_ns.forecast, **add_dict) %}\n {# find median for other\
        \ (numeric) forecast types #}\n {% else %}\n {# filter\
        \ out non numeric values #}\n {% set values = key_items |\
        \ map('replace', none, 0) | select('is_number') | map('float') | list | sort\
        \ %}\n {% if values | count > 0 %}\n {# add\
        \ forecast item to forecast #}\n {% set round = forecast_attr\
        \ | selectattr('attr', 'eq', key) | map(attribute='round') | list | first\
        \ %}\n {% set add_dict = {key: median(values)|round(round)}\
        \ %}\n {% set dt_ns.forecast = dict(dt_ns.forecast, **add_dict)\
        \ %}\n {% endif %}\n {% endif %}\n {% endfor\
        \ %}\n {# combine forecast for each datetime in one list #}\n \
        \ {% set ns.forecast = ns.forecast + [dt_ns.forecast] %}\n {% endfor %}\n\
        \ {# output the forecast #}\n {{ ns.forecast }}\n{# create twice daily\
        \ forecast based on hourly forecast if not provided normally #} {% elif type\
        \ == 'twice_daily' and hourly | count > 0 %}\n {% set times = hourly | selectattr('datetime',\
        \ 'search', 'T09:00|T18:00') | map(attribute='datetime') | list %}\n {% set\
        \ ns = namespace(forecast=[], item={}) %}\n {% for t in times %}\n {%\
        \ set day = t is search 'T09:00' %}\n {% set dates = hourly | map(attribute='datetime')\
        \ | list %}\n {% set index = dates.index(t) %}\n {% set data = hourly[index:index+(9\
        \ if day else 13)] %}\n {% set key_list = data[0].keys() | list %}\n \
        \ {% set ns.item = {'datetime': t, 'is_daytime': day} %}\n {% for key\
        \ in key_list %}\n {% set key_items = data | selectattr(key, 'defined')\
        \ | map(attribute=key) | list %}\n {% if key_items[0] is string %}\n\
        \ {% set ns.item = dict(ns.item, **{key: key_items | statistical_mode})\
        \ %}\n {% elif key == 'temperature' %}\n {% set ns.item = dict(ns.item,\
        \ **{key: key_items | max, 'templow': key_items | min}) %}\n {% elif\
        \ key_items[0] | is_number %}\n {% set r = forecast_attr | selectattr('attr',\
        \ 'eq', key) | map(attribute='round') | first | default(1) %}\n {%\
        \ set ns.item = dict(ns.item, **{key: key_items | average | round(r)}) %}\n\
        \ {% endif %}\n {% endfor %}\n {% set ns.forecast = ns.forecast\
        \ + [ns.item] %}\n {% endfor %}\n {{ ns.forecast }}\n{% else %}\n []\n\
        {% endif %}\n"
  # Current conditions: weighted statistical mode (condition) / median
  # (numeric attrs) across all available weather entities.
  - alias: Set variable for weather entities for attributes
    variables:
      weather_entities: "{{\n states.weather\n | map(attribute='entity_id')\n\
        \ | reject('in', integration_entities('template'))\n | select('has_value')\n\
        \ | list\n}}\n"
      current_values: "{% set attr_all = forecast_all + forecast_daily %} {% set ns\
        \ = namespace(attributes=['condition'], values={}) %} {% for e in weather_entities\
        \ %}\n {% set ns.attributes = ns.attributes + states[e].attributes.items()\
        \ | selectattr('1', 'is_number') | map(attribute='0') | reject('eq', 'supported_features')\
        \ | list %}\n{% endfor %} {% set attributes = ns.attributes | unique | list\
        \ %} {% for a in attributes %}\n {% if weight is defined and weight is list\
        \ and weight[0] is mapping %}\n {% set ns.weight = [] %}\n {% for e\
        \ in weather_entities %}\n {% set w = weight | selectattr('entity_id',\
        \ 'eq', e) | map(attribute='current') | first | default(1) %}\n {% set\
        \ ns.weight = ns.weight + [states(e) if a == 'condition' else state_attr(e,\
        \ a)] * w %}\n {% endfor %}\n {% set values = ns.weight | reject('none')\
        \ | list %}\n {% else %}\n {% if a == 'condition' %}\n {% set values\
        \ = weather_entities | map('states') | list %}\n {% else %}\n {% set\
        \ values = weather_entities | map('state_attr', a) | select('is_number') |\
        \ list %}\n {% endif %}\n {% endif %}\n {% set r = attr_all | selectattr('attr',\
        \ 'eq', a) | map(attribute='round') | first | default(1) %}\n {% set value\
        \ = values | statistical_mode if a == 'condition' else values | median(none)\
        \ | round(r, default=none) %}\n {% set ns.values = dict(ns.values, **{a:\
        \ value}) %}\n{% endfor %} {{ ns.values }}\n"
  # Optional debug output, gated on the `debug` variable defined above.
  - alias: Send debug notification if debug mode is enabled
    if: '{{ debug | default(false) | bool(false) }}'
    then:
    - alias: Send notification with some debug date
      action: persistent_notification.create
      data:
        title: Weather Combined debug
        message: 'hourly: {{ hourly | count }} items
          hourly_entities: "{{ hourly_entities | join('', '') }}"
          daily: {{ daily | count }} items
          daily_entities: {{ daily_entities | join('', '') }}
          twice_daily: {{ twice_daily | count }} items
          twice_daily_entities: {{ twice_daily_entities | join('', '') }}
          current_values: {{ current_values }}
          '
  # The combined weather entity built from the merged variables above.
  weather:
  - name: Combined
    unique_id: 97b3c060-3146-41dd-91d9-0765d2e15e16
    condition_template: '{{ current_values.get(''condition'', none) }}'
    temperature_template: '{{ current_values.get(''temperature'', none) }}'
    apparent_temperature_template: '{{ current_values.get(''apparent_temperature'',
      none) }}'
    pressure_template: '{{ current_values.get(''pressure'', none) }}'
    wind_speed_template: '{{ current_values.get(''wind_speed'', none) }}'
    wind_gust_speed_template: '{{ current_values.get(''wind_gust_speed'', none) }}'
    visibility_template: '{{ current_values.get(''visibility'', none) }}'
    humidity_template: '{{ current_values.get(''humidity'', none) }}'
    wind_bearing_template: '{{ current_values.get(''wind_bearing'', none) }}'
    ozone_template: '{{ current_values.get(''ozone'', none) }}'
    cloud_coverage_template: '{{ current_values.get(''cloud_coverage'', none) }}'
    dew_point_template: '{{ current_values.get(''dew_point'', none) }}'
    temperature_unit: °C
    pressure_unit: hPa
    wind_speed_unit: km/h
    visibility_unit: km
    precipitation_unit: mm
    forecast_hourly_template: '{{ hourly }}'
    forecast_daily_template: '{{ daily }}'
    forecast_twice_daily_template: '{{ twice_daily | default([], true) }}'
  # Convenience sensors derived from the combined daily forecast.
  sensor:
  - device_class: temperature
    state_class: measurement
    unit_of_measurement: °C
    availability: "{{\n daily is defined\n and daily is list\n and daily | selectattr('datetime',\
      \ 'search', now().date() | string) | list | count > 0\n}}\n"
    unique_id: 7b190917-6d93-4c01-8854-4a1b7bf9d886
    name: Weather Combined Max Temp Today
    state: "{{\n daily\n | selectattr('datetime', 'search', now().date()| string)\n\
      \ | map(attribute='temperature')\n | list\n | first\n}}\n"
  - device_class: temperature
    state_class: measurement
    unit_of_measurement: °C
    availability: "{{\n daily is defined\n and daily is list\n and daily | selectattr('datetime',\
      \ 'search', now().date() | string) | list | count > 0\n}}\n"
    unique_id: e61acc80-7348-45ed-b83c-070109cd4fe1
    name: Weather Combined Max Temp Tomorrow
    state: "{{\n daily\n | selectattr('datetime', 'search', (now() + timedelta(days=1)).date()\n\
      \ | string)\n | map(attribute='temperature')\n | list\n | first\n\
      }}\n"
  - unique_id: 3098ef4f-aa24-4c27-ae00-514b699a1bfa
    name: Weather Combined daily condition
    state: '{{ daily[0].condition }}'
    icon: "{% set icon = {\n \"clear-night\": \"mdi:weather-night\",\n \"cloudy\"\
      : \"mdi:weather-cloudy\",\n \"exceptional\": \"mdi:alert-circle-outline\"\
      ,\n \"fog\": \"mdi:weather-fog\",\n \"hail\": \"mdi:weather-hail\",\n\
      \ \"lightning\": \"mdi:weather-lightning\",\n \"lightning-rainy\": \"\
      mdi:weather-lightning-rainy\",\n \"pouring\": \"mdi:weather-pouring\",\n\
      \ \"rainy\": \"mdi:weather-rainy\",\n \"snowy\": \"mdi:weather-snowy\"\
      ,\n \"snowy-rainy\": \"mdi:weather-snowy-rainy\",\n \"sunny\": \"mdi:weather-sunny\"\
      ,\n \"windy\": \"mdi:weather-windy\",\n \"windy-variant\": \"mdi:weather-windy-variant\"\
      ,\n \"partlycloudy\": \"mdi:weather-partly-cloudy\"\n }\n%} {{ icon.get(daily[0].condition,\
      \ 'mdi:cloud-alert') }}\n"
    availability: '{{ daily is defined and daily is list and daily | count > 0 }}'
  # Binary sensor: true when sun is expected in the next three hourly slots.
  binary_sensor:
  - unique_id: 5e88168e-bd66-4f98-8a12-e4cab0fb42c8
    name: Weather combined sun in next hours
    state: '{% set next_hours = hourly[:3] | map(attribute=''condition'') | list %}
      {{ next_hours | select(''in'', [''partlycloudy'', ''sunny'']) | list | count
      > 0 }}
      '
    icon: "{% set icon = {\n \"clear-night\": \"mdi:weather-night\",\n \"cloudy\"\
      : \"mdi:weather-cloudy\",\n \"exceptional\": \"mdi:alert-circle-outline\"\
      ,\n \"fog\": \"mdi:weather-fog\",\n \"hail\": \"mdi:weather-hail\",\n\
      \ \"lightning\": \"mdi:weather-lightning\",\n \"lightning-rainy\": \"\
      mdi:weather-lightning-rainy\",\n \"pouring\": \"mdi:weather-pouring\",\n\
      \ \"rainy\": \"mdi:weather-rainy\",\n \"snowy\": \"mdi:weather-snowy\"\
      ,\n \"snowy-rainy\": \"mdi:weather-snowy-rainy\",\n \"sunny\": \"mdi:weather-sunny\"\
      ,\n \"windy\": \"mdi:weather-windy\",\n \"windy-variant\": \"mdi:weather-windy-variant\"\
      ,\n \"partlycloudy\": \"mdi:weather-partly-cloudy\"\n }\n%} {% set next_hours\
      \ = hourly[:3] | map(attribute='condition') | list %} {{ icon.get(statistical_mode(next_hours),\
      \ 'mdi:cloud-alert') }}\n"
    availability: '{{ hourly is defined and hourly is list and hourly | count >= 3
      }}'
# [HA] Database Repack — purge the recorder database (with filters applied)
# and repack it to reclaim disk space, overnight while the system is idle.
# Fix: quoted the trigger time — unquoted colon-separated values risk YAML
# 1.1 sexagesimal-number parsing, and every other time in this file is
# already quoted ('17:00:00', etc.).
# NOTE(review): the alias says "(Weekly)" but the weekday condition runs this
# on Sunday, Tuesday AND Thursday — confirm which is intended.
- id: database_repack
  alias: '[HA] Database Repack (Weekly)'
  mode: single
  trigger:
  - platform: time
    at: '02:00:00'
  condition:
  - condition: time
    weekday:
    - sun
    - tue
    - thu
  action:
  - service: recorder.purge
    data:
      apply_filter: true
      repack: true
# [HA DB] Remove Orphaned Entities — nightly run of the cleanup script that
# removes database entities no longer present in the registry.
# Fix: quoted the trigger time — unquoted colon-separated values risk YAML
# 1.1 sexagesimal-number parsing; matches the quoted times used elsewhere
# in this file.
- id: ha_db_remove_orphaned_entities
  alias: '[HA DB] Remove Orphaned Entities'
  mode: single
  trigger:
  - platform: time
    at: '02:30:00'
  condition: []
  action:
  - action: script.toggle
    metadata: {}
    data: {}
    target:
      entity_id: script.db_remove_orphaned_entities
# Webhook-triggered Sonos favourite: toggles the "Double J Sydney" playback
# script and records the selection for the dashboard.
- id: sonos_favourites_play_double_j_sydney
  alias: "[Sonos Favourites] Play Double J Sydney"
  description: "Play Double J Sydney on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_double_j_sydney
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_double_j_sydney
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "Double J Sydney"
# Webhook-triggered Sonos favourite: toggles the "Double J (Sydney, NSW)"
# playback script and records the selection for the dashboard.
- id: sonos_favourites_play_double_j_sydney_nsw
  alias: "[Sonos Favourites] Play Double J (Sydney, NSW)"
  description: "Play Double J (Sydney, NSW) on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_double_j_sydney_nsw
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_double_j_sydney_nsw
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "Double J (Sydney, NSW)"
# Webhook-triggered Sonos favourite: toggles the "ESPN Radio" playback script
# and records the selection for the dashboard.
- id: sonos_favourites_play_espn_radio
  alias: "[Sonos Favourites] Play ESPN Radio"
  description: "Play ESPN Radio on Sonos"
  trigger:
    - platform: webhook
      webhook_id: sonos_espn_radio
      allowed_methods: [POST, PUT]
      local_only: true
  action:
    - service: script.toggle
      target:
        entity_id: script.sonos_favourites_espn_radio
    - service: input_text.set_value
      target:
        entity_id: input_text.sonos_playlist_playing
      data:
        value: "ESPN Radio"
# [Fan] Set Speed — when a fan-speed input_number changes, call the
# script.fan_set_speed helper with the fan name derived from the entity_id.
- id: fan_set_speed
  alias: '[Fan] Set Speed'
  description: '[Fan] Set Speed'
  trigger:
  - platform: state
    entity_id:
    - input_number.office_fan_speed
  action:
  - service: script.fan_set_speed
    data:
      fan: '{{ fan }}'
      # Floor the slider value so the script receives an integer step.
      speed: '{{ states(trigger.entity_id)| round (0, ''floor'') }}'
  # `fan` strips the 'input_number.' prefix and '_speed' suffix, e.g.
  # input_number.office_fan_speed -> 'office_fan'; supporting another fan
  # only requires adding its input_number entity to the trigger list.
  variables:
    fan: '{{ trigger.entity_id | regex_replace(find=''input_number.'', replace='''',
      ignorecase=False) | regex_replace(find=''_speed'', replace='''', ignorecase=False)
      }}'
# InfluxDB to Entity: 365 Day Flight Tracker — every hour (and after startup,
# following a 240 s delay so InfluxDB is reachable) runs three Flux queries
# via pyscript and publishes the results as archive sensors.
# The "Flights" measurement holds daily flight counts; the "m" measurement is
# the metres-unit bucket InfluxDB uses for the distance sensors.
- id: influx2entity_flighttrack365day
  alias: 'InfluxDB to Entity: 365 Day Flight Tracker'
  description: 'InfluxDB to Entity: 365 Day Flight Tracker'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  # Give InfluxDB time to come up after a Home Assistant restart.
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    - delay: 240
  # Daily max flight count over the last year (1-day windows, gaps -> 0).
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Flights\")\n |> filter(fn: (r) => r[\"\
        entity_id\"] == \"flighttracker_flights_24hr\")\n |> filter(fn: (r) => r[\"\
        _field\"] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_flighttracker_flights_365day
      unit_of_measurement: Flights
      friendly_name: Flight Tracker - Flights - 365 Day
      icon: mdi:airplane-takeoff
  # Daily max detection distance (metres -> km via the map()).
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"m\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"flighttracker_distance_max\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> map(fn: (r) => ({r with _value: r._value / 1000.0}))\n\
        \ |> aggregateWindow(every: 1d, fn: max, createEmpty: true)\n |> fill(value:\
        \ 0.0)\n |> yield(name: \"max\")"
      entity_id: sensor.archive_flighttracker_maxdistance_365day
      unit_of_measurement: km
      friendly_name: Flight Tracker - Max Distance - 365 Day
      icon: mdi:airplane-marker
  # Daily mean detection distance (metres -> km via the map()).
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"m\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"flighttracker_distance_max\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> map(fn: (r) => ({r with _value: r._value / 1000.0}))\n\
        \ |> aggregateWindow(every: 1d, fn: mean, createEmpty: true)\n |> fill(value:\
        \ 0.0)\n |> yield(name: \"mean\")"
      entity_id: sensor.archive_flighttracker_avgdistance_365day
      unit_of_measurement: km
      friendly_name: Flight Tracker - Avg Distance - 365 Day
      icon: mdi:airplane-marker
# Publishes flight-tracker stats to the pixel clock over MQTT whenever the
# aircraft-total sensor changes.
- id: '1698473315369'
  alias: '[pixelclock] Flight Tracker Stats'
  description: ''
  trigger:
  - platform: state
    entity_id:
    - sensor.flighttracker_aircraft_total
  condition: []
  action:
  - service: mqtt.publish
    data:
      # Renders a custom-app JSON payload (icon/text) for the clock.
      # NOTE(review): nearest_trip concatenates two state_attr() results with
      # " > "; if either attribute is None the template errors — confirm
      # sensor.plane_overhead_detail always sets both airport codes.
      payload: '{%- set value = states(''sensor.flighttracker_aircraft_total'') -%}
        {%- set nearest_rego = state_attr(''sensor.plane_overhead_detail'',''aircraft_rego'')
        -%} {%- set nearest_type = state_attr(''sensor.plane_overhead_detail'', ''aircraft'')
        -%} {%- set nearest_trip = state_attr(''sensor.plane_overhead_detail'', ''origin_airport_code'')
        + " > " +state_attr(''sensor.plane_overhead_detail'', ''destination_airport_code'')
        -%} { "icon": 2933, "pushIcon": 0, "repeat": 1, "textCase": 2, "text": "{{value
        }}, {{ nearest_type }}, {{ nearest_rego }}, {{nearest_trip}}" }
        '
      topic: pixelclock/custom/flighttracker
      qos: 0
      retain: false
  mode: single
# Webhook endpoint (POST/PUT, LAN only): toggles the matching Sonos favourite
# script and records the current selection in input_text.sonos_playlist_playing.
- id: sonos_favourites_play_gaydio_london_top_40_pop_music
  alias: "[Sonos Favourites] Play Gaydio (London) (Top 40 & Pop Music)"
  description: "Play Gaydio (London) (Top 40 & Pop Music) on Sonos"
  trigger:
  - platform: webhook
    webhook_id: sonos_gaydio_london_top_40_pop_music
    local_only: true
    allowed_methods:
    - POST
    - PUT
  action:
  # script.toggle lets the same webhook start and stop the stream.
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_gaydio_london_top_40_pop_music
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: "Gaydio (London) (Top 40 & Pop Music)"
# Webhook endpoint (POST/PUT, LAN only): toggles the matching Sonos favourite
# script and records the current selection in input_text.sonos_playlist_playing.
- id: sonos_favourites_play_gaydio_manchester_dance_electronic
  alias: "[Sonos Favourites] Play Gaydio (Manchester) (Dance & Electronic)"
  description: "Play Gaydio (Manchester) (Dance & Electronic) on Sonos"
  trigger:
  - platform: webhook
    webhook_id: sonos_gaydio_manchester_dance_electronic
    local_only: true
    allowed_methods:
    - POST
    - PUT
  action:
  # script.toggle lets the same webhook start and stop the stream.
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_gaydio_manchester_dance_electronic
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: "Gaydio (Manchester) (Dance & Electronic)"
# Webhook endpoint (POST/PUT, LAN only): toggles the matching Sonos favourite
# script and records the current selection in input_text.sonos_playlist_playing.
- id: sonos_favourites_play_gb_news_radio_local_news
  alias: "[Sonos Favourites] Play GB News Radio (Local News)"
  description: "Play GB News Radio (Local News) on Sonos"
  trigger:
  - platform: webhook
    webhook_id: sonos_gb_news_radio_local_news
    local_only: true
    allowed_methods:
    - POST
    - PUT
  action:
  # script.toggle lets the same webhook start and stop the stream.
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_gb_news_radio_local_news
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: "GB News Radio (Local News)"
# Regenerates README.md daily at 05:00 via the readme integration.
- alias: Admin - Generate README.md
  id: a7c427e6-ce03-49eb-9a23-c5bbe7625aa4
  initial_state: true
  trigger:
  - platform: time
    at: 05:00:00
  condition: []
  action:
  - service: readme.generate
# Pushes the HA configuration to GitHub once a day at 06:00
# (after the 05:00 README regeneration above has run).
- id: github_push
  alias: Github - Push config to GitHub (Daily)
  trigger:
  - at: 06:00:00
    platform: time
  action:
  - service: shell_command.push_to_github
# Posts a persistent notification whenever the GitHub stars sensor changes to
# a new, valid value (e.g. the repo gains or loses a star).
- id: github_stars
  alias: Github - Notify when repo starred
  trigger:
  - platform: state
    entity_id:
    - sensor.bacco007_homeassistantconfig_stars
  condition:
  # Ignore unknown/unavailable/zero states and no-op transitions.
  - condition: template
    value_template: "{% set ignore = ['unknown', 'unavailable', 'none','0'] %} {{\
      \ trigger.from_state.state not in ignore and\n trigger.to_state.state not in\
      \ ignore and\n trigger.to_state.state != trigger.from_state.state }}\n"
  mode: restart
  action:
  - service: persistent_notification.create
    data_template:
      title: Github - Repo Star
      notification_id: github_repostar
      # Strips the 'bacco007/' owner prefix from the sensor's friendly name.
      message: '"{{ trigger.to_state.attributes.friendly_name | regex_replace(find=''bacco007/'',
        replace='''', ignorecase=False) }}: {{ trigger.from_state.state }} => {{ trigger.to_state.state
        }}"'
# Webhook endpoint (POST/PUT, LAN only): toggles the matching Sonos favourite
# script and records the current selection in input_text.sonos_playlist_playing.
- id: sonos_favourites_play_gold_104_3
  alias: "[Sonos Favourites] Play GOLD 104.3"
  description: "Play GOLD 104.3 on Sonos"
  trigger:
  - platform: webhook
    webhook_id: sonos_gold_104_3
    local_only: true
    allowed_methods:
    - POST
    - PUT
  action:
  # script.toggle lets the same webhook start and stop the stream.
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_gold_104_3
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: "GOLD 104.3"
# Weekly GTFS static-timetable refresh (Wednesdays 02:30): downloads the two
# TfNSW schedule zips via pyscript, then has gtfs2 re-ingest each local file.
- id: '1705097473383'
  alias: '[GTFS] Get New Data and Run Update'
  description: ''
  trigger:
  - platform: time
    at: 02:30:00
    enabled: true
  condition:
  - condition: time
    weekday:
    - wed
    enabled: true
  action:
  - service: pyscript.download_gtfs
    data:
      url: https://api.transport.nsw.gov.au/v1/gtfs/schedule/regionbuses/newenglandnorthwest
      filename: buses_nenw.zip
  # Short settle time between downloads.
  - delay:
      hours: 0
      minutes: 0
      seconds: 10
      milliseconds: 0
  - service: pyscript.download_gtfs
    data:
      url: https://api.transport.nsw.gov.au/v1/gtfs/schedule/nswtrains
      filename: nswtrains.zip
  - delay:
      hours: 0
      minutes: 0
      seconds: 10
      milliseconds: 0
  # gtfs2 re-reads the downloaded zips; the url/api_key fields are required by
  # the service schema but unused for local zip extraction (placeholders).
  - service: gtfs2.update_gtfs
    metadata: {}
    data:
      extract_from: zip
      url: na
      file: buses_nenw
      clean_feed_info: true
      api_key_location: not_applicable
      api_key_name: x
      check_source_dates: false
      api_key: x
  - service: gtfs2.update_gtfs
    metadata: {}
    data:
      extract_from: zip
      url: na
      file: nswtrains
      clean_feed_info: true
      api_key_location: not_applicable
      api_key_name: x
      check_source_dates: false
      api_key: x
  mode: single
# Increments a counter every time any script starts. Runs in parallel (up to
# 25 concurrent) and silently drops overflow so bursts are not logged as errors.
- alias: '[HA] Count Executed Scripts'
  id: ha_count_executed_scripts
  mode: parallel
  max: 25
  max_exceeded: silent
  trigger:
  - platform: event
    event_type: script_started
  action:
  - service: counter.increment
    entity_id: counter.scripts_executed_counter
# Raises a persistent notification — and, when input_boolean.alerts_ha is on,
# a Slack message and an iOS push — whenever the entity registry gains a new
# device_tracker entity (i.e. a new device joined the network).
- alias: '[HA] New Device Connected'
  id: ha_newdevice
  trigger:
  - platform: event
    event_type: entity_registry_updated
    event_data:
      action: create
  condition:
  # Only react to newly created device_tracker entities.
  - condition: template
    value_template: '{{trigger.event.data.entity_id.split(''.'')[0] == ''device_tracker''
      }}'
  action:
  - variables:
      entity: '{{trigger.event.data.entity_id}}
        '
  - service: persistent_notification.create
    data:
      title: New Device Connected
      message: 'Device: {{state_attr(entity,''friendly_name'')}} Entity: {{entity}}
        Host: {{state_attr(entity,''host_name'')}} Ip: {{state_attr(entity,''ip'')}}
        Mac-address: {{state_attr(entity,''mac'')}} Full data: {{trigger.event.data}}
        '
      notification_id: newdevice
  - if:
    - condition: state
      entity_id: input_boolean.alerts_ha
      state: 'on'
    then:
    # Slack notification with block-kit formatting.
    - service: notify.tbsmarthome
      data_template:
        message: New Device Connected
        target: '#tbsmarthome'
        data:
          blocks:
          - type: section
            text:
              type: mrkdwn
              text: New Device Connected
          - type: section
            text:
              type: mrkdwn
              text: 'Device: {{state_attr(entity,''friendly_name'')}} Entity: {{entity}}
                Host: {{state_attr(entity,''host_name'')}} Ip: {{state_attr(entity,''ip'')}}
                Mac-address: {{state_attr(entity,''mac'')}} Full data: {{trigger.event.data}}
                '
    # iOS push, time-sensitive but silent.
    - service: notify.mobile_app_tbiphone15promax
      data:
        message: 'Device: {{state_attr(entity,''friendly_name'')}} Entity: {{entity}}
          Host: {{state_attr(entity,''host_name'')}} Ip: {{state_attr(entity,''ip'')}}
          Mac-address: {{state_attr(entity,''mac'')}} Full data: {{trigger.event.data}}
          '
        title: New Device Connected
        data:
          push:
            badge: 1
            sound: none
            interruption-level: time-sensitive
          presentation_options:
          - alert
          - badge
# Restarts Home Assistant when the 'restart_ha' actionable-notification
# action is tapped on a mobile device.
- alias: '[HA] Restart'
  id: ha_restart
  max_exceeded: silent
  trigger:
  - platform: event
    event_type: mobile_app_notification_action
    event_data:
      action: restart_ha
  action:
  - service: homeassistant.restart
# Writes a critical log marker on HA shutdown and, if HA alerts are enabled,
# notifies Slack and the iOS app that Home Assistant has stopped.
- alias: '[HA] Shutdown'
  id: ha_shutdown
  trigger:
  - platform: homeassistant
    event: shutdown
  action:
  - service: system_log.write
    data:
      logger: '{{ this.entity_id }}
        '
      level: critical
      message: '**** HOME ASSISTANT STOP ****'
  - if:
    - condition: state
      entity_id: input_boolean.alerts_ha
      state: 'on'
    then:
    - service: notify.tbsmarthome
      data_template:
        message: Home Assistant Stopped
        target: '#tbsmarthome'
        data:
          blocks:
          - type: section
            text:
              type: mrkdwn
              text: '*Home Assistant Stopped*'
          - type: section
            text:
              type: mrkdwn
              text: Home Assistant Stopped
    - service: notify.mobile_app_tbiphone15promax
      data:
        message: Home Assistant has Stopped
        title: Home Assistant Stopped
        data:
          push:
            badge: 1
            sound: none
            interruption-level: time-sensitive
          presentation_options:
          - alert
          - badge
# Startup housekeeping: logs markers, shows a progress notification, resets
# the automation counter, and clears input_boolean.ha_startup_pending —
# the boolean other automations in this file wait on before running.
- alias: '[HA] Startup'
  id: ha_startup
  trigger:
  - platform: homeassistant
    event: start
  action:
  - service: system_log.write
    data:
      logger: '{{ this.entity_id }}'
      level: critical
      message: '**** HOME ASSISTANT START ****'
  - service: persistent_notification.create
    data:
      title: Automation Startup
      message: Waiting for startup to complete.
      notification_id: hass_restart
  - service: counter.reset
    entity_id: counter.automation_trigger_counter
  # Releases automations gated on 'ha_startup_pending' turning off.
  - service: input_boolean.turn_off
    entity_id: input_boolean.ha_startup_pending
  # Replaces the progress notification (same notification_id).
  - service: persistent_notification.create
    data:
      title: Home Assistant Started
      message: HA started at {{ now().strftime('%-I:%M %p') }}.
      notification_id: hass_restart
  - if:
    - condition: state
      entity_id: input_boolean.alerts_ha
      state: 'on'
    then:
    - service: notify.tbsmarthome
      data_template:
        message: Home Assistant Started
        target: '#tbsmarthome'
        data:
          blocks:
          - type: section
            text:
              type: mrkdwn
              text: Home Assistant Started
          - type: section
            text:
              type: mrkdwn
              text: HA started at {{ now().strftime('%-I:%M %p') }}.
    - service: notify.mobile_app_tbiphone15promax
      data:
        message: HA started at {{ now().strftime('%-I:%M %p') }}.
        title: Home Assistant Started
        data:
          push:
            badge: 1
            sound: none
            interruption-level: time-sensitive
          presentation_options:
          - alert
          - badge
  - service: persistent_notification.dismiss
    data:
      notification_id: hass_restart
  - service: system_log.write
    data:
      logger: '{{ this.entity_id }}'
      level: critical
      message: '**** HOME ASSISTANT STARTUP COMPLETE ****'
# Increments counter.automation_trigger_counter for every automation_triggered
# event, excluding its own entity_id to avoid counting itself.
- alias: '[HA] Count Triggered Automations'
  id: ha_count_triggered_automations
  mode: parallel
  max: 25
  max_exceeded: silent
  trigger:
  - platform: event
    event_type: automation_triggered
  condition:
  - condition: template
    value_template: '{{ not ''automation.ha_count_triggered_automations'' in (trigger.event.data.entity_id|string)
      }}'
  action:
  - service: counter.increment
    entity_id: counter.automation_trigger_counter
# Alerts (Slack + iOS) when the Have I Been Pwned summary sensor reports a
# higher breach count than before.
# Fix: the previous condition did arithmetic on raw states
# ('{{ trigger.to_state.state - trigger.from_state.state >= 1 }}'); entity
# states are strings, so that template raised an error and the automation
# never fired. Cast both sides to int, and skip unknown/unavailable
# transitions so sensor outages (coerced to 0 by int(0)) cannot fire a
# false "new breach" alert after a restart.
- alias: '[HIBP] Notify when new breach recorded'
  id: hibp_notify_new_breach
  mode: single
  trigger:
  - platform: state
    entity_id:
    - sensor.hibp_breach_summary
  condition:
  - condition: template
    value_template: >-
      {% set ignore = ['unknown', 'unavailable', 'none'] %}
      {{ trigger.from_state.state not in ignore
         and trigger.to_state.state not in ignore
         and (trigger.to_state.state | int(0)) - (trigger.from_state.state | int(0)) >= 1 }}
  action:
  - service: notify.tbsmarthome
    data_template:
      message: New Breach on HIBP
      target: '#tbsmarthome'
      data:
        blocks:
        - type: section
          text:
            type: mrkdwn
            text: '*New Breach on HIBP*'
        - type: section
          text:
            type: mrkdwn
            text: HIBP has added a new breach on one of your email addresses
  - service: notify.mobile_app_tbiphone15promax
    data:
      message: HIBP has added a new breach on one of your email addresses
      title: New Breach on HIBP
      data:
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
# Turns holiday mode off (and notifies) as soon as anyone arrives home while
# holiday mode is active.
- alias: '[Presence] Holiday Mode - Auto Disable'
  id: presence_holiday_mode_auto_disable
  trigger:
    platform: state
    entity_id: group.people
    from: not_home
    to: home
  condition:
  - condition: state
    entity_id: input_boolean.holiday_mode
    state: 'on'
  action:
  - service: input_boolean.turn_off
    entity_id: input_boolean.holiday_mode
  - service: notify.alert
    data:
      message: Holiday Mode Disabled
      title: Holiday Mode Auto Disabled
      data:
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
# Turns holiday mode on (and notifies) after everyone has been away from home
# continuously for 24 hours.
- alias: '[Presence] Holiday Mode - Auto Enable'
  id: presence_holiday_mode_auto_enable
  trigger:
    platform: state
    entity_id: group.people
    from: home
    to: not_home
    for: '24:00:00'
  action:
  - service: input_boolean.turn_on
    entity_id: input_boolean.holiday_mode
  - service: notify.alert
    data:
      message: Holiday Mode Enabled
      title: Holiday Mode Auto Enabled
      data:
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
# Rebuilds six 365-day speedtest archive sensors (download/upload ×
# max/min/mean) from InfluxDB via pyscript. Runs hourly, plus once after
# startup (directly on HA start and when ha_startup_pending flips off).
- id: influx2entity_internetstats365day
  alias: 'InfluxDB to Entity: 365 Day Internet Stats'
  description: 'InfluxDB to Entity: 365 Day Internet Stats'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    # 240 seconds — lets InfluxDB/pyscript come up before the first query.
    - delay: 240
  # Download: daily max over the past year.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_download\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        max\")"
      entity_id: sensor.archive_internet_download_max_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Download Speed - Max Speed - 365 Day
      icon: mdi:download-network
  # Download: daily min.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_download\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: min, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        min\")"
      entity_id: sensor.archive_internet_download_min_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Download Speed - Min Speed - 365 Day
      icon: mdi:download-network
  # Download: daily mean.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_download\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: mean, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        mean\")"
      entity_id: sensor.archive_internet_download_mean_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Download Speed - Mean Speed - 365 Day
      icon: mdi:download-network
  # Upload: daily max.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_upload\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        max\")"
      entity_id: sensor.archive_internet_upload_max_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Upload Speed - Max Speed - 365 Day
      icon: mdi:upload-network
  # Upload: daily min.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_upload\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: min, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        min\")"
      entity_id: sensor.archive_internet_upload_min_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Upload Speed - Min Speed - 365 Day
      icon: mdi:upload-network
  # Upload: daily mean.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"Mbit/s\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"speedtest_upload\")\n |> filter(fn: (r) => r[\"_field\"] == \"value\"\
        )\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every: 1d,\
        \ fn: mean, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name: \"\
        mean\")"
      entity_id: sensor.archive_internet_upload_mean_365day
      unit_of_measurement: mbit/s
      friendly_name: Internet Upload Speed - Mean Speed - 365 Day
      icon: mdi:upload-network
# Rebuilds two 365-day Last.fm archive sensors from InfluxDB: cumulative
# playcount (daily last value) and its per-day derivative (daily plays).
- id: influx2entity_lastfmplaycount365day
  alias: 'InfluxDB to Entity: 365 Day LastFM Playcount'
  description: 'InfluxDB to Entity: 365 Day LastFM Playcount'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    # 240 seconds — lets InfluxDB/pyscript come up before the first query.
    - delay: 240
  # Cumulative playcount; zero readings filtered out, gaps forward-filled.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name:\"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"entity_id\"] == \"lastfm_user_playcount\")\n |> filter(fn: (r)\
        \ => r[\"_field\"] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n\
        \ |> filter(fn: (r) => r._value != 0.0 )\n |> aggregateWindow(every: 1d,\
        \ fn: last, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
        \ \"last\")"
      entity_id: sensor.archive_last_fm_playcount_365day
      unit_of_measurement: Songs
      friendly_name: Last.fm - Playcount - 365 Day
      icon: si:lastfm
  # Same series but derivative(unit: 1d) → plays per day.
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name:\"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"entity_id\"] == \"lastfm_user_playcount\")\n |> filter(fn: (r)\
        \ => r[\"_field\"] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n\
        \ |> filter(fn: (r) => r._value != 0.0 )\n |> aggregateWindow(every: 1d,\
        \ fn: last, createEmpty: true)\n |> fill(usePrevious: true)\n |> derivative(unit:\
        \ 1d)\n |> yield(name: \"last\")"
      entity_id: sensor.archive_last_fm_dailyplaycount_365day
      unit_of_measurement: Songs
      friendly_name: Last.fm - Daily Playcount - 365 Day
      icon: si:lastfm
# When Thomas leaves the home zone: fade off two lights (20 s transition) and
# stop playback on four media-player devices.
- id: c23a62bc-023c-4235-bf6d-bf768bd81006
  alias: '[Presence] Leave Home > Switch Off'
  description: ''
  trigger:
  - platform: zone
    entity_id: person.thomas_baxter
    zone: zone.home
    event: leave
  condition: []
  action:
  - service: light.turn_off
    metadata: {}
    data:
      transition: 20
    target:
      device_id:
      - 126e91428c97177372f5ef7398eb537e
      - 6623c3d12a6637f37fab2c6e24c41afc
  - service: media_player.media_stop
    metadata: {}
    data: {}
    target:
      device_id:
      - 8fbcca75df7f6f0eb5cd1cae9194d2d2
      - 8975bd002481c313a15a4822793e8c2c
      - 74e08bc2098b8b7eacd3efdba5af32f3
      - 68de45dcbb7f4a3ea545c62e767c7c8b
  mode: single
# Sends a weather alert whenever the lightning-strike count changes while the
# most recent strike distance is under 10 km. mode: single suppresses repeats
# while a previous notification action is still running.
- alias: '[Notification] Lightning Warning'
  id: notification_lightning_warning
  mode: single
  trigger:
  - platform: state
    entity_id: sensor.weather_lightning_count
  condition:
  - condition: numeric_state
    entity_id: sensor.weather_lightning_distance
    below: 10
  action:
  - service: notify.alert
    data:
      title: Weather Alert
      message: Lightning within 10km of home!
      data:
        # Fixed tag so repeated alerts replace rather than stack.
        tag: weather-lightning
        subtitle: ⚡️ Lightning within 10km ⚡
# Zeroes the warning/critical/error log counters on every HA start.
- id: logs_counter_reset
  alias: Logs - Counters reset on start
  mode: queued
  max: 100
  max_exceeded: silent
  trigger:
    platform: homeassistant
    event: start
  action:
  - service: counter.reset
    entity_id: counter.logs_warning_counter
  - service: counter.reset
    entity_id: counter.logs_critical_counter
  - service: counter.reset
    entity_id: counter.logs_error_counter
# Alerts when sensor.ha_logfile_recent_change reports the log file grew by
# more than 1 MB.
# Fix: the message/title templates called states("ha_logfile_recent_change")
# without the 'sensor.' domain prefix, so states() could not resolve the
# entity and both always rendered 'unknown'. Use the fully qualified id.
- alias: '[HA] Log File Increase - Notification'
  id: ha_logfileincrease_notification
  trigger:
    platform: numeric_state
    entity_id:
    - sensor.ha_logfile_recent_change
    above: 1
    # Coerce the sensor state to an int for the numeric comparison.
    value_template: '{{state.state | int(0) }}'
  action:
  - service: notify.alert
    data:
      message: 'The Home Assistant Log File recently increased {{ states("sensor.ha_logfile_recent_change")
        }} MB. Please check whether something may be wrong
        '
      title: 'Home Assistant Log Increased {{ states("sensor.ha_logfile_recent_change") }}
        MB
        '
      data:
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
# Applies the selected default log level whenever input_select.log_level
# changes.
# NOTE(review): '[HA] Log Level Selection' (ha_log_level_selection) also sets
# the default level from the same input_select — these two overlap; confirm
# whether both are intended.
- id: logger_setdefaultlevel
  alias: Logger - Set Default Level
  trigger:
  - platform: state
    entity_id: input_select.log_level
  action:
    service: logger.set_default_level
    data_template:
      level: '{{ states(''input_select.log_level'') }}'
# Re-applies the chosen default log level on HA start and whenever the
# input_select changes, then writes a critical marker recording the level.
- id: ha_log_level_selection
  alias: '[HA] Log Level Selection'
  variables:
    level: '{{ states(''input_select.log_level'') }}'
  trigger:
  - platform: homeassistant
    event: start
  - platform: state
    entity_id: input_select.log_level
    # to: null — fires on any state-value change (presumably chosen to skip
    # attribute-only updates; confirm against the HA state-trigger docs).
    to: null
  action:
  - service: logger.set_default_level
    data:
      level: '{{ level }}'
  - service: system_log.write
    data:
      logger: '{{ this.entity_id }}'
      level: critical
      message: '***** HASS LOG LEVEL SET TO : {{ level }} *****'
# Tallies system_log events into per-severity counters (warning / error /
# critical). Queued up to 20 so bursts are counted in order.
- alias: '[Logs] Increment Counter'
  id: logs_increment_counter
  trigger:
  - platform: event
    event_type: system_log_event
    event_data:
      level: ERROR
    id: error
  - platform: event
    event_type: system_log_event
    event_data:
      level: WARNING
    id: warning
  - platform: event
    event_type: system_log_event
    event_data:
      level: CRITICAL
    id: critical
  action:
  - choose:
    - conditions:
      - condition: trigger
        id: warning
      sequence:
      - service: counter.increment
        entity_id: counter.logs_warning_counter
    - conditions:
      - condition: trigger
        id: error
      # Ignore errors mentioning the error counter itself to avoid a
      # self-feeding loop.
      - condition: and
        conditions:
        - condition: template
          value_template: '{{ not ''logs_error_counter_increment'' in (trigger.event.data.message|string)
            }}'
      sequence:
      - service: counter.increment
        entity_id: counter.logs_error_counter
    - conditions:
      - condition: trigger
        id: critical
      sequence:
      - service: counter.increment
        entity_id: counter.logs_critical_counter
  mode: queued
  max: 20
  max_exceeded: silent
# Rebuilds two miscellaneous 365-day archive sensors from InfluxDB:
# Reddit karma and Untappd total beers (both daily-last, forward-filled).
- id: influx2entity_misc
  alias: 'InfluxDB to Entity: Misc'
  description: 'InfluxDB to Entity: Misc'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    # 240 seconds — lets InfluxDB/pyscript come up before the first query.
    - delay: 240
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"karma\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"reddit_profile_bacco007\")\n |> filter(fn: (r) => r[\"_field\"] ==\
        \ \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: last, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
        \ \"last\")"
      entity_id: sensor.archive_reddit_karma_365day
      unit_of_measurement: Karma
      friendly_name: Reddit - Karma - 365 Day
      icon: mdi:reddit
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: 'import "timezone" option location = timezone.location(name: "Australia/Sydney")
        from(bucket: "homeassistant") |> range(start: -1y) |> filter(fn: (r) => r["entity_id"]
        == "untappd_total_beers") |> filter(fn: (r) => r["_field"] == "value") |>
        drop(columns: ["friendly_name"]) |> aggregateWindow(every: 1d, fn: last, createEmpty:
        true) |> fill(usePrevious: true) |> yield(name: "last")'
      entity_id: sensor.archive_untappd_beers_365day
      unit_of_measurement: Beers
      friendly_name: Untappd - Beers - 365 Day
      icon: mdi:beer-outline
# When a tagged mobile notification is cleared on one device, mirror the
# clear to all devices via notify.alert's 'clear_notification' message.
- id: alert_mobile_cleared
  alias: '[Notifications] Mobile Notification Cleared'
  mode: parallel
  trigger:
  - platform: event
    event_type: mobile_app_notification_cleared
  condition:
  # Only notifications that carry a tag can be cleared remotely.
  - '{{ trigger.event.data.tag is defined }}'
  action:
  # Disable this automation first (keeping the running actions alive) so the
  # mirrored clear does not re-trigger it in a loop.
  - service: automation.turn_off
    target:
      entity_id: automation.notifications_mobile_notification_cleared
    data:
      stop_actions: false
  - service: notify.alert
    data:
      message: clear_notification
      data:
        tag: '{{ trigger.event.data[''tag''] }}'
    continue_on_error: true
  - service: automation.turn_on
    target:
      entity_id: automation.notifications_mobile_notification_cleared
# Maintains three monitoring groups every minute (and after group reloads):
# unavailable entities (with an extensive ignore list of domains, labels,
# devices and integrations), pending updates, and offline connectivity
# monitors. Uses the newer 'triggers:'/'actions:' automation keys.
# NOTE(review): the big template filters the 'gtfs2' and 'browser_mod'
# integrations twice — harmless but redundant; consider deduplicating.
- id: ha_autogroup_monitored_entities
  alias: '[HA] Autogroup - Monitored Entities'
  description: Update Monitored Entities Groups automatically
  mode: single
  max_exceeded: silent
  triggers:
  - platform: event
    event_type: call_service
    event_data:
      domain: group
      service: reload
  - platform: time_pattern
    minutes: /1
  actions:
  - service: group.set
    data:
      object_id: monitor_unavailable_entities
      name: '[Group] Monitor - Unavailable Entities'
      icon: mdi:check-circle
      # Collects unknown/unavailable entities, excluding: ignored domains,
      # disabled-device entities, an explicit ignore group, label-ignored
      # devices/entities, entities changed within the last 60 s, a set of
      # entity_id substrings, and entities from listed integrations.
      entities: "{%- set ignore_seconds = 60 %} {% set ignore_label = 'ignored' %}\
        \ {% set ignored_domains = ['button', 'conversation', 'device_tracker', 'event',\
        \ 'group', 'image', 'input_button', 'input_text', 'remote', 'tts', 'scene',\
        \ 'stt', 'media_player'] %} {% set ignore_ts = (now().timestamp() - ignore_seconds)|as_datetime\
        \ %} {% set disabled_device_entities = state_attr('sensor.disabled_entities',\
        \ 'entities')\n | regex_replace(find='\\[|\\]|\\{|\\}|\\'entity_id\\':',\
        \ replace='') %}\n{% set ignored_devices = label_devices(ignore_label | lower)\
        \ %} {% set ignored_device_entities = namespace(value=[]) %} {% for device\
        \ in ignored_devices %}\n {% set ignored_device_entities.value = ignored_device_entities.value\
        \ + device_entities(device) %}\n{% endfor %} {{ states\n | rejectattr('domain',\
        \ 'in', ignored_domains)\n | rejectattr('entity_id', 'in', disabled_device_entities)\n\
        \ | rejectattr('entity_id', 'in', state_attr('group.ignored_unavailable_entities',\
        \ 'entity_id'))\n | rejectattr('entity_id', 'in', ['group.monitor_unavailable_entities',\
        \ 'group.ignored_unavailable_entities'])\n | rejectattr('entity_id', 'in',\
        \ ignored_device_entities.value)\n | rejectattr('entity_id', 'in', label_entities(ignore_label\
        \ | lower))\n | rejectattr('last_changed', 'ge', ignore_ts)\n | selectattr('state',\
        \ 'in', ['unknown', 'unavailable'])\n |rejectattr('entity_id','search','rssfeed_')\n\
        \ |rejectattr('entity_id','search','button.')\n |rejectattr('entity_id','search','select.')\n\
        \ |rejectattr('entity_id','search','_color')\n |rejectattr('entity_id','search','wunder')\n\
        \ |rejectattr('entity_id','search','gosungrow')\n |rejectattr('entity_id','search','z2m')\n\
        \ |rejectattr('entity_id','search','zigbee2mqtt')\n |rejectattr('entity_id','search','local_stop_zone')\n\
        \ |rejectattr('entity_id','search','in_radius_zone')\n |rejectattr('entity_id','search','sensor.weather_')\n\
        \ |rejectattr('entity_id','search','upcoming_sport')\n |rejectattr('entity_id','in',expand(integration_entities('browser_mod'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('gdacs'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('github'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('glances'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('hassio'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('openuv'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('utility_meter'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('waqi'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('tautulli'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('here_travel_time'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('tomorrowio'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('pirateweather'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('proxmoxve'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('blitzortung'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('ble_monitor'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('waternsw'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('ical'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('bureau_of_meteorology'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('weatherflow'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('eufy_security'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('browser_mod'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('mobile_app'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('gtfs2'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('google_fit'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('sensibo'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('astroweather'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('satellitetracker'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('gtfs2'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('openweathermap'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('withings'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('hdhomerun'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('flightradar24'))|map(attribute='entity_id')|list)\n\
        \ |rejectattr('entity_id','in',expand(integration_entities('bermuda'))|map(attribute='entity_id')|list)\n\
        \ | map(attribute='entity_id') | list | sort }}\n"
  # Update entities currently flagged 'on' (update pending).
  - service: group.set
    data:
      object_id: monitor_available_updates
      name: '[Group] Monitor - Available Updates'
      entities: "{{ states.update\n | selectattr('state', 'eq', 'on')\n | map(attribute='entity_id')\
        \ | list | sort }}\n"
  # Connectivity monitors currently 'off' (offline).
  - service: group.set
    data:
      object_id: monitor_offline_integrations
      name: '[Group] Monitor - Offline Integrations'
      entities: "{{ expand('group.connectivity_monitors') | selectattr('state', 'eq',\
        \ 'off')\n | map(attribute='entity_id') | list | sort }}\n"
# Webhook endpoint (POST/PUT, LAN only): toggles the matching Sonos favourite
# script and records the current selection in input_text.sonos_playlist_playing.
- id: sonos_favourites_play_msnbc
  alias: "[Sonos Favourites] Play MSNBC"
  description: "Play MSNBC on Sonos"
  trigger:
  - platform: webhook
    webhook_id: sonos_msnbc
    local_only: true
    allowed_methods:
    - POST
    - PUT
  action:
  # script.toggle lets the same webhook start and stop the stream.
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_msnbc
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: "MSNBC"
# Rebuilds device_tracker groups every 5 minutes (and on startup / group
# reload): wired, wireless, unknown, bluetooth, BLE and iCloud3 devices,
# classified by each tracker's source_type / device_type / integration
# attributes.
- id: ha_autogroup_network_devices
  alias: '[HA] Autogroup - Network Devices'
  mode: single
  max_exceeded: silent
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: event
    event_type: call_service
    event_data:
      domain: group
      service: reload
  - platform: time_pattern
    minutes: /5
  action:
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    # 240 seconds — wait for trackers to populate after a restart.
    - delay: 240
  # Router-sourced trackers without an ESSID → wired.
  - service: group.set
    data:
      object_id: wired_devices
      entities: "{{ states.device_tracker\n | selectattr('attributes.source_type',\
        \ 'defined')\n | selectattr('attributes.source_type', 'eq', 'router')\n\
        \ | rejectattr('attributes.essid', 'defined')\n | map(attribute='entity_id')\
        \ | list | sort }}\n"
  # Router-sourced trackers with an ESSID → wireless.
  - service: group.set
    data:
      object_id: wireless_devices
      entities: "{{ states.device_tracker\n | selectattr('attributes.source_type',\
        \ 'defined')\n | selectattr('attributes.source_type', 'eq', 'router')\n\
        \ | selectattr('attributes.essid', 'defined')\n | map(attribute='entity_id')\
        \ | list | sort }}\n"
  # Everything not classified by the other groups.
  - service: group.set
    data:
      object_id: unknown_devices
      entities: "{{ states.device_tracker\n | rejectattr('attributes.source_type',\
        \ 'in', ['router', 'bluetooth_le'])\n | rejectattr('attributes.device_type',\
        \ 'in',\n ['bluetooth', 'wired', 'wireless', 'location', 'bluetooth_le'])\n\
        \ | rejectattr('attributes.integration', 'in', ['iCloud3'])\n | map(attribute='entity_id')\
        \ | select('has_value') | list | sort }}\n"
  - service: group.set
    data:
      object_id: bluetooth_devices
      entities: "{{ states.device_tracker\n | selectattr('attributes.device_type',\
        \ 'eq', 'bluetooth')\n | map(attribute='entity_id') | list | sort }}\n"
  - service: group.set
    data:
      object_id: bluetooth_le_devices
      entities: "{{ states.device_tracker\n | selectattr('attributes.source_type',\
        \ 'eq', 'bluetooth_le')\n | map(attribute='entity_id') | list | sort }}\n"
  - service: group.set
    data:
      object_id: icloud3_devices
      entities: "{{ states.device_tracker\n | selectattr('attributes.integration',\
        \ 'in', ['iCloud3'])\n | map(attribute='entity_id') | list | sort }}\n"
- alias: '[Sonos Favourites] Play Nova 90s'
description: Play Nova 90s on Sonos
id: sonos_favourites_play_nova_90s
trigger:
- platform: webhook
webhook_id: sonos_nova_90s
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_nova_90s
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Nova 90s
- alias: '[Sonos Favourites] Play Nova 96.9'
description: Play Nova 96.9 on Sonos
id: sonos_favourites_play_nova_96_9
trigger:
- platform: webhook
webhook_id: sonos_nova_96_9
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_nova_96_9
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Nova 96.9
- alias: '[Sonos Favourites] Play Nova Nation'
description: Play Nova Nation on Sonos
id: sonos_favourites_play_nova_nation
trigger:
- platform: webhook
webhook_id: sonos_nova_nation
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_nova_nation
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Nova Nation
- alias: '[Sonos Favourites] Play Nova Throwbacks'
description: Play Nova Throwbacks on Sonos
id: sonos_favourites_play_nova_throwbacks
trigger:
- platform: webhook
webhook_id: sonos_nova_throwbacks
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_nova_throwbacks
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Nova Throwbacks
- alias: '[Notifications] Offline Integrations'
id: notifications_offline_integrations
mode: single
trigger:
- platform: time_pattern
minutes: /1
condition:
- condition: template
value_template: "{{ expand('group.connectivity_monitors')\n | rejectattr('entity_id',\
\ 'in', ['binary_sensor.ha_connected_integration_offline_integrations'])\n \
\ | selectattr('state', 'eq', 'off')\n | list\n | count > 0 }}\n"
action:
- variables:
current_count: "{{ expand('group.connectivity_monitors')\n | rejectattr('entity_id',\
\ 'in', ['binary_sensor.ha_connected_integration_offline_integrations'])\n\
\ | selectattr('state', 'eq', 'off')\n | list\n | count }}\n"
previous_count: '{{ states(''input_number.offline_integration_count'') | int
}}
'
- choose:
- conditions:
- condition: template
value_template: '{{ current_count != previous_count }}'
sequence:
- service: notify.alert
data:
title: Offline Integrations Detected
message: "{{ current_count }} Offline Integrations Detected: {{ expand('group.connectivity_monitors')\n\
\ | rejectattr('entity_id', 'in', ['binary_sensor.ha_connected_integration_offline_integrations'])\n\
\ | selectattr('state', 'eq', 'off')\n | map(attribute='entity_id')\n\
\ | map('replace', 'binary_sensor.ha_connected_integration_', '')\n\
\ | list\n | sort\n | join('\\n- ') }}"
data:
tag: offline_integrations
notification_id: offline_integrations
push:
badge: 1
sound: none
interruption-level: time-sensitive
presentation_options:
- alert
- badge
group: alerts
actions:
- title: Restart HA
action: notify_action_restart_ha
destructive: true
authenticationRequired: true
- title: Pause Alert
action: pause_alert_offline_integrations
- service: input_number.set_value
data:
entity_id: input_number.offline_integration_count
value: '{{ current_count }}'
- alias: '[OpenUV] Update During Day'
  id: 003780ee-c3a1-475a-b54f-8f2ef3a5c2dc
trigger:
- platform: time_pattern
minutes: /20
condition:
- condition: state
entity_id: binary_sensor.connected_wan
state: 'on'
- condition: sun
after: sunrise
- condition: sun
before: sunset
action:
service: homeassistant.update_entity
target:
entity_id:
- sensor.current_uv_index
- id: pause_alert
alias: '[Notifications] Pause Alert'
mode: parallel
variables:
alert: '{{ trigger.event.data[''action''][12:] }}'
trigger:
- platform: event
event_type: mobile_app_notification_action
condition: '{{ trigger.event.data[''action''][:11] == ''pause_alert'' }}'
action:
- service: automation.turn_off
target:
entity_id: automation.notifications_clear_alert
data:
stop_actions: false
- service: alert.turn_off
target:
entity_id: alert.{{ alert }}
continue_on_error: true
- service: automation.turn_on
target:
entity_id: automation.notifications_clear_alert
- alias: '[Sonos Favourites] Play Playlists'
description: Play Playlists on Sonos
id: sonos_favourites_play_playlists
trigger:
- platform: webhook
webhook_id: sonos_playlists
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_playlists
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Playlists
- id: pyscript_getdata_untappd
alias: '[PyScript] GetData: Untappd'
description: '[PyScript] GetData: Untappd'
mode: restart
max_exceeded: warning
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: time_pattern
hours: /3
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: pyscript.getdata_untappd_project52
- id: pyscript_getdata_essentialenergy
  alias: '[PyScript] GetData: Essential Energy Outages'
  description: '[PyScript] GetData: Essential Energy Outages'
mode: restart
max_exceeded: warning
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: time_pattern
minutes: /15
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: pyscript.getdata_essentialenergyoutages
data:
entity_id: sensor.essential_energy_outages_current
dataseturl: https://www.essentialenergy.com.au/Assets/kmz/current.kml
friendly_name: '[Essential Energy] Outages, Current'
icon: mdi:alert-circle
- service: pyscript.getdata_essentialenergyoutages
data:
entity_id: sensor.essential_energy_outages_future
dataseturl: https://www.essentialenergy.com.au/Assets/kmz/future.kml
friendly_name: '[Essential Energy] Outages, Future'
icon: mdi:alert-box
- service: pyscript.getdata_essentialenergyoutages
data:
entity_id: sensor.essential_energy_outages_cancelled
dataseturl: https://www.essentialenergy.com.au/Assets/kmz/cancelled.kml
friendly_name: '[Essential Energy] Outages, Cancelled'
icon: mdi:alert-circle-check
- id: pyscript_getdata_hazardwatch
  alias: '[PyScript] GetData: Hazard Watch'
  description: '[PyScript] GetData: Hazard Watch'
trigger:
- platform: homeassistant
event: start
- platform: time_pattern
hours: /1
condition: []
mode: single
action:
- service: pyscript.getdata_hazardwatch
- id: pyscript_getdata_livetraffic
  alias: '[PyScript] GetData: Live Traffic'
  description: '[PyScript] GetData: Live Traffic'
mode: restart
max_exceeded: warning
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: time_pattern
minutes: /30
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_majorevents
dataseturl: https://www.livetraffic.com/traffic/hazards/majorevent.json
friendly_name: Live Traffic NSW - Major Events
icon: mdi:party-popper
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_incidentslga
dataseturl: https://www.livetraffic.com/traffic/hazards/regional/lga-incidents.json
friendly_name: Live Traffic NSW - Incidents (LGA)
icon: mdi:road-variant
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_roadwork
dataseturl: https://www.livetraffic.com/traffic/hazards/roadwork.json
friendly_name: Live Traffic NSW - Roadwork
icon: mdi:dump-truck
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_incidents
dataseturl: https://www.livetraffic.com/traffic/hazards/incident.json
friendly_name: Live Traffic NSW - Incidents
icon: mdi:road-variant
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_flood
dataseturl: https://www.livetraffic.com/traffic/hazards/flood.json
friendly_name: Live Traffic NSW - Flood
icon: mdi:home-flood
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_alpine
dataseturl: https://www.livetraffic.com/traffic/hazards/alpine.json
friendly_name: Live Traffic NSW - Alpine
icon: mdi:snowflake-alert
- service: pyscript.getdata_livetrafficnsw
data:
entity_id: sensor.getdata_livetrafficnsw_fire
dataseturl: https://www.livetraffic.com/traffic/hazards/fire.json
friendly_name: Live Traffic NSW - Fire
icon: mdi:fire-alert
- id: pyscript_getdata_yourspotify
  alias: '[PyScript] GetData: Your Spotify'
  description: '[PyScript] GetData: Your Spotify'
mode: restart
max_exceeded: warning
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: time_pattern
minutes: /5
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: pyscript.getdata_yourspotify
data: {}
- alias: Python Script - Update ha_overview
trigger:
- event: start
platform: homeassistant
- platform: time_pattern
minutes: /30
action:
- service: python_script.ha_overview
- alias: '[Sonos Favourites] Play Radio Hauraki 99.0 (Rock Music)'
description: Play Radio Hauraki 99.0 (Rock Music) on Sonos
id: sonos_favourites_play_radio_hauraki_99_0_rock_music
trigger:
- platform: webhook
webhook_id: sonos_radio_hauraki_99_0_rock_music
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_hauraki_99_0_rock_music
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio Hauraki 99.0 (Rock Music)
- alias: '[Sonos Favourites] Play Radio X'
description: Play Radio X on Sonos
id: sonos_favourites_play_radio_x
trigger:
- platform: webhook
webhook_id: sonos_radio_x
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X
- alias: '[Sonos Favourites] Play Radio X 00s'
description: Play Radio X 00s on Sonos
id: sonos_favourites_play_radio_x_00s
trigger:
- platform: webhook
webhook_id: sonos_radio_x_00s
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x_00s
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X 00s
- alias: '[Sonos Favourites] Play Radio X 90s'
description: Play Radio X 90s on Sonos
id: sonos_favourites_play_radio_x_90s
trigger:
- platform: webhook
webhook_id: sonos_radio_x_90s
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x_90s
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X 90s
- alias: '[Sonos Favourites] Play Radio X Chilled'
description: Play Radio X Chilled on Sonos
id: sonos_favourites_play_radio_x_chilled
trigger:
- platform: webhook
webhook_id: sonos_radio_x_chilled
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x_chilled
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X Chilled
- alias: '[Sonos Favourites] Play Radio X Classic Rock'
description: Play Radio X Classic Rock on Sonos
id: sonos_favourites_play_radio_x_classic_rock
trigger:
- platform: webhook
webhook_id: sonos_radio_x_classic_rock
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x_classic_rock
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X Classic Rock
- alias: '[Sonos Favourites] Play Radio X Classic Rock (Classic Rock)'
description: Play Radio X Classic Rock (Classic Rock) on Sonos
id: sonos_favourites_play_radio_x_classic_rock_classic_rock
trigger:
- platform: webhook
webhook_id: sonos_radio_x_classic_rock_classic_rock
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_radio_x_classic_rock_classic_rock
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Radio X Classic Rock (Classic Rock)
- id: notify_action_restart_ha
  alias: '[Notifications Action] Restart HA'
max_exceeded: silent
trigger:
- platform: event
event_type: mobile_app_notification_action
event_data:
action: notify_action_restart_ha
action:
- service: homeassistant.restart
- id: notify_action_start_sungather
alias: '[Notifications Action] Start Sungather'
max_exceeded: silent
trigger:
- platform: event
event_type: mobile_app_notification_action
event_data:
action: notify_action_start_sungather
action:
- service: hassio.addon_start
data:
addon: 7b536ee6_sungather
- id: notify_action_restart_zigbee2mqtt
alias: '[Notifications Action] Restart Zigbee2MQTT'
max_exceeded: silent
trigger:
- platform: event
event_type: mobile_app_notification_action
event_data:
action: notify_action_restart_zigbee2mqtt
action:
- service: hassio.addon_restart
data:
addon: 45df7312_zigbee2mqtt_edge
- alias: 'Admin: Run Chores'
description: Run shell and Python scripts in utils folder.
trigger:
platform: time_pattern
hours: /3
action:
service: shell_command.run_chores
- alias: Admin - Run Watchman (Daily)
id: 669e9cda-a9ba-484d-a715-53864fc0d61c
initial_state: true
trigger:
- platform: time_pattern
hours: '3'
condition: []
action:
- service: watchman.report
data:
create_file: true
send_notification: false
parse_config: true
- alias: '[Presence] Set Night Mode'
id: presence_night_mode
mode: single
trigger:
- platform: state
entity_id:
- sun.sun
to: below_horizon
id: sun_down
- platform: state
entity_id:
- sun.sun
to: above_horizon
id: sun_up
condition: []
action:
- choose:
- conditions:
- condition: trigger
id:
- sun_up
sequence:
- service: input_boolean.turn_off
data: {}
target:
entity_id: input_boolean.night_mode
- conditions:
- condition: trigger
id:
- sun_down
sequence:
- service: input_boolean.turn_on
data: {}
target:
entity_id: input_boolean.night_mode
- alias: '[Sonos Favourites] Play Sky News Radio'
description: Play Sky News Radio on Sonos
id: sonos_favourites_play_sky_news_radio
trigger:
- platform: webhook
webhook_id: sonos_sky_news_radio
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_sky_news_radio
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Sky News Radio
- alias: '[Sonos Favourites] Play Sky Sports Radio'
description: Play Sky Sports Radio on Sonos
id: sonos_favourites_play_sky_sports_radio
trigger:
- platform: webhook
webhook_id: sonos_sky_sports_radio
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_sky_sports_radio
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Sky Sports Radio
- id: influx2entity_solar365dayyield
alias: 'InfluxDB to Entity: 365 Day Solar Yield'
description: 'InfluxDB to Entity: 365 Day Solar Yield'
mode: restart
max_exceeded: warning
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: time_pattern
hours: /1
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: pyscript.influxdb2_query_to_entity
data:
query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
\ (r) => r[\"_measurement\"] == \"kWh\")\n |> filter(fn: (r) => r[\"entity_id\"\
] == \"sungrowsolar_daily_power_yields\")\n |> filter(fn: (r) => r[\"_field\"\
] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
\ 1d, fn: max, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
\ \"max\")"
entity_id: sensor.archive_solar_yield_365day
unit_of_measurement: kWh
friendly_name: Solar - 365 Day - Yield
icon: mdi:solar-power
- service: pyscript.influxdb2_query_to_entity
data:
query: "import \"timezone\" option location = timezone.location(name:\n \
\ \"Australia/Sydney\") from(bucket: \"homeassistant\")\n |> range(start:\
\ -1y)\n |> filter(fn: (r) => r._field == \"value\")\n |> filter(fn: (r)\
\ => r.entity_id == \"sungrowsolar_daily_power_yields\")\n |> drop(columns:\
\ [\"friendly_name\"])\n |> aggregateWindow(every: 1d, fn: max, createEmpty:\
\ true)\n |> movingAverage(n: 7)\n |> fill(usePrevious: true)\n |> keep(columns:\
\ [\"_time\", \"_value\"])\n |> sort(columns: [\"_time\"])"
entity_id: sensor.archive_solar_yield_365day_avg
unit_of_measurement: kWh
friendly_name: Solar - 365 Day - Yield - Avg
icon: mdi:solar-power
- alias: '[Solcast] Update During Day'
id: solcast_update
trigger:
- platform: time_pattern
hours: /2
condition:
- condition: state
entity_id: binary_sensor.connected_wan
state: 'on'
- condition: sun
before: sunset
after: sunrise
action:
- delay:
seconds: '{{ range(30, 360)|random|int }}'
- service: solcast_solar.update_forecasts
data: {}
mode: single
- id: notify_action_restart_gdbackup
alias: '[Notifications Action] Start Google Drive Backup'
max_exceeded: silent
trigger:
- platform: event
event_type: mobile_app_notification_action
event_data:
action: notify_action_restart_gdbackup
action:
- service: hassio.addon_start
data:
addon: cebe7a76_hassio_google_drive_backup
- id: notify_action_start_weatherflow2mqtt
alias: '[Notifications Action] Start Weatherflow2MQTT'
max_exceeded: silent
trigger:
- platform: event
event_type: mobile_app_notification_action
event_data:
action: notify_action_start_weatherflow2mqtt
action:
- service: hassio.addon_start
data:
addon: 67f4b1af_weatherflow2mqtt
- alias: '[Home] Sunset Notification'
description: ''
mode: single
trigger:
- platform: sun
event: sunset
    offset: '-00:05:00'
action:
- service: notify.alert
data:
title: The Sun has gone down and the moon has come up
message: Sunset in 5 minutes!
data:
push:
badge: 1
interruption-level: time-sensitive
presentation_options:
- alert
- badge
- id: '1712972875484'
alias: '[Webhooks] Notifications from Synology (TheCube2)'
description: ''
trigger:
- platform: webhook
webhook_id: synology_notify
allowed_methods:
- POST
- PUT
local_only: true
condition: []
action:
- service: notify.alert
data:
title: Synology Notification
message: '{{trigger.json.text}}'
data:
tag: synology_notify
notification_id: synology_notify
push:
badge: 1
sound: none
interruption-level: time-sensitive
presentation_options:
- alert
- badge
group: alerts
actions:
- action: URI
title: OPEN WWW
uri: http://192.168.1.100:5000
- action: URI
title: OPEN APP
uri: app://com.synology.DSfinder
- alias: '[Sonos Favourites] Play talkSPORT'
description: Play talkSPORT on Sonos
id: sonos_favourites_play_talksport
trigger:
- platform: webhook
webhook_id: sonos_talksport
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_talksport
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: talkSPORT
- alias: '[Sonos Favourites] Play talkSPORT 2 (Sports Talk & News)'
description: Play talkSPORT 2 (Sports Talk & News) on Sonos
id: sonos_favourites_play_talksport_2_sports_talk_news
trigger:
- platform: webhook
webhook_id: sonos_talksport_2_sports_talk_news
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_talksport_2_sports_talk_news
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: talkSPORT 2 (Sports Talk & News)
- alias: '[Sonos Favourites] Play Tamworth''s 88.9FM'
description: Play Tamworth's 88.9FM on Sonos
id: sonos_favourites_play_tamworth_s_88_9fm
trigger:
- platform: webhook
webhook_id: sonos_tamworth_s_88_9fm
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_tamworth_s_88_9fm
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Tamworth's 88.9FM
- id: ha_templates_reloaded
alias: '[HA] Log Template Reload'
trigger:
- platform: event
event_type: event_template_reloaded
action:
- service: system_log.write
data:
logger: '{{ this.entity_id }}'
level: critical
message: '**** Templates have been reloaded. ****'
- service: browser_mod.notification
data:
duration: 30000
message: '**** Templates have been reloaded. ****'
- delay: 1
- service: browser_mod.refresh
- alias: '[Notifications] Today''s Birthdays'
id: notifications_todaysbirthdays
mode: single
trigger:
- platform: time
at: 07:00
variables:
ann_list: '{{ expand(integration_entities(''anniversaries'')) | selectattr(''state'',
''eq'', ''0'') | list }}'
condition:
- alias: Check if list of Anniversaries with state 0 has any items in it
condition: template
value_template: '{{ ann_list | count > 0 }}'
action:
- action: notify.alert
data:
      title: Today's Birthdays
message: "{%- for l in ann_list %}\n {{state_attr(l.entity_id, 'friendly_name')}}\n\
{%- endfor %}\n"
data:
tag: today_birthday
notification_id: today_birthday
push:
badge: 1
sound: none
interruption-level: time-sensitive
presentation_options:
- alert
- badge
group: alerts
- alias: '[Sonos Favourites] Play TripleM 2000s'
description: Play TripleM 2000s on Sonos
id: sonos_favourites_play_triplem_2000s
trigger:
- platform: webhook
webhook_id: sonos_triplem_2000s
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triplem_2000s
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: TripleM 2000s
- alias: '[Sonos Favourites] Play TripleM Greatest Hits Digital'
description: Play TripleM Greatest Hits Digital on Sonos
id: sonos_favourites_play_triplem_greatest_hits_digital
trigger:
- platform: webhook
webhook_id: sonos_triplem_greatest_hits_digital
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triplem_greatest_hits_digital
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: TripleM Greatest Hits Digital
- alias: '[Sonos Favourites] Play TripleM Soft Rock Digital'
description: Play TripleM Soft Rock Digital on Sonos
id: sonos_favourites_play_triplem_soft_rock_digital
trigger:
- platform: webhook
webhook_id: sonos_triplem_soft_rock_digital
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triplem_soft_rock_digital
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: TripleM Soft Rock Digital
- alias: '[Sonos Favourites] Play Triple J Hottest'
description: Play Triple J Hottest on Sonos
id: sonos_favourites_play_triple_j_hottest
trigger:
- platform: webhook
webhook_id: sonos_triple_j_hottest
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_j_hottest
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple J Hottest
- alias: '[Sonos Favourites] Play triple j (New South Wales)'
description: Play triple j (New South Wales) on Sonos
id: sonos_favourites_play_triple_j_new_south_wales
trigger:
- platform: webhook
webhook_id: sonos_triple_j_new_south_wales
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_j_new_south_wales
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: triple j (New South Wales)
- alias: '[Sonos Favourites] Play Triple M 104.9 Sydney'
description: Play Triple M 104.9 Sydney on Sonos
id: sonos_favourites_play_triple_m_104_9_sydney
trigger:
- platform: webhook
webhook_id: sonos_triple_m_104_9_sydney
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_104_9_sydney
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M 104.9 Sydney
- alias: '[Sonos Favourites] Play Triple M 105.1 Melbourne'
description: Play Triple M 105.1 Melbourne on Sonos
id: sonos_favourites_play_triple_m_105_1_melbourne
trigger:
- platform: webhook
webhook_id: sonos_triple_m_105_1_melbourne
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_105_1_melbourne
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M 105.1 Melbourne
- alias: '[Sonos Favourites] Play Triple M 80s (Sydney)'
description: Play Triple M 80s (Sydney) on Sonos
id: sonos_favourites_play_triple_m_80s_sydney
trigger:
- platform: webhook
webhook_id: sonos_triple_m_80s_sydney
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_80s_sydney
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M 80s (Sydney)
- alias: '[Sonos Favourites] Play Triple M 90s (Sydney)'
description: Play Triple M 90s (Sydney) on Sonos
id: sonos_favourites_play_triple_m_90s_sydney
trigger:
- platform: webhook
webhook_id: sonos_triple_m_90s_sydney
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_90s_sydney
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M 90s (Sydney)
- alias: '[Sonos Favourites] Play Triple M Classic Rock Digital (Sydney)'
description: Play Triple M Classic Rock Digital (Sydney) on Sonos
id: sonos_favourites_play_triple_m_classic_rock_digital_sydney
trigger:
- platform: webhook
webhook_id: sonos_triple_m_classic_rock_digital_sydney
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_classic_rock_digital_sydney
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M Classic Rock Digital (Sydney)
- alias: '[Sonos Favourites] Play Triple M Hard n Heavy (Sydney)'
description: Play Triple M Hard n Heavy (Sydney) on Sonos
id: sonos_favourites_play_triple_m_hard_n_heavy_sydney
trigger:
- platform: webhook
webhook_id: sonos_triple_m_hard_n_heavy_sydney
allowed_methods:
- POST
- PUT
local_only: true
action:
- service: script.toggle
target:
entity_id: script.sonos_favourites_triple_m_hard_n_heavy_sydney
- service: input_text.set_value
target:
entity_id: input_text.sonos_playlist_playing
data:
value: Triple M Hard n Heavy (Sydney)
- id: ha_autogroup_tvguide
alias: '[HA] Autogroup - TV Guide'
mode: single
max_exceeded: silent
trigger:
- platform: homeassistant
id: startup
event: start
- platform: state
entity_id: input_boolean.ha_startup_pending
to: 'off'
- platform: event
event_type: call_service
event_data:
domain: group
service: reload
- platform: time_pattern
hours: /1
action:
- if: '{{ trigger.id == ''startup'' }}'
then:
- delay: 240
- service: group.set
data:
object_id: tvguide_fta
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'FTA')\n | map(attribute='entity_id') | list | sort }}\n"
- service: group.set
data:
object_id: tvguide_foxtel_news
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'Foxtel News')\n | map(attribute='entity_id') | list | sort }}\n"
- service: group.set
data:
object_id: tvguide_foxtel_sport
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'Foxtel Sport')\n | map(attribute='entity_id') | list | sort }}\n"
- service: group.set
data:
object_id: tvguide_optus_sport
entities: "{% set l1 = states\n |selectattr('entity_id','search','sensor.tvguide_optussport*')\n\
\ |map(attribute='entity_id')|list|sort %}\n{% set l2 = states\n | selectattr('attributes.category',\
\ 'defined')\n | selectattr('attributes.category', 'eq', 'tvguide')\n \
\ | selectattr('attributes.channel_group', 'eq', 'Optus')\n | map(attribute='entity_id')\
\ | list | sort %}\n{{ (l1 + l2) | sort }}\n"
- service: group.set
data:
object_id: tvguide_uk
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'UK')\n | map(attribute='entity_id') | list | sort }}\n"
- service: group.set
data:
object_id: tvguide_uk_sport
entities: "{% set l1 = states\n | selectattr('attributes.category', 'defined')\n\
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'UK')\n |selectattr('entity_id','search','sensor.tvguide_eurosport*')\n\
\ | map(attribute='entity_id') | list | sort %}\n{% set l2 = states\n \
\ | selectattr('attributes.category', 'defined')\n | selectattr('attributes.category',\
\ 'eq', 'tvguide')\n | selectattr('attributes.channel_group', 'eq', 'UK')\n\
\ |selectattr('entity_id','search','sensor.tvguide_sky_sports*')\n |\
\ map(attribute='entity_id') | list | sort %}\n{% set l3 = states\n | selectattr('attributes.category',\
\ 'defined')\n | selectattr('attributes.category', 'eq', 'tvguide')\n \
\ | selectattr('attributes.channel_group', 'eq', 'UK')\n |selectattr('entity_id','search','sensor.tvguide_tnt*')\n\
\ | map(attribute='entity_id') | list | sort %}\n{{ (l1 + l2 + l3) | sort\
\ }}\n"
- service: group.set
data:
object_id: tvguide_uk_radio
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'UKRadio')\n | map(attribute='entity_id') | list | sort }}\n"
- service: group.set
data:
object_id: tvguide_us
entities: "{{ states\n | selectattr('attributes.category', 'defined')\n \
\ | selectattr('attributes.category', 'eq', 'tvguide')\n | selectattr('attributes.channel_group',\
\ 'eq', 'US')\n | map(attribute='entity_id') | list | sort }}\n"
# [Notifications] Unavailable Entities
# Keeps a persistent notification in sync with sensor.unavailable_entities
# (a count): dismissed when the count drops below 1, otherwise (re)created
# with a bulleted list of the unavailable entity_ids.
- id: unavailable_entities_notification
  alias: '[Notifications] Unavailable Entities'
  description: Create persistent notification if there are unavailable entities, dismiss
    if none.
  # restart: a newer state change supersedes any in-flight run
  mode: restart
  trigger:
  - platform: state
    entity_id: sensor.unavailable_entities
    # to: null -> fire on state-value changes only (attribute-only changes ignored)
    to: null
  condition:
  # Ignore startup/unknown transitions: both old and new state must be numeric
  - condition: template
    value_template: "{{ is_number(trigger.from_state.state)\n and is_number(trigger.to_state.state)\
      \ }}\n"
  action:
  - choose:
      conditions:
      - condition: numeric_state
        entity_id: sensor.unavailable_entities
        below: 1
      sequence:
      - service: persistent_notification.dismiss
        data:
          notification_id: unavailable_entities
    default:
    - service: persistent_notification.create
      data:
        title: Unavailable Entities
        # Bulleted list built from the entity_id attribute of the sensor
        message: "- {{ expand(state_attr('sensor.unavailable_entities','entity_id'))\n\
          \ |map(attribute='entity_id')|join('\\n- ') }}\n"
        notification_id: unavailable_entities
# [Notifications] Updates Available
# Every 3 hours (and whenever the core / OS / HACS update entities turn 'on'),
# during daytime, push a mobile alert and a persistent notification listing
# every pending update as "name: installed -> latest".
- alias: '[Notifications] Updates Available'
  id: notifications_updates_available
  mode: single
  trigger:
  - platform: time_pattern
    hours: /3
  - platform: state
    entity_id:
    - update.home_assistant_core_update
    to: 'on'
  - platform: state
    entity_id:
    - update.home_assistant_operating_system_update
    to: 'on'
  - platform: state
    entity_id:
    - update.hacs_update
    to: 'on'
  condition:
  # Daytime only; times quoted so YAML never mis-types them
  - condition: time
    after: '06:00:00'
    before: '20:00:00'
  action:
  - service: notify.alert
    data:
      title: Updates Available
      message: |-
        {%- for upd in states.update %}
        {%- set ignore = ['unknown', 'unavailable', 'none', ''] %}
        {%- if upd.state == 'on'
            and state_attr(upd.entity_id, 'latest_version') is defined
            and state_attr(upd.entity_id, 'installed_version') is defined
            and state_attr(upd.entity_id, 'latest_version') is not in ignore %}
        {%- if is_state_attr(upd.entity_id, 'display_icon', 'mdi:docker') %}
        {%- set name = state_attr(upd.entity_id, 'display_name') + " (" + state_attr(upd.entity_id, 'watcher') + ")" %}
        {%- else %}
        {#- prefer the update's title, fall back to friendly_name when the
            title is the string "null"; the previous iif() had these branches
            swapped and almost always produced friendly_name (or "null") #}
        {%- set name = iif(state_attr(upd.entity_id, 'title') == "null", state_attr(upd.entity_id, 'friendly_name'), state_attr(upd.entity_id, 'title')) %}
        {%- endif %}
        - {{ name }}: {{ state_attr(upd.entity_id, 'installed_version') }} -> {{ state_attr(upd.entity_id, 'latest_version') }}
        {%- endif %}
        {%- endfor %}
      # Companion-app payload must be nested inside the first data: block; a
      # sibling data: key is a duplicate YAML key that clobbers title/message.
      data:
        tag: updates_available
        notification_id: updates_available
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
        group: alerts
        actions:
        - title: Pause Alert
          action: pause_alert_updates_available
  - service: persistent_notification.create
    data:
      notification_id: update-alert
      title: Updates Available
      message: |-
        {%- for upd in states.update %}
        {%- set ignore = ['unknown', 'unavailable', 'none', ''] %}
        {%- if upd.state == 'on'
            and state_attr(upd.entity_id, 'latest_version') is defined
            and state_attr(upd.entity_id, 'installed_version') is defined
            and state_attr(upd.entity_id, 'latest_version') is not in ignore %}
        {%- if is_state_attr(upd.entity_id, 'display_icon', 'mdi:docker') %}
        {%- set name = state_attr(upd.entity_id, 'display_name') + " (" + state_attr(upd.entity_id, 'watcher') + ")" %}
        {%- else %}
        {%- set name = iif(state_attr(upd.entity_id, 'title') == "null", state_attr(upd.entity_id, 'friendly_name'), state_attr(upd.entity_id, 'title')) %}
        {%- endif %}
        - {{ name }}: {{ state_attr(upd.entity_id, 'installed_version') }} -> {{ state_attr(upd.entity_id, 'latest_version') }}
        {%- endif %}
        {%- endfor %}
# [Admin] Update BOM Average when Unavailable
# If the BOM average sensor has been unavailable for two minutes, ask the
# multiscrape integration to re-scrape it.
- id: '1696920090820'
  alias: '[Admin] Update BOM Average when Unavailable'
  description: ''
  mode: single
  trigger:
  - platform: state
    entity_id: sensor.bom_average
    to: unavailable
    for: '00:02:00'
  condition: []
  action:
  - service: multiscrape.trigger_bom_average
    data: {}
# [pixelclock] Update Firmware
# When the awtrix release sensor's 'tag' attribute changes (new firmware
# published), wait 2 minutes, command the clock to self-update over MQTT,
# then send mobile + persistent notifications.
- id: '1702689288792'
  alias: '[pixelclock] Update Firmware'
  description: ''
  trigger:
  - platform: state
    entity_id:
    - sensor.blueforcer_awtrix_light_latest_release
    # fire on changes to the release tag attribute
    attribute: tag
  condition: []
  action:
  - delay:
      hours: 0
      minutes: 2
      seconds: 0
      milliseconds: 0
  # empty payload: the topic itself is the command
  - service: mqtt.publish
    data:
      topic: pixelclock/doupdate
  - service: notify.alert
    data:
      title: pixelclock Updated
      message: pixelclock updated with latest firmware
      # Companion-app payload must be nested inside the first data: block; a
      # sibling data: key is a duplicate YAML key that clobbers title/message.
      data:
        tag: pixelclock_updated
        notification_id: pixelclock_updated
        push:
          badge: 1
          sound: none
          interruption-level: time-sensitive
        presentation_options:
        - alert
        - badge
        group: alerts
        actions:
        - title: Pause Alert
          action: pause_alert_pixelclock_updated
  - service: persistent_notification.create
    data:
      notification_id: pixelclock_updated
      title: pixelclock Update
      message: pixelclock updated with latest firmware
  mode: single
# Sonos favourites: each local webhook toggles the matching favourite-playing
# script and records the selection in input_text.sonos_playlist_playing.
- alias: '[Sonos Favourites] Play Virgin Radio Anthems UK'
  description: Play Virgin Radio Anthems UK on Sonos
  id: sonos_favourites_play_virgin_radio_anthems_uk
  trigger:
  - platform: webhook
    webhook_id: sonos_virgin_radio_anthems_uk
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_virgin_radio_anthems_uk
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: Virgin Radio Anthems UK
# Same pattern: Virgin Radio Britpop UK
- alias: '[Sonos Favourites] Play Virgin Radio Britpop UK'
  description: Play Virgin Radio Britpop UK on Sonos
  id: sonos_favourites_play_virgin_radio_britpop_uk
  trigger:
  - platform: webhook
    webhook_id: sonos_virgin_radio_britpop_uk
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_virgin_radio_britpop_uk
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: Virgin Radio Britpop UK
# Same pattern: Virgin Radio Chilled UK
- alias: '[Sonos Favourites] Play Virgin Radio Chilled UK'
  description: Play Virgin Radio Chilled UK on Sonos
  id: sonos_favourites_play_virgin_radio_chilled_uk
  trigger:
  - platform: webhook
    webhook_id: sonos_virgin_radio_chilled_uk
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_virgin_radio_chilled_uk
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: Virgin Radio Chilled UK
# Same pattern: Virgin Radio Groove UK
- alias: '[Sonos Favourites] Play Virgin Radio Groove UK'
  description: Play Virgin Radio Groove UK on Sonos
  id: sonos_favourites_play_virgin_radio_groove_uk
  trigger:
  - platform: webhook
    webhook_id: sonos_virgin_radio_groove_uk
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_virgin_radio_groove_uk
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: Virgin Radio Groove UK
# Same pattern: Virgin Radio UK
- alias: '[Sonos Favourites] Play Virgin Radio UK'
  description: Play Virgin Radio UK on Sonos
  id: sonos_favourites_play_virgin_radio_uk
  trigger:
  - platform: webhook
    webhook_id: sonos_virgin_radio_uk
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: script.toggle
    target:
      entity_id: script.sonos_favourites_virgin_radio_uk
  - service: input_text.set_value
    target:
      entity_id: input_text.sonos_playlist_playing
    data:
      value: Virgin Radio UK
# [Addon Watchguard] Sungather
# At startup and every 15 minutes during daylight: if solar generation data
# is unavailable for 5 minutes or stale for more than 4 hours, restart the
# Sungather add-on and alert.
- id: '1710400217767'
  alias: '[Addon Watchguard] Sungather'
  description: ''
  trigger:
  - platform: homeassistant
    event: start
  - platform: time_pattern
    minutes: /15
  condition:
  - condition: and
    conditions:
    - condition: state
      entity_id: sensor.sun_state_day
      state: Day
    - condition: or
      conditions:
      - condition: state
        entity_id: sensor.sungrow_sg5kd_daily_generation
        state: unavailable
        for:
          hours: 0
          minutes: 5
          seconds: 0
      # stale check: last update more than 240 minutes ago
      - condition: template
        value_template: '{{ (as_timestamp(now())-as_timestamp(states.sensor[''sungrow_sg5kd_daily_generation''].last_updated))/60
          > 240 }}'
  action:
  - action: hassio.addon_restart
    data:
      addon: 7b536ee6_sungather
  - action: notify.alert
    data:
      title: Sungather Restarted
      message: Solar data hasn't been updated in 5 minutes, Sungather has been restarted
      # Companion-app payload must be nested inside the first data: block; a
      # sibling data: key is a duplicate YAML key that clobbers title/message.
      data:
        tag: watchguard_sungather
        notification_id: watchguard_sungather
        push:
          badge: 1
          sound: none
# [Addon Watchguard] Zigbee2MQTT
# Every 5 minutes: if the Z2M bridge connection sensor is off or unavailable,
# restart the add-on and alert.
- alias: '[Addon Watchguard] Zigbee2MQTT'
  description: ''
  mode: single
  triggers:
  - minutes: /5
    trigger: time_pattern
  conditions:
  - condition: or
    conditions:
    - condition: state
      entity_id: binary_sensor.zigbee2mqtt_bridge_connection_state
      state: 'off'
    - condition: state
      entity_id: binary_sensor.zigbee2mqtt_bridge_connection_state
      state: unavailable
  actions:
  - data:
      addon: 45df7312_zigbee2mqtt
    action: hassio.addon_restart
  - action: notify.alert
    data:
      title: Zigbee2MQTT Restarted
      message: Zigbee2MQTT connectivity has stopped, addon has been restarted
      # Companion-app payload must be nested inside the first data: block; a
      # sibling data: key is a duplicate YAML key that clobbers title/message.
      data:
        tag: watchguard_z2m
        notification_id: watchguard_z2m
        push:
          badge: 1
          sound: none
# [Notifications] Watchman: when the missing-entities count increases,
# regenerate the watchman report, wait 30 s, and only notify if the count is
# still above where it started (i.e. it did not recover in the meantime).
- alias: '[Notifications] Notify new unavailable entities, Watchman'
  id: watchman_notifyunavailable
  description: ''
  trigger:
  - platform: state
    entity_id:
    - sensor.watchman_missing_entities
  condition:
  # Only when the count went up (new problems), not down
  - condition: template
    value_template: '{{ trigger.from_state.state | int < trigger.to_state.state |
      int }}'
  action:
  - service: watchman.report
    data: {}
  - delay:
      hours: 0
      minutes: 0
      seconds: 30
      milliseconds: 0
  # Re-check against the live sensor after the delay; aborts the run if the
  # count dropped back to (or below) its pre-trigger value
  - condition: template
    value_template: '{{ trigger.from_state.state | int < states(''sensor.watchman_missing_entities'')
      | int }}'
  - service: persistent_notification.create
    data_template:
      title: Unavailable Entities
      message: There are new unavailable entities
      notification_id: unavail_entities
  mode: single
# InfluxDB to Entity: despite the '365 Day Chaffey Dam' alias, this refreshes
# year-long (range -1y) daily-max storage-percent series for Chaffey, Keepit
# AND Split Rock dams via pyscript.influxdb2_query_to_entity.
# NOTE(review): the target entity_ids end in '_30day' although the range is
# one year - looks like legacy naming kept for dashboard compatibility;
# confirm before renaming.
- id: influx2entity_365daychaffeydam
  alias: 'InfluxDB to Entity: 365 Day Chaffey Dam'
  description: 'InfluxDB to Entity: 365 Day Chaffey Dam'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  # At startup, wait 240 s for InfluxDB/pyscript to be ready
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    - delay: 240
  # Chaffey Dam: daily max storage %, gaps forward-filled
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"%\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"waternsw_chaffey_dam_storage\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_waternsw_chaffeydam_30day
      unit_of_measurement: '%'
      friendly_name: Chaffey Dam - 365 Day - Percent
      icon: mdi:water
  # Keepit Dam: same query against the Keepit storage sensor
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"%\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"waternsw_keepit_dam_storage\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_waternsw_keepitdam_30day
      unit_of_measurement: '%'
      friendly_name: Keepit Dam - 365 Day - Percent
      icon: mdi:water
  # Split Rock Dam: same query against the Split Rock storage sensor
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"%\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"waternsw_split_rock_dam_storage\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(usePrevious: true)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_waternsw_splitrockdam_30day
      unit_of_measurement: '%'
      friendly_name: Split Rock Dam - 365 Day - Percent
      icon: mdi:water
# InfluxDB to Entity: Yearly BOM Stats
# Hourly (and ~4 min after startup) refreshes year-long daily aggregates of
# the Tamworth Airport BOM sensors. The 1d9h window aligns daily buckets to
# 9am local time (the BOM observation day).
- id: influx2entity_yearlybomstats
  alias: 'InfluxDB to Entity: Yearly BOM Stats'
  description: 'InfluxDB to Entity: Yearly BOM Stats'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  # At startup, wait 240 s for InfluxDB/pyscript to be ready
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    - delay: 240
  # Daily maximum temperature over the last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tamworth_airport_temp\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d9h, fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_bomtemp_max_365day
      unit_of_measurement: °C
      friendly_name: BOM - 365 Day - Max Temp
      icon: mdi:thermometer
  # Daily minimum temperature over the last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tamworth_airport_temp\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d9h, fn: min, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"min\")"
      entity_id: sensor.archive_bomtemp_min_365day
      unit_of_measurement: °C
      friendly_name: BOM - 365 Day - Min Temp
      icon: mdi:thermometer
  # Daily temperature range (spread = max - min per day)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tamworth_airport_temp\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: spread, createEmpty: true)\n |> fill(value: 0.0)\n |> yield()"
      entity_id: sensor.archive_bomtemp_range_365day
      unit_of_measurement: °C
      friendly_name: BOM - 365 Day - Temp Range
      icon: mdi:thermometer
  # Daily rainfall (max of the since-9am accumulator per BOM day)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"mm\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tamworth_airport_rain_since_9am\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d9h, fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_bomrain_365day
      unit_of_measurement: mm
      friendly_name: BOM - Year - Rain
      icon: mdi:water
  # Cumulative rainfall over the year (running sum of the daily totals)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"mm\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tamworth_airport_rain_since_9am\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d9h, fn: max, createEmpty: true)\n |> cumulativeSum()\n |> fill(value:\
        \ 0.0)\n |> yield(name: \"max\")"
      entity_id: sensor.archive_bomrain_365day_cume
      unit_of_measurement: mm
      friendly_name: BOM - Year - Rain Cume
      icon: mdi:water
# InfluxDB to Entity: Yearly Home Weather Stats
# Hourly (and ~4 min after startup) refreshes year-long daily aggregates of
# the local Tempest weather-station sensors (midnight-aligned 1d windows,
# unlike the BOM block which uses 9am-aligned windows).
- id: influx2entity_yearlyhomestats
  alias: 'InfluxDB to Entity: Yearly Home Weather Stats'
  description: 'InfluxDB to Entity: Yearly Home Weather Stats'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  # At startup, wait 240 s for InfluxDB/pyscript to be ready
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    - delay: 240
  # Daily minimum outside temperature over the last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tempest_st_00056115_temperature\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: min, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"min\")"
      entity_id: sensor.archive_homeoutsidetemp_min_365day
      unit_of_measurement: °C
      friendly_name: Home - 365 Day - Min Temp
      icon: mdi:thermometer
  # Daily maximum outside temperature over the last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tempest_st_00056115_temperature\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_homeoutsidetemp_max_365day
      unit_of_measurement: °C
      friendly_name: Home - 365 Day - Max Temp
      icon: mdi:thermometer
  # Daily temperature range (spread = max - min per day)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tempest_st_00056115_temperature\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: spread, createEmpty: true)\n |> fill(value: 0.0)\n |> yield()"
      entity_id: sensor.archive_homeoutsidetemp_range_365day
      unit_of_measurement: °C
      friendly_name: Home - 365 Day - Temp Range
      icon: mdi:thermometer
  # Daily rainfall (max of the rain-today accumulator per day)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"mm\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tempest_st_00056115_rain_today\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
      entity_id: sensor.archive_homerain_365day
      unit_of_measurement: mm
      friendly_name: Home - Year - Rain
      icon: mdi:water
  # Cumulative rainfall over the year (running sum of the daily totals)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"mm\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"tempest_st_00056115_rain_today\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: true)\n |> cumulativeSum()\n |> fill(value:\
        \ 0.0)\n |> yield(name: \"max\")"
      entity_id: sensor.archive_homerain_365day_cume
      unit_of_measurement: mm
      friendly_name: Home - Year - Rain Cume
      icon: mdi:water
# [InfluxDB to Entity] Weather Archive
# Hourly (and ~4 min after startup) refreshes weather archive sensors from
# InfluxDB Flux queries via pyscript.influxdb2_query_to_entity.
- id: influx2entity_weatherarchive
  alias: '[InfluxDB to Entity] Weather Archive'
  description: '[InfluxDB to Entity] Weather Archive'
  mode: restart
  max_exceeded: warning
  trigger:
  - platform: homeassistant
    id: startup
    event: start
  - platform: state
    entity_id: input_boolean.ha_startup_pending
    to: 'off'
  - platform: time_pattern
    hours: /1
  action:
  # At startup, wait 240 s for InfluxDB/pyscript to be ready
  - if: '{{ trigger.id == ''startup'' }}'
    then:
    - delay: 240
  # Daily maximum temperature, last month
  - service: pyscript.influxdb2_query_to_entity
    data:
      entity_id: sensor.weather_archive_temperature_maximum_daily
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Daily Maximum Temperature (Last Month)'
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1mo)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"weather_temperature\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: max, createEmpty: false)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"max\")"
  # Daily minimum temperature, last month
  - service: pyscript.influxdb2_query_to_entity
    data:
      entity_id: sensor.weather_archive_temperature_minimum_daily
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Daily Minimum Temperature (Last Month)'
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1mo)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"weather_temperature\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> aggregateWindow(every:\
        \ 1d, fn: min, createEmpty: false)\n |> fill(value: 0.0)\n |> yield(name:\
        \ \"min\")"
  # Daily precipitation, last year (last accumulator reading per day)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"mm\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"weather_precipitation_today\")\n |> filter(fn: (r) => r[\"_field\"\
        ] == \"value\")\n |> drop(columns: [\"friendly_name\"])\n |> window(every:\
        \ 1d)\n |> last()\n |> group()\n |> window(every: 1d)\n |> sum()\n |>\
        \ group()\n |> duplicate(column: \"_start\", as: \"_time\")\n |> yield(name:\
        \ \"sum\")"
      entity_id: sensor.weather_archive_precipitation_daily
      unit_of_measurement: mm
      friendly_name: '[Weather] Daily Precipitation (Last Year)'
      icon: mdi:water
  # Weekly precipitation totals, last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: 'import "timezone" option location = timezone.location(name: "Australia/Sydney")
        from(bucket: "homeassistant") |> range(start: -1y) |> filter(fn: (r) => r["_measurement"]
        == "mm") |> filter(fn: (r) => r["entity_id"] == "weather_precipitation_today")
        |> filter(fn: (r) => r["_field"] == "value") |> drop(columns: ["friendly_name"])
        |> window(every: 1d) |> last() |> group() |> window(every: 1w) |> sum() |>
        group() |> duplicate(column: "_start", as: "_time") |> yield(name: "sum")'
      entity_id: sensor.weather_archive_precipitation_weekly
      unit_of_measurement: mm
      friendly_name: '[Weather] Weekly Precipitation (Last Year)'
      icon: mdi:water
  # Monthly precipitation totals, last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: 'import "timezone" option location = timezone.location(name: "Australia/Sydney")
        from(bucket: "homeassistant") |> range(start: -1y) |> filter(fn: (r) => r["_measurement"]
        == "mm") |> filter(fn: (r) => r["entity_id"] == "weather_precipitation_today")
        |> filter(fn: (r) => r["_field"] == "value") |> drop(columns: ["friendly_name"])
        |> window(every: 1d) |> last() |> group() |> window(every: 1mo) |> sum() |>
        group() |> duplicate(column: "_start", as: "_time") |> yield(name: "sum")'
      entity_id: sensor.weather_archive_precipitation_monthly
      unit_of_measurement: mm
      friendly_name: '[Weather] Monthly Precipitation (Last Year)'
      icon: mdi:water
  # Monthly maximum temperature, last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: 'import "timezone" option location = timezone.location(name: "Australia/Sydney")
        from(bucket: "homeassistant") |> range(start: -1y) |> filter(fn: (r) => r["_measurement"]
        == "°C") |> filter(fn: (r) => r["entity_id"] == "weather_temperature") |>
        filter(fn: (r) => r["_field"] == "value") |> drop(columns: ["friendly_name"])
        |> window(every: 1d) |> max() |> group() |> window(every: 1mo) |> max() |>
        group() |> duplicate(column: "_start", as: "_time") |> yield(name: "sum")'
      entity_id: sensor.weather_archive_temperature_maximum_monthly
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Monthly Maximum Temperature (Last Year)'
  # Weekly maximum temperature, last year (zero readings filtered out)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"weather_temperature\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> filter(fn: (r) =>\
        \ r._value != 0.00)\n |> aggregateWindow(every: 1d, fn: max, createEmpty:\
        \ false)\n |> aggregateWindow(every: 1w, fn: max, createEmpty: false)\n \
        \ |> yield()"
      entity_id: sensor.weather_archive_temperature_maximum_weekly
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Weekly Maximum Temperature (Last Year)'
  # Monthly minimum temperature, last year
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: 'import "timezone" option location = timezone.location(name: "Australia/Sydney")
        from(bucket: "homeassistant") |> range(start: -1y) |> filter(fn: (r) => r["_measurement"]
        == "°C") |> filter(fn: (r) => r["entity_id"] == "weather_temperature") |>
        filter(fn: (r) => r["_field"] == "value") |> drop(columns: ["friendly_name"])
        |> window(every: 1d) |> min() |> group() |> window(every: 1mo) |> min() |>
        group() |> duplicate(column: "_start", as: "_time") |> yield(name: "sum")'
      entity_id: sensor.weather_archive_temperature_minimum_monthly
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Monthly Minimum Temperature (Last Year)'
  # Weekly minimum temperature, last year (zero readings filtered out)
  - service: pyscript.influxdb2_query_to_entity
    data:
      query: "import \"timezone\" option location = timezone.location(name: \"Australia/Sydney\"\
        ) from(bucket: \"homeassistant\")\n |> range(start: -1y)\n |> filter(fn:\
        \ (r) => r[\"_measurement\"] == \"°C\")\n |> filter(fn: (r) => r[\"entity_id\"\
        ] == \"weather_temperature\")\n |> filter(fn: (r) => r[\"_field\"] == \"\
        value\")\n |> drop(columns: [\"friendly_name\"])\n |> filter(fn: (r) =>\
        \ r._value != 0.00)\n |> aggregateWindow(every: 1d, fn: min, createEmpty:\
        \ false)\n |> aggregateWindow(every: 1w, fn: min, createEmpty: false)\n \
        \ |> yield()"
      entity_id: sensor.weather_archive_temperature_minimum_weekly
      unit_of_measurement: °C
      icon: mdi:thermometer
      friendly_name: '[Weather] Weekly Minimum Temperature (Last Year)'
# Webhook - Office Lamp & Sonos Off
# Turn the office lamp off and stop Sonos playback; fired by a local webhook
# or the iOS action 'LightsSonosActionOver'.
- alias: Webhook - Office Lamp & Sonos Off
  description: ''
  initial_state: true
  trigger:
  - platform: webhook
    webhook_id: office_lampsonos_off
    allowed_methods:
    - POST
    - PUT
    local_only: true
  - platform: event
    event_type: ios.action_fired
    event_data:
      actionName: LightsSonosActionOver
  action:
  - service: light.turn_off
    target:
      entity_id: light.office_lamp
  - service: media_player.media_stop
    target:
      entity_id: media_player.office_sonos
# Webhook - Office Lamp Toggle
# Toggle the office lamp; fired by a local webhook or the iOS action
# 'OfficeLightsOn'.
- alias: Webhook - Office Lamp Toggle
  description: ''
  initial_state: true
  trigger:
  - platform: webhook
    webhook_id: office_lamp_toggle
    allowed_methods:
    - POST
    - PUT
    local_only: true
  - platform: event
    event_type: ios.action_fired
    event_data:
      actionName: OfficeLightsOn
  action:
  - service: light.toggle
    target:
      entity_id: light.office_lamp
# Webhook - Set Media Volume
# POST/PUT JSON {"entities": ..., "parameter": ...} sets the volume of the
# given media players.
- alias: Webhook - Set Media Volume
  description: ''
  initial_state: true
  trigger:
  - platform: webhook
    webhook_id: set_media_volume
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: media_player.volume_set
    data:
      entity_id: '{{ trigger.json.entities }}'
      volume_level: '{{ trigger.json.parameter }}'
# Webhook - Office Sonos Play Source Trigger from Stream Deck
# POST/PUT JSON {"parameter": <source>} selects that source on the office
# Sonos.
- alias: Webhook - Office Sonos Play Source Trigger from Stream Deck
  description: ''
  initial_state: true
  trigger:
  - platform: webhook
    webhook_id: sonos_play_source
    allowed_methods:
    - POST
    - PUT
    local_only: true
  action:
  - service: media_player.select_source
    target:
      entity_id: media_player.office_sonos
    data:
      source: '{{ trigger.json.parameter }}'
# [Zigbee2MQTT] Disable Z2M Join (By Timer)
# When the permit-join countdown finishes, close the Zigbee network and sync
# the input_boolean back to off.
- alias: '[Zigbee2MQTT] Disable Z2M Join (By Timer)'
  id: z2m_disable_join_timer
  trigger:
  - platform: event
    # Timers fire 'timer.finished' events with the timer's entity_id in the
    # event data; the previous event_type 'timer.z2m_permit_join' is never
    # fired by Home Assistant, so this automation could not trigger.
    event_type: timer.finished
    event_data:
      entity_id: timer.z2m_permit_join
  action:
  - service: mqtt.publish
    data:
      topic: zigbee2mqtt/bridge/config/permit_join
      payload: 'false'
  - service: input_boolean.turn_off
    data:
      entity_id: input_boolean.z2m_permit_join
# [Zigbee2MQTT] Enable Z2M Join
# Turning the permit-join helper on opens the Zigbee network and starts the
# countdown timer that will close it again.
- id: z2m_enable_join
  alias: '[Zigbee2MQTT] Enable Z2M Join'
  trigger:
  - platform: state
    entity_id: input_boolean.z2m_permit_join
    to: 'on'
  action:
  - service: mqtt.publish
    data:
      topic: zigbee2mqtt/bridge/config/permit_join
      payload: 'true'
  - service: timer.start
    data:
      entity_id: timer.z2m_permit_join
# [Zigbee2MQTT] Restart if lights unavailable
# If either Zigbee lamp has been unavailable for 5 minutes, restart the Z2M
# add-on and alert.
- alias: '[Zigbee2MQTT] Restart if lights unavailable'
  id: z2m_restart_no_lights
  triggers:
  - trigger: state
    entity_id:
    - light.office_lamp
    - light.bedroom_lamp
    to: unavailable
    for:
      hours: 0
      minutes: 5
      seconds: 0
  conditions: []
  actions:
  - action: hassio.addon_restart
    metadata: {}
    data:
      addon: 45df7312_zigbee2mqtt
  - action: notify.alert
    data:
      title: Zigbee2MQTT Restarted
      message: Zigbee2MQTT connectivity has stopped, addon has been restarted
      # Companion-app payload must be nested inside the first data: block; a
      # sibling data: key is a duplicate YAML key that clobbers title/message.
      data:
        tag: watchguard_z2m
        notification_id: watchguard_z2m
        push:
          badge: 1
          sound: none
  mode: single
# [Zigbee2MQTT] Disable Z2M Join
# Turning the permit-join helper off closes the Zigbee network and cancels
# the countdown timer.
- id: z2m_disable_join
  alias: '[Zigbee2MQTT] Disable Z2M Join'
  trigger:
  - platform: state
    entity_id: input_boolean.z2m_permit_join
    to: 'off'
  action:
  - service: mqtt.publish
    data:
      topic: zigbee2mqtt/bridge/config/permit_join
      payload: 'false'
  - service: timer.cancel
    data:
      entity_id: timer.z2m_permit_join