diff --git a/.dashboards/pv_opt_dashboard.yaml b/.dashboards/pv_opt_dashboard.yaml
index 44530a2c..f885804f 100644
--- a/.dashboards/pv_opt_dashboard.yaml
+++ b/.dashboards/pv_opt_dashboard.yaml
@@ -21,7 +21,7 @@ views:
name: Solar
max: 5000
icon: mdi:solar-power-variant
- - entity: sensor.solis_total_load
+ - entity: sensor.solis_house_load
name: Load
max: 5000
- entity: sensor.solis_battery_soc
@@ -54,7 +54,7 @@ views:
name: Time
state: >-
{{(as_local(as_datetime(states('sensor.date_time_iso')))|string)[11:16]}}
- - entity: button.solis_sync_rtc
+ - entity: button.solis_inverter_sync_rtc
- type: markdown
content: >
Cost Summary (GBP)
@@ -142,61 +142,6 @@ views:
{{as_local(as_datetime(a['end_local'])).strftime(tf)}}
|{%endfor%}
title: Car Charging
- - type: conditional
- conditions:
- - condition: state
- entity: sensor.pvopt_tariff
- state: agile
- - condition: state
- entity: select.pvopt_ev_charger
- state_not: None
- card:
- type: markdown
- content: >
-
- Candidate Car Charging Plan
-
-
- | Start | | | End ||| Energy ||| Price (p) |
-
- |:--------|--|--|:-----------|--|--|:------------------:|--|--|:-----|{%
- for a in state_attr('sensor.pvopt_candidate_car_slots',
- 'windows') %}
-
- {% set tf = '%H:%M'%} |
- {{as_local(as_datetime(a['start_local'])).strftime(tf)}} |||
- {{as_local(as_datetime(a['end_local'])).strftime(tf)}} |||
- {{a['charge_in_kwh'] | float | round(2)}}kWh ||| {{a['import'] |
- float | round(2)}}p |{%endfor%}
-
-
- Total Charge Added (kWh) =
- {{'%8.2f'|format(state_attr('sensor.pvopt_candidate_car_slots','ev_total_charge')
- | float)}}
-
- Total Cost (p) =
- {{'%8.0f'|format(state_attr('sensor.pvopt_candidate_car_slots','ev_total_cost')
- | float)}}
-
- Active Car Charging Plan
-
-
- | Start | | | End ||| Energy ||| Price (p) |
-
- |:--------|--|--|:-----------|--|--|:------------------:|--|--|:-----|{%
- for a in state_attr('sensor.pvopt_car_slots', 'windows') %}
-
- {% set tf = '%H:%M'%} |
- {{as_local(as_datetime(a['start_local'])).strftime(tf)}} |||
- {{as_local(as_datetime(a['end_local'])).strftime(tf)}} |||
- {{a['charge_in_kwh'] | float | round(2)}}kWh ||| {{a['import'] |
- float | round(2)}}p |{%endfor%}
-
-
- Total Charge to add =
- {{'%8.2f'|format(state_attr('sensor.pvopt_car_slots','ev_total_charge')
- | float)}} kWh
- title: Car Charging
- type: custom:stack-in-card
title: Optimised Charging
cards:
@@ -237,6 +182,8 @@ views:
entities:
- entity: switch.pvopt_allow_cyclic
name: Allow Cyclic Charge/Discharge
+ - entity: switch.pvopt_fill_first
+ name: Fill First during Optimise Discharge
- type: markdown
content: Solar
- type: entities
@@ -365,7 +312,7 @@ views:
span:
start: day
series:
- - entity: sensor.solis_total_load
+ - entity: sensor.solis_house_load
float_precision: 0
extend_to: now
group_by:
@@ -720,3 +667,4 @@ views:
show:
legend_value: false
extend_to: false
+
diff --git a/README.md b/README.md
index 1bddddec..6df1f5be 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# PV Opt: Home Assistant Solar/Battery Optimiser v5.0.1
+# PV Opt: Home Assistant Solar/Battery Optimiser v5.1.0
- [Introduction](#Introduction)
- [Pre-requisites](#pre-requisites)
@@ -67,6 +67,9 @@ PV Opt supports EV charging:
- If on the Agile tariff, PV Opt can calculate a car charging plan which can be used to control your EV charger/car via external HA automation scripts.
- If necessary Pv_opt automatically prevents house battery discharge during EV Charging.
+Octopus Free Electricity and Octopus Saving Sessions are fully supported (requires Octopus HA integration, see below).
+
+Axle Energy export events are also supported (requires Axle VPP HA integration, see below).
Pre-requisites
@@ -85,9 +88,10 @@ This app is not stand-alone it requires the following:
| Studio Code Server | Alternative | Alternative to using `File Editor` to edit config files. Not convered in this guide. |
| Integrations | | |
| Solcast PV Solar Integration | Required | Retrieves solar forecast from Solcast into Home Assistant. |
-| Octopus Energy | Optional | Used to retrieve tariff information and Octopus Saving Session details. For users on Intelligent Octopus Go, this is required for any addtional slots outside of the 6 hour period to be taken into account in the charge/discharge plan. |
+| Octopus Energy | Optional | Used to retrieve tariff information and Octopus Saving Session / Free Electricity details. For users on Intelligent Octopus Go, this is required for any additional slots outside of the 6 hour period to be taken into account in the charge/discharge plan. |
| Solax Modbus | Optional | Used to control Solis inverter directly. Support for two other integrations is now available (see below). Support inverter brands is possible using the API described below. |
| MyEnergi | Optional | For Intelligent Octopus Go users using a Zappi charger, used by Pv_opt to detect EV plugin and supply EV consumption history. |
+| Axle VPP | Optional | For Axle Energy VPP users, allows PV_opt to take account of Axle discharge events by including the Axle export price in its calculations |
Step by Step Installation Guide
@@ -181,17 +185,17 @@ https://github.com/stevebuk1/pv_opt/blob/main/files/solis_hybrid.yaml
Follow instructions here: https://github.com/home-assistant/addons/blob/master/configurator/README.md
-Navigate to Settings -> Addons -> File editor -> Configuration and set "Enforce Basepath" to "off".
+Navigate to Settings -> Apps -> File editor -> Configuration and set "Enforce Basepath" to "off".
-9. Install Samba Share and/or Studio Code Server Add-ons If Required
+9. Install Samba Share and/or Studio Code Server Apps If Required
-Both of these add-ons make it easier to edit text files on your HA Install but aren't strictly necessary. `Samba Share` also makes it easier to access the AppDaemon log files.
+Both of these Apps make it easier to edit text files on your HA Install but aren't strictly necessary. `Samba Share` also makes it easier to access the AppDaemon log files.
10. Install AppDaemon
The PV_Opt python script currently runs under AppDaemon.
-AppDaemon is a loosely coupled, multi-threaded, sandboxed python execution environment for writing automation apps for home automation projects, and any environment that requires a robust event driven architecture. The simplest way to install it on Home Assistantt is using the dedicated add-on:
+AppDaemon is a loosely coupled, multi-threaded, sandboxed python execution environment for writing automation apps for home automation projects, and any environment that requires a robust event driven architecture. The simplest way to install it on Home Assistant is using the dedicated App:
1. Click the Home Assistant My button below to open the add-on on your Home Assistant instance:
@@ -258,7 +262,7 @@ And add the `client_user` and `client_password` keys to `secrets.yaml` like this
date_format: '%H:%M:%S'
format: '{asctime} {levelname:>8s}: {message}'
-4. Open the AppDaemon Add-On via Settings: http://homeassistant.local:8123/hassio/addon/a0d7b954_appdaemon/info
+4. Open the AppDaemon App via Settings: http://homeassistant.local:8123/hassio/addon/a0d7b954_appdaemon/info
5. Click on Configuration at the top
@@ -320,7 +324,7 @@ And add the `client_user` and `client_password` keys to `secrets.yaml` like this
06/04, 20:33:01 INFO AppDaemon: App initialization complete
```
-That's it. AppDaemon is up and running. There is futher documentation for the [Add-on](https://github.com/hassio-addons/addon-appdaemon/blob/main/appdaemon/DOCS.md) and for [AppDaemon](https://appdaemon.readthedocs.io/en/latest/)
+That's it. AppDaemon is up and running. There is further documentation for the [App](https://github.com/hassio-addons/addon-appdaemon/blob/main/appdaemon/DOCS.md) and for [AppDaemon](https://appdaemon.readthedocs.io/en/latest/)
12. Install PV Opt from HACS
@@ -334,7 +338,7 @@ That's it. AppDaemon is up and running. There is futher documentation for the [A
Once downloaded AppDaemon should see the app and attempt to load it using the default configuration. Go back to the AppDaemon logs and this time open pv_opt.log. You should see:
```
-16:53:23 INFO: ******************* PV Opt v5.0.0 *******************
+16:53:23 INFO: ******************* PV Opt v5.1.0 *******************
16:53:23 INFO:
16:53:23 INFO: Time Zone Offset: 0.0 minutes
16:53:23 INFO: Reading arguments from YAML:
@@ -349,7 +353,7 @@ Once downloaded AppDaemon should see the app and attempt to load it using the de
13. Add an Automation to Restart AppDAemon when HA Restarts (Optional)
-Restarts between Home Assistant and Add-Ons are not synchronised so it is helpful to set up an Automation to restart AppDAemon if HA is restarted. An example is shown below and included in this repo as `ha_restart_automation.yaml`. The `wait_template` section ensures that key integrations (in this case Solcast and Solax) have numeric values before AppDaemon is started.
+Restarts between Home Assistant and Apps are not synchronised so it is helpful to set up an Automation to restart AppDaemon if HA is restarted. An example is shown below and included in this repo as `ha_restart_automation.yaml`. The `wait_template` section ensures that key integrations (in this case Solcast and Solax) have numeric values before AppDaemon is started.
alias: Restart AppDaemon on HA Restart
description: ""
@@ -398,7 +402,8 @@ The `config.yaml` file also includes all the other configuration used by PV Opt.
overwrite_ha_on_restart: true
-PV_Opt needs to know the size of your battery and the power of your inverter: both when inverting battery to AC power and when chargingh tha battery. It will attempt to work these out from the data it has loaded (WIP) but you should check the following enitities in Home Assistant:
+PV_Opt needs to know the size of your battery and the power of your inverter: both when inverting battery to AC power and when charging the battery. These are best set in
+config.yaml. Check the following entities in Home Assistant match your system:
System Parameters
@@ -417,9 +422,10 @@ These are the main parameters that will control how PV Opt runs:
| Parameter | Units | Entity | Default | Description |
| :----------------------- | :--------: | :---------------------------------------- | :-----: | :--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
-| Read Only Mode | `on`/`off` | `switch.pvopt_read_only` | On | Controls whether the app will actually control the inverter. Start with this on until you are happy the charge/discharge plan makes sense. |
+| Read Only Mode | `on`/`off` | `switch.pvopt_read_only` | On | Controls whether the app will actually control the inverter. Start with this on until you are happy the charge/discharge plan makes sense. |
| Optimise Charging | `on`/`off` | `switch.pvopt_forced_charge` | On | Controls whether the app will calculate an Optimised plan. If `off` only the Base forecast will be updated. |
-| Optimise Discharging | `on`/`off` | `switch.pvopt_forced_discharge` | On | Controls whether the app will allow for forced discharge as well as charge |
+| Charge to 100% | `on`/`off` | `switch.pvopt_charge_to_100` | Off | Charge to 100% in the cheap rate, keeping as low a charge rate as possible such that 100% is reached smoothly by the end of the cheap rate period. During Winter, on a tariff that has a defined cheap rate (Go, IOG, Cosy etc) there's little benefit to be gained by optimizing battery use to achieve a battery SOC of "flat" just before the cheap rate begins, as errors in consumption and solcast mean that "flat" can happen early with a large consequential cost. Any benefit of leaving room for solar to fill the battery (which Pv_opt will normally do) is largely negligible in winter. Note: this mode is ultimately an override of the prime aim of Pv_opt, which is to optimise based on cost, but is provided where error margins can lead to a frequent flat battery. It has no effect for Octopus Agile users nor if Optimise Discharging is selected (see below) |
+| Optimise Discharging | `on`/`off` | `switch.pvopt_forced_discharge` | On | Controls whether the app will allow for forced discharge as well as charge |
| Allow Cyclic | `on`/`off` | `switch.pvopt_allow_cyclic` | On | Controls whether the app will allow cycles of alternating charge/discharge |
| Use Solar | `on`/`off` | `switch.pvopt_use_solar` | On | Controls whether the app will use the Solcast solar forecast. If set to Off no solar will be used but battery charging can still be optimised for a time-of use tariff. |
| Solcast Confidence Level | `number` | `number.pvopt_solcast_confidence_level` | Solcast | Selects which the Confidence Level for the Solcast forecast. Levels between 10% and 50% are weighted from the Solcast 10% and 50% forecasts. Levels between 50% and 90% are weighted from the Solcast 50% and 10% forecasts. |
@@ -498,6 +504,14 @@ Import and/or export tarifs can be set manually as follows. These can be combine
- period_start: "14:00"
price: 0.0
+Axle Energy Information
+
+| Parameter | Units | Entity | Default | Description |
+| :------------------------- | :--------: | :--------------------------- | :-----: | :------------------------------------------------------------------------------------------------------- |
+| Pv_opt control during Axle events | `True/False` | `axle_allow_pvopt_writes` | | Allow Pv_opt to write to inverter during Axle Energy events. Axle controls your inverter during an event but has been known to start late or not at all. Until Axle fix this it is recommended that Pv_opt should also control your inverter, which given the current export price will almost certainly schedule an export event. |
+| Axle Energy export price | pence | `axle_export_rate_p` | | Price for Axle Energy Export events. Defaults to 100p which is the current price Axle offer for all events. Change it here if it changes. |
+
+
Tuning Parameters
These parameters will tweak how PV Opt runs:
diff --git a/apps/pv_opt/config/config.yaml b/apps/pv_opt/config/config.yaml
index e41816d7..be0fee9a 100644
--- a/apps/pv_opt/config/config.yaml
+++ b/apps/pv_opt/config/config.yaml
@@ -1,12 +1,10 @@
# Internal configuration --- DO NOT EDIT ---
-
pv_opt:
module: pv_opt
class: PVOpt
-
log: pv_opt_log
prefix: pvopt
- debug: false
+ debug: true
# User configuration --- EDIT AWAY! ---
# ========================================
@@ -14,7 +12,7 @@ pv_opt:
# ========================================
# If true the current config in HA will be over-written with that in the config.yaml.
- overwrite_ha_on_restart: false
+ overwrite_ha_on_restart: true
list_entities: true
# If true the personal data will be redacted from the log files.
@@ -23,9 +21,9 @@ pv_opt:
#=======================================
#Logging Category Control
#=======================================
- #Defines Logging subjects to add to logfile
- #If commented out, everything is logged
- #Ignored if "debug" is set to False above
+ # Defines Logging subjects to add to logfile
+ # If commented out, everything is logged
+ # Ignored if "debug" is set to false above
#
# S = Startup/Initialisation Logging
# T = Tariff loading Logging
@@ -46,30 +44,32 @@ pv_opt:
# Letters can be added to "debug_categories" in any order
- debug_categories: OW
+ debug_categories: O
# ========================================
# Basic parameters
# ========================================
read_only: false # If true the inverter will not be controlled
- forced_discharge: true # Enable forced discharging
+ forced_discharge: false # Enable forced discharging
allow_cyclic: false # Enable alternate charge/discharge windows
+ charge_to_100: false # Charge to 100% SOC over cheap-rate window (not compatible with forced discharge or Agile tariff)
+ fill_first: false
# ========================================
# Plant parameters
# ========================================
- # All parameters can be a number or point to an entity UNLESS they start with 'id_'. All of
- # these are required but will be defaulted if not specified
+ # All parameters can be a number or point to an entity UNLESS they start with 'id_'.
+ # All of these are required, but will be defaulted if not specified
battery_capacity_wh: 20000
# inverter_efficiency_percent: 97 # Default: 97
# charger_efficiency_percent: 91 # Default: 91
- # maximum_dod_percent: 15
- # charger_power_watts: 3500
- # inverter_power_watts: 3600
- # inverter_loss_watts: 100
- # battery_current_limit_amps: 50
+ # maximum_dod_percent: 15 # Default 15
+ # charger_power_watts: 3500 # Default 3500
+ # inverter_power_watts: 3600 # Default 3600
+ # inverter_loss_watts: 100 # Default 100
+ # battery_current_limit_amps: 50 #Default 50
# ========================================
# Solcast configuration
@@ -85,8 +85,8 @@ pv_opt:
# ========================================
#
# use_consumption_history: false
- # # consumption_history_days: 6
- # #
+ # consumption_history_days: 7
+ #
daily_consumption_kwh: 17
shape_consumption_profile: true
consumption_shape:
@@ -110,12 +110,32 @@ pv_opt:
# ========================================
# Octopus account parameters
# ========================================
- octopus_auto: True # Read tariffs from the Octopus Energy integration. If successful this over-rides the following parameters
+ octopus_auto: true # Read tariffs from the Octopus Energy integration. If successful this over-rides the following parameters
# octopus_account: !secret octopus_account
# octopus_api_key: !secret octopus_api_key
+ # The following can be omitted if either of the above options is working correctly:
+
+ # octopus_import_tariff_code: E-2R-VAR-22-11-01-G
+ # octopus_export_tariff_code: E-1R-AGILE-OUTGOING-19-05-13-G
+
+ # octopus_import_tariff_code: E-1R-AGILE-23-12-06-G
+ # # octopus_export_tariff_code: E-1R-OUTGOING-LITE-FIX-12M-23-09-12-G
+ octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
+
+ # octopus_import_tariff_code: E-1R-FLUX-IMPORT-23-02-14-G
+ # octopus_export_tariff_code: E-1R-FLUX-EXPORT-23-02-14-G
+
+ octopus_import_tariff_code: E-1R-GO-VAR-22-10-14-G
+ # octopus_export_tariff_code: E-1R-OUTGOING-LITE-FIX-12M-23-09-12-N
+
+ # ========================================
+ # Manual Tariff Parameters
+ # ========================================
+ # If not using Octopus set a manual tariff here.
+ #
# manual_import_tariff: True
- # manual_import_tariff_name: Test Importe
+ # manual_import_tariff_name: Test Import
# manual_import_tariff_tz: GB
# manual_import_tariff_standing: 43
# manual_import_tariff_unit:
@@ -140,22 +160,7 @@ pv_opt:
# price: 50.0
# - period_start: "14:00"
# price: 0.0
-
- # The following Can be omitted if either of the above options is working correctly:
-
- # octopus_import_tariff_code: E-2R-VAR-22-11-01-G
- # octopus_export_tariff_code: E-1R-AGILE-OUTGOING-19-05-13-G
-
- # octopus_import_tariff_code: E-1R-AGILE-23-12-06-G
- # # octopus_export_tariff_code: E-1R-OUTGOING-LITE-FIX-12M-23-09-12-G
- octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
-
- # octopus_import_tariff_code: E-1R-FLUX-IMPORT-23-02-14-G
- # octopus_export_tariff_code: E-1R-FLUX-EXPORT-23-02-14-G
-
- octopus_import_tariff_code: E-1R-GO-VAR-22-10-14-N
- # octopus_export_tariff_code: E-1R-OUTGOING-LITE-FIX-12M-23-09-12-N
-
+ #
# ========================================
# EV parameters
# ========================================
@@ -163,7 +168,7 @@ pv_opt:
# If you have a Zappi Charger, uncomment next line to allow Pv_opt to detect car plugin status
# for use with the Octopus Intelligent / Agile tariff and to allow EV consumption data to be read (if needed).
#
- # ev_charger: Zappi # Default = None
+ # ev_charger: Zappi # Default = Not fitted
#
#
# Required Zappi entities are autodeted. For multiple Zappis (or entity names changed from the MyEnergi Integration defaults), it may be necessary to manually set
@@ -179,8 +184,9 @@ pv_opt:
# ev_part_of_house_load: False # Default = True
#
#
- # EV charge plan input values for Agile and IOG. On IOG, these parameters are passed to the Octopus API and will be
- # mirrored in the Octopus App after Pv_opt start.
+ # EV charge plan input values for Agile and IOG.
+ # On IOG, these parameters are passed to the Octopus API and will be mirrored in the Octopus App after Pv_opt start. If you want to control these using the
+ # Octopus App on your phone, leave these commented out (Pv_opt will pick up the values from Octopus)
#
# car_charging_ready_by: 06:30 # Default 06:30
# ev_charge_target_percent: 30 # Default 30%
@@ -195,9 +201,9 @@ pv_opt:
# EV/Charger Parameters for EV charging on Agile. Uncomment to overide the defaults if needed for your car/EV charger.
# These parameters are not needed for IOG, as they are already defined by Octopus.
#
- # ev_battery_capacity_kwh: 60 # Default 60kWh
- # ev_charger_efficiency_percent : 92 # Default 92%
- # ev_charger_power_watts: 7000 # Default 7000W (7kW)
+ # ev_battery_capacity_kwh: 60 # Default 60 (kWh)
+ # ev_charger_efficiency_percent : 92 # Default 92
+ # ev_charger_power_watts: 7000 # Default 7000 (7kW)
#
# Max slot price for Agile charging. Note, setting a low value may mean the car does not achieve its target SOC.
# To disable, uncomment and set to 0.
@@ -210,8 +216,10 @@ pv_opt:
# Uncomment the next two entries if using this integration.
# inverter_type: SOLIS_SOLAX_MODBUS
- # device_name: solis
- hmi_firmware_fb00_plus: True
+ # device_name: solis_inverter
+
+ # If your inverter has 6 slot firmware, uncomment this line.
+ # hmi_firmware_fb00_plus: True
# battery_voltage: sensor.{device_name}_battery_voltage
# update_cycle_seconds: 15
@@ -249,11 +257,8 @@ pv_opt:
# id_timed_charge_discharge_button: button.{device_name}_update_charge_discharge_times
# id_inverter_mode: select.{device_name}_energy_storage_control_switch
-
- # The following do not have defaults stored within Pv_opt so require uncommenting if used:
-
# Required for Tariff Comparison, see end of file.
-
+ # They do not have defaults stored within Pv_opt so require uncommenting if used:
# id_solar_power: sensor.{device_name}_pv_total_power
# OR
# id_solar_power:
@@ -302,9 +307,9 @@ pv_opt:
# id_inverter_mode: sensor.{device_name}_energy_storage_control_switch
- # The following do not have defaults stored within Pv_opt so require uncommenting if used:
# Required for Tariff Comparison, see end of file.
+ # They do not have defaults stored within Pv_opt so require uncommenting if used:
# id_solar_power: sensor.{device_name}_pv_total_power
# OR
@@ -312,44 +317,6 @@ pv_opt:
# - sensor.{device_name}_pv_power_1
# - sensor.{device_name}_pv_power_2
- # ==============================================================================================================
- # Brand / Integration Specific Config: SOLIS_SOLARMAN: https://github.com/StephanJoubert/home_assistant_solarman
- # Note: for legacy installs only. Use SOLIS_SOLARMAN_V2 for new installs.
- # ==============================================================================================================
- #
- # Uncomment the next two entries if using this integration.
-
- # inverter_type: SOLIS_SOLARMAN
- # device_name: solis
-
- # These are the default entities used with the Solis Solarman integration. You can change them here and over-ride the defaults
-
- # battery_voltage: sensor.{device_name}_battery_voltage
- # maximum_dod_percent: 15
- # update_cycle_seconds: 60
-
- # id_consumption_today: sensor.{device_name}_daily_house_backup_consumption
-
- # id_grid_power: sensor.{device_name}_meter_active_power
- # id_grid_import_today: sensor.{device_name}_daily_energy_imported
- # id_inverter_ac_power": sensor.{device_name}_inverter_ac_power
- # id_battery_soc: sensor.{device_name}_battery_soc
- # id_backup_mode_soc:
-
- # id_timed_charge_start_hours: sensor.{device_name}_timed_charge_start_hour
- # id_timed_charge_start_minutes: sensor.{device_name}_timed_charge_start_minute
- # id_timed_charge_end_hours: sensor.{device_name}_timed_charge_end_hour
- # id_timed_charge_end_minutes: sensor.{device_name}_timed_charge_end_minute
- # id_timed_charge_current: sensor.{device_name}_timed_charge_current
-
- # id_timed_discharge_start_hours: sensor.{device_name}_timed_discharge_start_hour
- # id_timed_discharge_start_minutes: sensor.{device_name}_timed_discharge_start_minute
- # id_timed_discharge_end_hours: sensor.{device_name}_timed_discharge_end_hour
- # id_timed_discharge_end_minutes: sensor.{device_name}_timed_discharge_end_minute
- # id_timed_discharge_current: sensor.{device_name}_timed_discharge_current
-
- # id_inverter_mode: sensor.{device_name}_storage_control_mode
-
# ==============================================================================================================
# Brand / Integration Specific Config: SOLIS_SOLARMAN_V2: https://github.com/davidrapan/ha-solarman
@@ -391,9 +358,8 @@ pv_opt:
# id_inverter_mode: select.{device_name}_storage_control_mode
- # The following do not have defaults stored within Pv_opt so require uncommenting if used:
-
- # Required for Tariff Comparison, see end of file. Can be commented out if not using.
+ # Required for Tariff Comparison, see end of file.
+ # They do not have defaults stored within Pv_opt so require uncommenting if used:
# id_solar_power:
# - sensor.{device_name}_power_pv1
# - sensor.{device_name}_power_pv2
@@ -405,35 +371,53 @@ pv_opt:
#
# Uncomment the next three entries if using this integration.
- inverter_type: SUNSYNK_SOLARSYNKV3
- device_name: solarsynkv3
- inverter_sn: 2207112425a # enter {sunsynk_serial} from solarsynkv3 addon
+ # inverter_type: SUNSYNK_SOLARSYNKV3
+ # device_name: solarsynkv3
+ # inverter_sn: 2207112425a # enter {sunsynk_serial} from solarsynkv3 addon
# These are the default entities used with the Sunsynk Solarsynkv3 integration. You can change them here and over-ride the defaults
- id_use_timer: sensor.{device_name}_{inverter_sn}_peakandvallery
- id_priority_load: sensor.{device_name}_{inverter_sn}_energymode
- id_control_helper: input_text.{device_name}_{inverter_sn}_settings
+ # id_use_timer: sensor.{device_name}_{inverter_sn}_peakandvallery
+ # id_priority_load: sensor.{device_name}_{inverter_sn}_energymode
+ # id_control_helper: input_text.{device_name}_{inverter_sn}_settings
+
+ # id_timed_charge_start: sensor.{device_name}_{inverter_sn}_selltime1
+ # id_timed_charge_end: sensor.{device_name}_{inverter_sn}_selltime2
+ # id_timed_charge_enable: sensor.{device_name}_{inverter_sn}_gentime1on
+ # id_timed_charge_target_soc: sensor.{device_name}_{inverter_sn}_cap1
+
+ # id_timed_discharge_start: sensor.{device_name}_{inverter_sn}_selltime3
+ # id_timed_discharge_end: sensor.{device_name}_{inverter_sn}_selltime4
+ # id_timed_discharge_enable: sensor.{device_name}_{inverter_sn}_gentime3on
+ # id_timed_discharge_target_soc: sensor.{device_name}_{inverter_sn}_cap3
+
+ # id_battery_soc: sensor.{device_name}_{inverter_sn}_battery_soc
+ # battery_voltage: sensor.{device_name}_{inverter_sn}_battery_voltage
+ # id_consumption_today: sensor.{device_name}_{inverter_sn}_load_daily_used
+ # id_grid_import_today: sensor.sensor.{device_name}_{inverter_sn}_grid_etoday_from
+ # id_grid_export_today: sensor.{device_name}_{inverter_sn}_grid_etoday_to
+
+ # maximum_dod_percent: 20
+ # supports_hold_soc: false
+ # update_cycle_seconds: 0
- id_timed_charge_start: sensor.{device_name}_{inverter_sn}_selltime1
- id_timed_charge_end: sensor.{device_name}_{inverter_sn}_selltime2
- id_timed_charge_enable: sensor.{device_name}_{inverter_sn}_gentime1on
- id_timed_charge_target_soc: sensor.{device_name}_{inverter_sn}_cap1
- id_timed_discharge_start: sensor.{device_name}_{inverter_sn}_selltime3
- id_timed_discharge_end: sensor.{device_name}_{inverter_sn}_selltime4
- id_timed_discharge_enable: sensor.{device_name}_{inverter_sn}_gentime3on
- id_timed_discharge_target_soc: sensor.{device_name}_{inverter_sn}_cap3
+ # ===============================================================================================================
+ # Brand / Integration Specific Config: SUNSYNK_SOLAR_SUNSYNK: https://github.com/MorneSaunders360/Solar-Sunsynk
+ # ===============================================================================================================
+ #
+ # Uncomment the next three entries if using this integration.
- id_battery_soc: sensor.{device_name}_{inverter_sn}_battery_soc
- battery_voltage: sensor.{device_name}_{inverter_sn}_battery_voltage
- id_consumption_today: sensor.{device_name}_{inverter_sn}_load_daily_used
- id_grid_import_today: sensor.sensor.{device_name}_{inverter_sn}_grid_etoday_from
- id_grid_export_today: sensor.{device_name}_{inverter_sn}_grid_etoday_to
+ # inverter_type: SUNSYNK_SOLARSUNSYNK
+ # device_name: solarsunsynk
+ # inverter_sn: 2207112425a # enter {sunsynk_serial} from Solar Sunsynk integration
- maximum_dod_percent: 20
- supports_hold_soc: false
- update_cycle_seconds: 0
+ # These are the default entities used with the Solar Sunsynk integration.
+ # You can change them here and over-ride the defaults
+
+ # maximum_dod_percent: 20
+ # supports_hold_soc: false
+ # update_cycle_seconds: 0
# ===============================================================================================================
# Brand / Integration Specific Config: SOLIS_CLOUD: https://github.com/hultenvp/solis-sensor
@@ -472,7 +456,8 @@ pv_opt:
# id_timed_discharge_button: button.{device_name}_update_timed_discharge_1
# id_timed_charge_discharge_button: button.{device_name}_update_timed_charge_discharge_1
- # Required for Tariff Comparison, see end of file. Can be commented out if not using.
+ # Required for Tariff Comparison, see end of file.
+ # They do not have defaults stored within Pv_opt so require uncommenting if used:
# id_solar_power:
# - sensor.{device_name}_dc_power_pv1
# - sensor.{device_name}_dc_power_pv2
@@ -492,7 +477,7 @@ pv_opt:
# battery_voltage: sensor.{device_name}_battery_voltage_2
# maximum_dod_percent: sensor.{device_name}_force_discharge_soc
- id_battery_current: sensor.solis_battery_current_2 # Debug purposes only
+ # id_battery_current: sensor.solis_battery_current_2 # Debug purposes only
# id_consumption_today: sensor.{device_name}_daily_grid_energy_used
# # id_consumption:
# # - sensor.{device_name}_total_consumption_power
@@ -517,24 +502,45 @@ pv_opt:
# id_solar_power:
# - sensor.{device_name}_dc_power_pv1
# - sensor.{device_name}_dc_power_pv2
+ #
+ #
+ # ==============================================================================================
+ # Axle Energy Integration
+ # https://github.com/deanhalllincoln/ha-axle-vpp
+ # ==============================================================================================
+ # These are the default entities used with the Axle VPP Integration
+ # You can change them here and over-ride the defaults.
+
+ # id_axle_start_time: sensor.axle_vpp_axle_start_time
+ # id_axle_end_time: sensor.axle_vpp_axle_end_time
+ # Axle Energy controls your inverter during a discharge event. Pv_opt should also schedule
+ # a discharge during this time, given the export price. Setting the next line to True will
+ # allow Pv_opt to set the discharge. As Axle has been found to either set a discharge late or
+ # not at all, but will still honour any export made during the event, it is recommended to leave
+ # this set to True. Setting to False will automatically suppress Pv_opt from inverter writes for
+ # 10 minutes before the event to 10 minutes after.
+
+ # axle_allow_pvopt_writes: True
+
+ # Axle's export price. (This is not available on the Axle API.) Uncomment if no longer £1.
+ # axle_export_rate_p: 100
# ===============================================================================================================
# Tariff comparison
# ===============================================================================================================
+ # If necessary, uncomment id_daily_solar or id_solar_power from the Brand / Integration specific config above.
- # id_daily_solar: sensor.{device_name}_power_generation_today
-
- alt_tariffs:
- - name: Agile_Fix
- octopus_import_tariff_code: E-1R-AGILE-23-12-06-G
- octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
-
- # - name: Eco7_Fix
- # octopus_import_tariff_code: E-2R-VAR-22-11-01-G
- # octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
-
- - name: Flux
- octopus_import_tariff_code: E-1R-FLUX-IMPORT-23-02-14-G
- octopus_export_tariff_code: E-1R-FLUX-EXPORT-23-02-14-G
+ # alt_tariffs:
+ # - name: Agile_Fix
+ # octopus_import_tariff_code: E-1R-AGILE-23-12-06-G
+ # octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
+
+ # - name: Eco7_Fix
+ # octopus_import_tariff_code: E-2R-VAR-22-11-01-G
+ # octopus_export_tariff_code: E-1R-OUTGOING-FIX-12M-19-05-13-G
+
+ # - name: Flux
+ # octopus_import_tariff_code: E-1R-FLUX-IMPORT-23-02-14-G
+ # octopus_export_tariff_code: E-1R-FLUX-EXPORT-23-02-14-G
diff --git a/apps/pv_opt/pv_opt.py b/apps/pv_opt/pv_opt.py
index 5dfc8101..d6df9275 100644
--- a/apps/pv_opt/pv_opt.py
+++ b/apps/pv_opt/pv_opt.py
@@ -5,15 +5,23 @@
from datetime import datetime, timedelta
from json import dumps
-import appdaemon.adbase as ad
-import appdaemon.plugins.hass.hassapi as hass
-import appdaemon.plugins.mqtt.mqttapi as mqtt
+try:
+ import appdaemon.adbase as ad
+ import appdaemon.plugins.hass.hassapi as hass
+ import appdaemon.plugins.mqtt.mqttapi as mqtt
+
+ APPDAEMON = True
+except ImportError:
+ import ha_interface.ha_interface as ad
+ import ha_interface.ha_interface as hass
+
+ APPDAEMON = False
import numpy as np
import pandas as pd
import pvpy as pv
from numpy import nan
-VERSION = "5.0.3"
+VERSION = "5.1.0-Beta-2"
UNITS = {
"current": "A",
@@ -144,9 +152,9 @@
DEFAULT_CONFIG = {
"read_only": {"default": True, "domain": "switch"},
- "include_export": {"default": True, "domain": "switch"},
"forced_discharge": {"default": True, "domain": "switch"},
"allow_cyclic": {"default": False, "domain": "switch"},
+ "charge_to_100": {"default": False, "domain": "switch"},
"use_solar": {"default": True, "domain": "switch"},
"ev_part_of_house_load": {"default": True, "domain": "switch"},
"prevent_discharge": {"default": False, "domain": "switch"},
@@ -255,7 +263,7 @@
"default": 5.0,
"attributes": {
"min": 0.0,
- "max": 1000.0,
+ "max": 100.0,
"step": 5,
"mode": "box",
},
@@ -424,6 +432,20 @@
# },
"id_solcast_today": {"default": "sensor.solcast_pv_forecast_forecast_today"},
"id_solcast_tomorrow": {"default": "sensor.solcast_pv_forecast_forecast_tomorrow"},
+ "axle_allow_pvopt_writes": {"default": False, "domain": "switch"},
+ "id_axle_start_time": {"default": "sensor.axle_vpp_axle_start_time"},
+ "id_axle_end_time": {"default": "sensor.axle_vpp_axle_end_time"},
+ "axle_export_rate_p": {
+ "default": 100,
+ "domain": "number",
+ "attributes": {
+ "min": 0,
+ "max": 500,
+ "step": 1,
+ "unit_of_measurement": "p/kWh",
+ "mode": "slider",
+ },
+ },
"use_consumption_history": {"default": True, "domain": "switch"},
"consumption_history_days": {
"default": 7,
@@ -621,6 +643,8 @@ def initialize(self):
self.saving_events = {}
self.free_electricity_events = {}
+ self.axle_event = None
+
self.contract = None
self.car_plugin_detected = 0
self.car_plugin_detected_delayed = 0
@@ -1112,9 +1136,13 @@ def _get_zappi_power(self, start, end, log=False):
i = 0
for entity_id in self.zappi_consumption_entities:
- i += 1
df = self._get_hass_power_from_daily_kwh(entity_id, start=start, end=end, log=log)
+ if df is None or df.empty:
+ self.log(f"No Zappi power data for {entity_id} — skipping", level="WARNING")
+ continue
+
+ i += 1
if i == 1:
df_all = df.copy()
if i > 1: # If more than one charger, add data as extra column
@@ -1127,6 +1155,11 @@ def _get_zappi_power(self, start, end, log=False):
self.rlog(">>> df_all")
self.log(f">>>\n{df_all.to_string()}")
+ if i == 0:
+ # No valid Zappi data found for any entity
+ self.log("No valid Zappi power data found for any entity", level="WARNING")
+ return pd.DataFrame()
+
df_all = df_all.fillna(0) # fill any missing values with 0
if i == 1:
df = df_all
@@ -1347,8 +1380,8 @@ def _load_inverter(self):
if self.inverter_type in INVERTER_TYPES:
inverter_brand = self.inverter_type.split("_")[0].lower()
self.log(f"Inverter type: {self.inverter_type}: inverter module: {inverter_brand}.py")
- if inverter_brand == "solis":
- # for now only Solis uses the new setup
+ if inverter_brand in ("solis", "sunsynk"):
+ # for now Solis and Sunsynk use the new setup
create_inverter_controller = importName(f"{inverter_brand}", "create_inverter_controller")
self.inverter = create_inverter_controller(inverter_type=self.inverter_type, host=self)
else:
@@ -1651,7 +1684,7 @@ def _load_contract(self):
self.rlog(f"Trying to load tariff codes: Export: {self.config['octopus_export_tariff_code']}")
tariffs["export"] = pv.Tariff(
self.config[f"octopus_export_tariff_code"],
- export=False,
+ export=True,
host=self,
)
elif self.get_config("manual_export_tariff", False):
@@ -2016,6 +2049,79 @@ def _load_free_electricity_events_new(self):
else:
self.log(" No upcoming Octopus Free Electricity Events detected")
+
+ def _axle_writes_suspended(self):
+
+ """Return True if inverter writes should be suppressed during the Axle
+ event window. Controlled by the axle_allow_pvopt_writes config option.
+ NOTE(review): the guard below skips suppression when this option is False,
+ which looks inverted vs the option name — confirm intended polarity."""
+ if self.axle_event is None:
+ return False
+ if not self.get_config("axle_allow_pvopt_writes"):
+ return False
+ now = pd.Timestamp.now(tz="UTC")
+ freq = pd.Timedelta(minutes=self.get_config("optimise_frequency_minutes"))
+ window_start = self.axle_event["start"] - freq
+ window_end = self.axle_event["end"] + freq
+ return window_start <= now <= window_end
+
+ def _load_axle_event(self):
+ """
+ Load the next Axle Energy VPP grid event from the Dean Hall HACS integration
+ (ha-axle-vpp). Entity names default to the standard integration names but can
+ be overridden in apps.yaml via id_axle_start_time, id_axle_end_time, and
+ id_axle_1hr_before.
+
+ Populates self.axle_event with a dict {start, end, rate_p} or None if no
+ upcoming export event is found.
+ """
+ self.axle_event = None
+
+ start_entity = self.config["id_axle_start_time"]
+ end_entity = self.config["id_axle_end_time"]
+
+ # Silently skip if the integration is not installed
+ if not self.entity_exists(start_entity):
+ return
+
+ self.log("")
+ self.log(" Checking for Axle Energy VPP events:")
+
+
+ start_state = self.get_state_retry(start_entity)
+ end_state = self.get_state_retry(end_entity) if self.entity_exists(end_entity) else None
+
+ if start_state in (None, "unknown", "unavailable") or end_state in (None, "unknown", "unavailable"):
+ self.log(" Axle entities present but no event data available.")
+ return
+
+ try:
+ event_start = pd.Timestamp(start_state).tz_localize("UTC") if pd.Timestamp(start_state).tzinfo is None else pd.Timestamp(start_state).tz_convert("UTC")
+ event_end = pd.Timestamp(end_state).tz_localize("UTC") if pd.Timestamp(end_state).tzinfo is None else pd.Timestamp(end_state).tz_convert("UTC")
+ except Exception as e:
+ self.log(f" Could not parse Axle event timestamps: {e}")
+ return
+
+ now = pd.Timestamp.now(tz="UTC")
+
+ if event_end <= now:
+ self.log(" Axle event found but it is in the past — ignoring.")
+ return
+
+ rate_p = self.get_config("axle_export_rate_p")
+ self.axle_event = {
+ "start": event_start,
+ "end": event_end,
+ "rate_p": rate_p,
+ }
+ self.log(
+ f" Axle export event loaded: {event_start.strftime(DATE_TIME_FORMAT_SHORT)} - "
+ f"{event_end.strftime(DATE_TIME_FORMAT_SHORT)} at {rate_p}p/kWh"
+ )
+
+
+
def get_ha_value(self, entity_id):
value = None
@@ -2555,7 +2661,9 @@ def optimise_IOG(self, entity_id, attribute, old, new, kwargs):
@ad.app_lock
def optimise_time(self, cb_args):
self.log(f"Optimiser triggered by Scheduler ")
- self.log(f"Version: v{VERSION}")
+ if not APPDAEMON:
+ self.log(f"App/AddOn Version: {getattr(self, 'addon_version', 'unknown')}")
+ self.log(f"Pv_opt Version: v{VERSION}")
self.optimise()
@ad.app_lock
@@ -2567,6 +2675,7 @@ def optimise(self):
self._load_saving_events_new() # Resolves Issue #418.
# self._load_free_electricity_events()
self._load_free_electricity_events_new() # Resolves Issue #418
+ self._load_axle_event()
if self.get_config("forced_discharge") and (self.get_config("supports_forced_discharge", True)):
discharge_enable = "enabled"
@@ -2675,6 +2784,7 @@ def optimise(self):
self.pv_system.static_flows.index = [self.time_now] + list(self.pv_system.static_flows.index[1:])
soc_now = self.get_config("id_battery_soc")
+
self.pv_system.initial_soc = soc_now
# soc_last_day = self.hass2df(self.config["id_battery_soc"], days=1, log=self.debug)
# if self.debug and "S" in self.debug_cat:
@@ -2748,6 +2858,29 @@ def optimise(self):
# self.log("Self.prices is")
# self.log(self.prices.to_string())
+ # Inject Axle VPP export rate into prices for event slots so the optimiser
+ # correctly values the event and plans a charge-up beforehand.
+ if self.axle_event is not None and "export" in self.prices.columns:
+ axle_rate_p = self.get_config("axle_export_rate_p")
+ event_start = self.axle_event["start"].floor("30min")
+ event_end = self.axle_event["end"].ceil("30min")
+ mask = (
+ (self.prices.index >= event_start)
+ & (self.prices.index < event_end)
+ )
+ if mask.any():
+ self.prices.loc[mask, "export"] = axle_rate_p
+ self.pv_system.prices = self.prices
+ self.log(
+ f" Axle VPP: set export rate to {axle_rate_p}p/kWh for "
+ f"{event_start.strftime(DATE_TIME_FORMAT_SHORT)} - {event_end.strftime(DATE_TIME_FORMAT_SHORT)}"
+ )
+
+
+
+
+
+
self.pv_system.calculate_flows()
self.flows = {"Base": self.pv_system.flows}
self.log("")
@@ -2790,25 +2923,16 @@ def optimise(self):
cases = {
"Optimised Charging": {
- "export": False,
- "discharge": False,
- },
- "Optimised PV Export": {
- "export": True,
"discharge": False,
},
"Forced Discharge": {
- "export": True,
"discharge": True,
},
}
- if not self.get_config("include_export"):
+ if not self.get_config("forced_discharge"):
self.selected_case = "Optimised Charging"
- elif not self.get_config("forced_discharge"):
- self.selected_case = "Optimised PV Export"
-
else:
self.selected_case = "Forced Discharge"
@@ -2824,7 +2948,6 @@ def optimise(self):
self.flows[case] = self.pv_system.optimised_force(
log=True,
- use_export=cases[case]["export"],
discharge=cases[case]["discharge"],
)
@@ -2838,7 +2961,6 @@ def optimise(self):
for case in cases:
self.flows[case] = self.pv_system.optimised_force(
log=(case == self.selected_case),
- use_export=cases[case]["export"],
discharge=cases[case]["discharge"],
)
@@ -2854,7 +2976,7 @@ def optimise(self):
# )
self.ulog("Optimisation Summary")
- self.log(f" {'Base cost:':40s} {self.optimised_cost['Base'].sum():6.1f}p")
+ self.log(f" {'Base cost:':60s} {self.optimised_cost['Base'].sum():6.1f}p")
cost_today = self._cost_actual().sum()
self.summary_costs = {
"Base": {
@@ -2863,7 +2985,7 @@ def optimise(self):
}
}
for case in cases:
- str_log = f" {f'Optimised cost ({case}):':40s} {self.optimised_cost[case].sum():6.1f}p"
+ str_log = f" {f'Optimised cost ({case}):':60s} {self.optimised_cost[case].sum():6.1f}p"
self.summary_costs[case] = {"cost": ((self.optimised_cost[case].sum() + cost_today) / 100).round(2)}
if case == self.selected_case:
self.summary_costs[case]["Selected"] = " <=== Current Setup"
@@ -3143,6 +3265,11 @@ def optimise(self):
status = self.inverter.status
self._log_inverterstatus(status)
+ if self._axle_writes_suspended():
+ self.log("Axle VPP event active prior to next slot start: inverter writes suspended.")
+ self.status("Idle (Axle VPP)")
+ break
+
retries = 0
while not self.inverter.timed_mode and retries < WRITE_POLL_RETRIES:
retries += 1
@@ -3240,13 +3367,11 @@ def optimise(self):
): # not sure what this line will report
self.log("....but status is not hold")
self.log(f" Enabling SOC hold at SOC of {self.hold[0]['soc']:0.0f}%")
- # self.inverter.hold_soc_old(enable=True, soc=self.hold[0]["soc"])
- self.inverter.hold_soc(enable=True, target_soc=self.hold[0]["soc"])
+ self.inverter.hold_soc(enable=True, target_soc=self.hold[0]["soc"], start=self.charge_start_datetime, end=self.charge_end_datetime)
else:
self.log(f" Inverter already holding SOC of {self.hold[0]['soc']:0.0f}%")
- start = None
- end = self.charge_end_datetime
- self.inverter.hold_soc(enable=True, target_soc=self.hold[0]["soc"])
+ # Next line commented out - if its already holding there is nothing to update (there used to be when using backup mode)
+ # self.inverter.hold_soc(enable=True, target_soc=self.hold[0]["soc"], start=None, end=self.charge_end_datetime)
else: # if already in Car slot, this bit should run
self.log(f"Current charge/discharge window ends in {time_to_slot_end:0.1f} minutes.")
@@ -3425,16 +3550,18 @@ def _create_windows(self):
tolerance = self.get_config("forced_power_group_tolerance")
- # Increment "period" if
- # charge power varies by more than half the power tolerance
+ # Increment "period" if
+ # charge power varies by more than half the power tolerance
# OR non-contiguous car slot detected (when charge power = 0).
- # OR cross from 0 to positive/negative value (otherwise windows of very low values will get joined together).
-
+ # OR cross from 0 to positive/negative value (otherwise windows of very low values will get joined together).
+
forced_diff = self.opt["forced"].diff()
self.opt["period"] = (
- (forced_diff.abs() > (tolerance / 2)) # significant power change
- | ((forced_diff != 0) & ((self.opt["forced"] == 0) | (self.opt["forced"].shift() == 0))) # any transition to/from zero
+ (forced_diff.abs() > (tolerance / 2)) # significant power change
+ | (
+ (forced_diff != 0) & ((self.opt["forced"] == 0) | (self.opt["forced"].shift() == 0))
+ ) # any transition to/from zero
| ((self.opt["carslot"].diff() > 0) & (self.opt["forced"] == 0)) # new car slot with no charge
).cumsum()
@@ -3761,6 +3888,7 @@ def write_cost(
cost,
df,
attributes={},
+ full=True,
):
cost_today = self._cost_actual()
midnight = pd.Timestamp.now(tz="UTC").normalize() + pd.Timedelta(24, "hours")
@@ -3780,10 +3908,10 @@ def write_cost(
# self.log("")
# self.log(f"\n{cost.to_string()}")
# self.log(f"Dtype of cost_today is {cost_today.dtypes}")
- # self.log(f"Dtype of cost is {cost.dtypes}")
+ # self.log(f"Dtype of cost is {cost.dtypes}")
# cast cost_today as float64 in case it is empty, to prevent Futurewarning "The behavior of array concatenation with empty entries is deprecated"
- cost = pd.DataFrame(pd.concat([cost_today.astype("float64"), cost])).set_axis(["cost"], axis=1).fillna(0)
+ cost = pd.DataFrame(pd.concat([s for s in [cost_today.astype("float64"), cost] if not s.empty])).set_axis(["cost"], axis=1).fillna(0)
cost["cumulative_cost"] = cost["cost"].cumsum()
for d in [df, cost]:
@@ -3803,10 +3931,14 @@ def write_cost(
),
"cost_tomorrow": round((cost["cost"].loc[midnight:].sum()) / 100, 2),
}
- | {col: df[["period_start", col]].to_dict("records") for col in cols if col in df.columns}
- | {"cost": cost[["period_start", "cumulative_cost"]].to_dict("records")}
- | attributes
- )
+ ) | attributes
+
+ if full:
+ attributes = (
+ {col: df[["period_start", col]].to_dict("records") for col in cols if col in df.columns}
+ | {"cost": cost[["period_start", "cumulative_cost"]].to_dict("records")}
+ | attributes
+ )
self.write_to_hass(
entity=entity,
@@ -3857,6 +3989,26 @@ def _write_output(self):
attributes={"Summary": self.summary_costs},
)
+ self.write_to_hass(
+ f"sensor.{self.prefix}_cost_today",
+ np.round(self._cost_actual().sum() / 100, 2),
+ attributes={
+ "friendly_name": f"PV_Opt Cost Today",
+ "device_class": "monetary",
+ "state_class": "measurement",
+ "unit_of_measurement": "GBP",
+ },
+ )
+
+ for case in self.summary_costs:
+ self.write_cost(
+ f"PV_Opt Cost ({case})",
+ entity=f"sensor.{self.prefix}_cost_{case.lower().replace(" ","_")}",
+ cost=self.optimised_cost[case],
+ df=self.flows[case],
+ full=False,
+ )
+
if len(self.windows) > 0:
hass_start = self.charge_start_datetime
hass_end = self.charge_end_datetime
@@ -4100,6 +4252,10 @@ def _get_hass_power_from_daily_kwh(self, entity_id, start=None, end=None, days=N
if df is not None:
+ if df.empty:
+ self.log(f"No data returned for {entity_id} — skipping", level="WARNING")
+ return df
+
if self.debug and "Q" in self.debug_cat:
self.log(f"power: kWh data from {entity_id} is")
self.log(f"\n{df.to_string()}")
@@ -4256,6 +4412,23 @@ def load_consumption(self, start, end):
df = self._subtract_zappi_from_grid(ev_power, df)
# Add consumption margin
+
+ df_no_margin = df.copy()
+
+ # Log historical daily consumption - skip partial first day
+ daily_totals = df_no_margin.groupby(df_no_margin.index.date).sum() / 2000
+ daily_counts = df_no_margin.groupby(df_no_margin.index.date).count()
+ self.log(f" - Historical house consumption per day ({actual_days} days):")
+ for date, total in daily_totals.items():
+ if daily_counts.loc[date] >= 48: # only log complete days (48 x 30min slots)
+ self.log(
+ f" {pd.Timestamp(date).strftime('%d-%b-%Y')} ({pd.Timestamp(date).strftime('%a')}): {total:0.1f} kWh"
+ )
+ else:
+ self.log(
+ f" {pd.Timestamp(date).strftime('%d-%b-%Y')} ({pd.Timestamp(date).strftime('%a')}): {total:0.1f} kWh (partial day - {daily_counts.loc[date]} slots)"
+ )
+
df = df * (1 + self.get_config("consumption_margin") / 100)
if self.debug and "Q" in self.debug_cat:
self.log("Df after adding consumption margin is.......")
@@ -4302,9 +4475,9 @@ def load_consumption(self, start, end):
now_floor = pd.Timestamp.now(tz="UTC").floor("30min")
for week in range(1, days // 7 + 1):
start_dow_n = now_floor - pd.Timedelta(days=7 * week)
- slice_n = dfx.loc[start_dow_n : start_dow_n + pd.Timedelta(hours=47, minutes=30)].iloc[:48]
-
- if len(slice_n) > 40:
+ slice_n = dfx.loc[start_dow_n : start_dow_n + pd.Timedelta(hours=95, minutes=30)].iloc[:96]
+
+ if len(slice_n) > 80:
dow_slices.append(slice_n.values)
if index_dow is None:
index_dow = slice_n.index # capture the 7-days-ago index
@@ -4326,7 +4499,7 @@ def load_consumption(self, start, end):
# self.log(f">>> consumption_mean index: {consumption_mean.index[0]} to {consumption_mean.index[-1]}")
# Add extra entries to consumption_dow so it starts at midnight, then remove time column and change Nans to 0 (they are in the past)
- consumption_dow2 = pd.concat([temp, consumption_dow], axis=1).drop(["time"], axis=1).fillna(0)
+ consumption_dow2 = pd.concat([temp, consumption_dow], axis=1).drop(["time"], axis=1).astype("float64").fillna(0)
# merge consumption_mean and consumption dow, then trim back to 48 hours long
consumption_new = consumption_dow2.merge(
@@ -4345,6 +4518,42 @@ def load_consumption(self, start, end):
self.log(">>> Consumption New:")
self.log(f">>> {consumption_new.to_string()}")
+ # Log forecast (pre-margin) daily totals - actuals so far + forecast for remainder
+ consumption_margin_factor = 1 + self.get_config("consumption_margin") / 100
+ forecast_pre_margin = consumption_new["total"] / consumption_margin_factor
+
+ now_floor = pd.Timestamp.now(tz="UTC").floor("30min")
+ today = now_floor.date()
+
+ # Actual consumption so far today from raw history
+ actual_today = df_no_margin[df_no_margin.index.date == today].sum() / 2000
+
+ # Forecast for remaining slots today (from now onwards)
+ forecast_today_remaining = forecast_pre_margin[forecast_pre_margin.index >= now_floor]
+ forecast_today_remaining = (
+ forecast_today_remaining[forecast_today_remaining.index.date == today].sum() / 2000
+ )
+
+ # Tomorrow is pure forecast
+ tomorrow_start = pd.Timestamp(today, tz="UTC") + pd.Timedelta(days=1)
+ tomorrow_end = tomorrow_start + pd.Timedelta(hours=23, minutes=30)
+ forecast_tomorrow = (
+ forecast_pre_margin[
+ (forecast_pre_margin.index >= tomorrow_start) & (forecast_pre_margin.index <= tomorrow_end)
+ ].sum()
+ / 2000
+ )
+
+ tomorrow = (now_floor + pd.Timedelta(days=1)).date()
+
+ self.log(f" - Forecast consumption per day (weighted, pre-margin):")
+ self.log(
+ f" {pd.Timestamp(today).strftime('%d-%b-%Y')} ({pd.Timestamp(today).strftime('%a')}): {actual_today + forecast_today_remaining:0.1f} kWh ({actual_today:0.1f} kWh actual + {forecast_today_remaining:0.1f} kWh forecast)"
+ )
+ self.log(
+ f" {pd.Timestamp(tomorrow).strftime('%d-%b-%Y')} ({pd.Timestamp(tomorrow).strftime('%a')}): {forecast_tomorrow:0.1f} kWh (forecast)"
+ )
+
consumption["consumption"] += pd.Series(
consumption_new["total"].to_numpy(), index=consumption_mean.index
)
@@ -4385,17 +4594,6 @@ def load_consumption(self, start, end):
self.log("")
- ### This next section prints a consumption based on two days worth, as predicted from the last 7 days
- # What we want is a predicted consumption for the next day, so we can compare it to fixed consumption
- # problems:
- # Not sure where two days is created from (as df is definitley one day)
- # Each of the two days has different, so its not a straight double generated from the one day df
-
- self.log(
- f" Total consumption from {consumption.index[0].strftime(DATE_TIME_FORMAT_SHORT)} to {consumption.index[-1].strftime(DATE_TIME_FORMAT_SHORT)}:"
- )
- self.log(f" Total consumption: {(consumption['consumption'].sum() / 2000):0.1f} kWh")
-
if self.debug and "P" in self.debug_cat:
self.log("Printing final result of routine load_consumption.....")
self.log(consumption.to_string())
@@ -4887,7 +5085,7 @@ def write_and_poll_text(self, entity_id, text: str):
return (changed, written)
- def write_and_poll_value(self, entity_id, value: int | float, tolerance=0.0, verbose=True):
+ def write_and_poll_value(self, entity_id, value: int | float, tolerance=0.0, verbose=False):
changed = False
written = False
if tolerance == -1:
@@ -4933,7 +5131,7 @@ def set_select(self, item, state):
self.call_service("select/select_option", entity_id=entity_id, option=state)
self.rlog(f"Setting {entity_id} to {state}")
- def get_state_retry(self, *args, **kwargs):
+ def get_state_retry(self, *args, allow_none=False, **kwargs):
retries = 0
state = None
@@ -4941,11 +5139,14 @@ def get_state_retry(self, *args, **kwargs):
while not valid_state and retries < GET_STATE_RETRIES:
state = self.get_state(*args, **kwargs)
- valid_state = (
- (("attribute" in kwargs) and (isinstance(state, dict)))
- or (state not in ["unknown", "unavailable", "", None, nan])
- or (len(args) == 1)
- )
+ if allow_none:
+ valid_state = True
+ else:
+ valid_state = (
+ (("attribute" in kwargs) and (isinstance(state, dict)))
+ or (state not in ["unknown", "unavailable", "", None, nan])
+ or (len(args) == 1)
+ )
if not valid_state:
retries += 1
@@ -4984,4 +5185,101 @@ def get_entity_default(self, entity_id):
return DEFAULT_CONFIG[item]["default"]
+if __name__ == "__main__":
+ import asyncio
+ import json
+ import logging
+ import logging.handlers
+ import os
+ import sys
+
+ # Ensure /app is on sys.path so importName() can find sunsynk.py,
+ # solis.py etc. when called from _load_inverter()
+ sys.path.insert(0, "/app")
+
+ import yaml
+
+ # Add-On version is injected by the HA Supervisor at runtime.
+ # No need to hardcode — stays in sync with config.yaml automatically.
+ ADDON_VERSION = os.environ.get("ADDON_VERSION", "unknown")
+
+ LOG_FORMAT = "%(asctime)s %(levelname)-8s %(message)s"
+ LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
+
+ # ── Console handler (captured by HA Supervisor → Add-On Log tab) ─────────
+ logging.basicConfig(
+ level=logging.INFO,
+ format=LOG_FORMAT,
+ datefmt=LOG_DATE_FORMAT,
+ )
+
+ # ── Persistent file handlers (/config/pv_opt mirrors AppDaemon log location) ─
+ PV_OPT_DIR = "/config/pv_opt"
+ os.makedirs(PV_OPT_DIR, exist_ok=True)
+
+ # Main log — all levels
+ LOG_FILE = f"{PV_OPT_DIR}/pv_opt.log"
+ file_handler = logging.handlers.RotatingFileHandler(
+ LOG_FILE,
+ maxBytes=5 * 1024 * 1024,
+ backupCount=3,
+ encoding="utf-8",
+ )
+ file_handler.setFormatter(logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE_FORMAT))
+ logging.getLogger().addHandler(file_handler)
+
+ # Error log — WARNING and above only (mirrors AppDaemon's error.log)
+ ERROR_LOG_FILE = f"{PV_OPT_DIR}/error.log"
+ error_handler = logging.handlers.RotatingFileHandler(
+ ERROR_LOG_FILE,
+ maxBytes=1 * 1024 * 1024,
+ backupCount=3,
+ encoding="utf-8",
+ )
+ error_handler.setLevel(logging.WARNING)
+ error_handler.setFormatter(logging.Formatter(fmt=LOG_FORMAT, datefmt=LOG_DATE_FORMAT))
+ logging.getLogger().addHandler(error_handler)
+
+ logging.info(f"*************** PV Opt Add-On Version: {ADDON_VERSION} ***************")
+ logging.info(f"Logging to {LOG_FILE} and {ERROR_LOG_FILE}")
+
+ # ── Load Add-On UI options (MQTT credentials, log level, etc.) ─────────
+ OPTIONS_FILE = "/data/options.json"
+ if not os.path.exists(OPTIONS_FILE):
+ logging.warning(f"{OPTIONS_FILE} not found — using empty Add-On options")
+ addon_options = {}
+ else:
+ with open(OPTIONS_FILE) as f:
+ addon_options = json.load(f)
+
+ # ── Load pv_opt config.yaml (the main app configuration) ─────────────
+ # Defaults to /config/pv_opt/config.yaml — written by run.sh on first start.
+ # Users can find and edit this via the HA File Editor, consistent with
+ # where AppDaemon stores its files.
+ # Override by setting config_path in the Add-On UI if needed.
+
+ CONFIG_FILE = addon_options.get("config_path", f"{PV_OPT_DIR}/config.yaml")
+ if not os.path.exists(CONFIG_FILE):
+ logging.warning(f"pv_opt config.yaml not found at {CONFIG_FILE} — " f"running with Add-On UI options only.")
+ pv_opt_config = {}
+ else:
+ with open(CONFIG_FILE) as f:
+ raw = yaml.safe_load(f)
+ if isinstance(raw, dict) and "pv_opt" in raw:
+ pv_opt_config = raw["pv_opt"]
+ # Remove AppDaemon-only keys that have no meaning here
+ for ad_key in ("module", "class", "log"):
+ pv_opt_config.pop(ad_key, None)
+ else:
+ pv_opt_config = raw or {}
+ logging.info(f"Loaded pv_opt config from {CONFIG_FILE}")
+
+ # ── Merge: pv_opt config.yaml takes precedence over Add-On UI options ────
+ options = {**addon_options, **pv_opt_config}
+
+ app = PVOpt(options=options)
+ app.addon_version = ADDON_VERSION
+
+ asyncio.run(app._run())
+
# %%
diff --git a/apps/pv_opt/pvpy.py b/apps/pv_opt/pvpy.py
index 095a0ffb..65ed5825 100644
--- a/apps/pv_opt/pvpy.py
+++ b/apps/pv_opt/pvpy.py
@@ -228,7 +228,6 @@ def end(self):
return max([pd.Timestamp(x["valid_to"]) for x in self.unit])
def to_df(self, start=None, end=None, **kwargs):
-
if self.host.debug and "V" in self.host.debug_cat:
self.log(f">>> {self.name}")
self.log(f">>> Start: {start.strftime(TIME_FORMAT)} End: {end.strftime(TIME_FORMAT)}")
@@ -306,11 +305,15 @@ def to_df(self, start=None, end=None, **kwargs):
]
)
- # If the index frequency >30 minutes so we need to just extend it:
- if (len(df) > 1 and ((df.index[-1] - df.index[-2]).total_seconds() / 60) > 30) or len(df) == 1:
- newindex = pd.date_range(df.index[0], end, freq="30min")
- df = df.reindex(index=newindex).ffill().loc[start:]
+ # If the index frequency >30 minutes so we need to just extend it
+ if len(df) > 1:
+ last_dt_hours = (df.index[-1] - df.index[-2]).total_seconds() / 3600
else:
+ last_dt_hours = np.inf
+
+ # self.log(f"{self.name:30s} {last_dt_hours:10.1f}")
+ if last_dt_hours == 0.5:
+ # This is half hourly data like Agile
i = 0
while df.index[-1] < end and i < 7:
i += 1
@@ -323,6 +326,20 @@ def to_df(self, start=None, end=None, **kwargs):
df = pd.concat([df, dfx])
df = df[df.columns[0]]
df = df.loc[start:end]
+
+ elif last_dt_hours > 24:
+ # This isn't a daily tariff so just extend it
+ newindex = pd.date_range(df.index[0], end, freq="30min")
+ df = df.reindex(index=newindex).ffill().loc[start:]
+
+ else:
+ # This is a daily tariff but doesn't have 30 minute data from Octopus (ie Flux/Go)
+ dfx = df.loc[df.index[-1] - pd.Timedelta(hours=23, minutes=59) :].copy()
+ dfx.index += pd.Timedelta(hours=24)
+ df2 = pd.concat([df, dfx])
+ newindex = pd.date_range(start - pd.Timedelta(hours=24), end, freq="30min")
+ df = df2.reindex(index=newindex).ffill().loc[start:]
+
df.name = "unit"
# SVB logging
@@ -701,7 +718,7 @@ def net_cost(self, grid_flow, sum=True, decimals=1, **kwargs):
end = grid_flow.index[-1]
# SVB debugging
- # self.log(f"Start = {start}, End = {end}")
+ # self.log(f">>>Net Cost: Start = {start}, End = {end}")
if (
isinstance(grid_flow, pd.DataFrame)
@@ -865,6 +882,9 @@ def calculate_flows(self, slots=[], solar_id="solar", consumption_id="consumptio
self.flows["soc"] = (self.flows["chg"] / self.battery.capacity) * 100
self.flows["soc_end"] = (self.flows["chg_end"] / self.battery.capacity) * 100
+ self.flows.drop(columns=["battery_temp"])
+
+
if self.prices is not None:
self.flows = pd.concat(
[self.flows, self.prices],
@@ -880,7 +900,6 @@ def optimised_force(
self,
log=True,
discharge=False,
- use_export=True,
max_iters=MAX_ITERS,
):
@@ -896,12 +915,6 @@ def optimised_force(
axis=1,
)
- if not use_export:
- if log:
- self.log(f"Ignoring export pricing because Use Export is turned off")
- discharge = False
- self.prices["export"] = 0
-
if log and (self.host.debug and "B" in self.host.debug_cat):
self.log("")
self.log("Prices is")
@@ -921,7 +934,53 @@ def optimised_force(
if log:
self.log(f"Base cost: {self.base_cost}")
+ # Run high cost swaps ignoring export pricing, then with real export pricing.
+ # Keep whichever produces the lower cost.
+
+ real_export_prices = self.prices["export"].copy()
+
+ self.prices["export"] = 0
+
+ if log:
+ self.log("")
+ self.log("High Cost Usage Swaps (export prices excluded/ignored)")
+ self.log("---------------------")
+ self.log("")
+
self._high_cost_swaps(log=log)
+ slots_no_export = list(self.slots)
+ cost_no_export = self.best_cost
+
+ # Reset state fully before second run
+ self.calculate_flows()
+ self.base_cost = self.net_cost
+ self.best_cost = self.base_cost
+ self.slots = []
+
+ self.prices["export"] = real_export_prices
+
+ if log:
+ self.log("")
+ self.log("High Cost Usage Swaps (export prices included)")
+ self.log("---------------------")
+ self.log("")
+
+ self._high_cost_swaps(log=log)
+ slots_with_export = list(self.slots)
+ cost_with_export = self.best_cost
+
+ if cost_no_export < cost_with_export:
+ if log:
+ self.log(f"High Cost Swaps: export-unaware plan is cheaper than export-aware plan ({cost_no_export:.1f}p vs {cost_with_export:.1f}p), using that")
+ self.slots = slots_no_export
+ self.best_cost = cost_no_export
+ else:
+ if log:
+ self.log(f"High Cost Swaps: export-aware plan ({cost_with_export:.1f})p is cheaper or equal to than export-unaware plan ({cost_no_export:.1f}p), using that")
+ self.slots = slots_with_export
+ self.best_cost = cost_with_export
+
+ self.calculate_flows(slots=self.slots)
# Only do the rest if there is an export tariff:
# self.log(f"Sum of Export Prices = {prices['export'].sum()}")
@@ -948,6 +1007,43 @@ def optimised_force(
self.calculate_flows(slots=self.slots)
+ # If discharge is enabled, also try fill_first and keep whichever is cheaper
+ if discharge:
+ slots_normal = list(self.slots)
+ cost_normal = self.best_cost
+
+ # Reset and re-run with fill_first
+ self.calculate_flows()
+ self.base_cost = self.net_cost
+ self.best_cost = self.base_cost
+ self.slots = []
+ self.slots_added = 999
+ j = 0
+
+ while (self.slots_added > 0) and (j < max_iters):
+ j += 1
+ self._low_cost_charging(log=False)
+ self._discharging(log=False, fill_first=True)
+
+ self.calculate_flows(slots=self.slots)
+ cost_fill_first = self.best_cost
+
+ if log:
+ self.log("Running Discharging, but with filling slots first ('Fill First')")
+
+ if cost_normal <= cost_fill_first:
+ if log:
+ self.log(f" Standard discharge is cheaper ({cost_normal:.1f}p vs {cost_fill_first:.1f}p), using that")
+ self.slots = slots_normal
+ self.best_cost = cost_normal
+ self.calculate_flows(slots=self.slots)
+ else:
+ if log:
+ self.log(f" Fill first discharge is cheaper ({cost_fill_first:.1f}p vs {cost_normal:.1f}p), using that")
+
+ self._charge_to_100(log=log)
+
+
# df.index = pd.to_datetime(df.index)
if (not self.host.get_config("allow_cyclic")) and (len(self.slots) > 0) and discharge:
@@ -1002,17 +1098,12 @@ def _high_cost_swaps(self, log=True):
# --------------------------------------------------------------------------------------------
# Charging 1st Pass
# --------------------------------------------------------------------------------------------
- if log:
- self.log("")
- self.log("High Cost Usage Swaps")
- self.log("---------------------")
- self.log("")
- if log and (self.host.debug and "C" in self.host.debug_cat):
- self.log(
- "SPR = Slot Power Required, SCPA = Slot Charger Power Available, SAC = Slot Available Capacity, RSC = Remaining Slot Capacity"
- )
- self.log("")
+ if log and (self.host.debug and "C" in self.host.debug_cat):
+ self.log(
+ "SPR = Slot Power Required, SCPA = Slot Charger Power Available, SAC = Slot Available Capacity, RSC = Remaining Slot Capacity"
+ )
+ self.log("")
done = False
i = 0
@@ -1151,7 +1242,12 @@ def _high_cost_swaps(self, log=True):
self.slots = slots
- def _low_cost_charging(self, log=True):
+ def _low_cost_charging(self, log=True, test=True):
+ """
+        test: If True: test the slots against the thresholds and save the slots to self.slots
+              If False: don't test the slots, just calculate them and return the resulting slots array
+
+ """
slots = [slot for slot in self.slots]
best_cost = self.best_cost
slots_added = 0
@@ -1203,7 +1299,7 @@ def _low_cost_charging(self, log=True):
# Add rounding to ensure matching (may not be needed)
x["import"] = x["import"].round(2)
- min_price = min_price.round(2)
+ min_price = np.round(min_price, 2)
if len(x[x["import"] == min_price]) > 0:
start_window = x[x["import"] == min_price].index[0]
@@ -1253,66 +1349,285 @@ def _low_cost_charging(self, log=True):
self.calculate_flows(slots=slots)
- if self.host.debug and "F" in self.host.debug_cat:
- self.log("self.flows after flows called = ")
- self.log(f"\n{self.flows.to_string()}")
-
- net_cost = self.net_cost
-
- if self.host.debug and "C" in self.host.debug_cat:
- self.log(f"Cost = {net_cost:5.1f}")
- if net_cost < best_cost:
- self.log("Cost reduction found - printing flows")
+ if test:
+ if self.host.debug and "F" in self.host.debug_cat:
+ self.log("self.flows after flows called = ")
self.log(f"\n{self.flows.to_string()}")
- str_log += f"Net: {net_cost:5.1f} "
- if net_cost < best_cost - self.host.get_config("slot_threshold_p"):
- str_log += f"New SOC: {self.flows.loc[start_window]['soc']:5.1f}%->{self.flows.loc[start_window]['soc_end']:5.1f}% "
- str_log += f"Max export: {-self.flows['grid'].min():0.0f}W "
- best_cost = net_cost
- slots_added += 1
- if log:
- self.log(str_log)
- else:
- # done = True
- slots = slots[:-1]
- self.calculate_flows(slots=slots)
+ net_cost = self.net_cost
+
+ if self.host.debug and "C" in self.host.debug_cat:
+ self.log(f"Cost = {net_cost:5.1f}")
+ if net_cost < best_cost:
+ self.log("Cost reduction found - printing flows")
+ self.log(f"\n{self.flows.to_string()}")
+
+ str_log += f"Net: {net_cost:5.1f} "
+ if net_cost < best_cost - self.host.get_config("slot_threshold_p"):
+ str_log += f"New SOC: {self.flows.loc[start_window]['soc']:5.1f}%->{self.flows.loc[start_window]['soc_end']:5.1f}% "
+ str_log += f"Max export: {-self.flows['grid'].min():0.0f}W "
+ best_cost = net_cost
+ slots_added += 1
+ if log:
+ self.log(str_log)
+ else:
+ # We aren't using this slot so take it out of slots and new_slots, and add it to unused_slots
+ slots = slots[:-1]
+ self.calculate_flows(slots=slots)
done = available.sum() == 0
else:
done = True
- cost_delta = best_cost - self.best_cost
- str_log = f"Charge net cost delta:{(-cost_delta):5.1f}p"
- if cost_delta > -self.host.get_config("pass_threshold_p"):
- self.slots_added = 0
- str_log += f": < Pass Threshold {self.host.get_config('pass_threshold_p'):0.1f}p => Slots Excluded"
- self.calculate_flows(slots=self.slots)
+ if test:
+ cost_delta = best_cost - self.best_cost
+ str_log = f"Charge net cost delta:{(-cost_delta):5.1f}p"
+ if cost_delta >= -self.host.get_config("pass_threshold_p"):
+ self.slots_added = 0
+ str_log += f": < Pass Threshold {self.host.get_config('pass_threshold_p'):0.1f}p => Slots Excluded"
+ self.calculate_flows(slots=self.slots)
+ else:
+ str_log += f": > Pass Threshold {self.host.get_config('pass_threshold_p'):0.1f}p => Slots Included"
+                # Revert to the original slots and put all the new_slots into unused slots, ahead of the ones that failed the slot threshold test
+ self.slots = slots
+ self.slots_added = slots_added
+ self.best_cost = best_cost
+
+ if log:
+ self.log("")
+ self.log(str_log)
+ self._log_slots()
else:
- str_log += f": > Pass Threshold {self.host.get_config('pass_threshold_p'):0.1f}p => Slots Included"
- self.slots = slots
- self.slots_added = slots_added
- self.best_cost = best_cost
+ return slots
+
+
+ def _charge_to_100(self, log=True):
+ """If charge_to_100 is enabled, top up the battery evenly across all
+ cheap-rate slots so that SOC at the end of the cheap window reaches 100%.
+ Unlike _low_cost_charging this is not cost-gated: the whole point is to
+ accept the (usually tiny) extra cost in exchange for a full battery.
+
+ This is intended for import-only tariffs (e.g. Octopus Go) where there is no
+ export revenue to justify arbitrage. The optimiser will otherwise leave the
+ battery deliberately low just before the cheap window to minimise cost, which
+ is unhelpful in winter when consumption is unpredictable.
+
+ For users with an export tariff, Fill First is the correct alternative - it
+ works within the discharge optimiser and is aware of arbitrage value.
+
+ Not compatible with: forced_discharge, fill_first, or variable-rate tariffs
+ such as Octopus Agile where no distinct cheap window exists.
+ """
+
+ if not self.host.get_config("charge_to_100"):
+ return
+
+ if self.host.get_config("forced_discharge"):
+ if log:
+ self.log("")
+ self.log("Charge to 100%")
+ self.log("--------------")
+ self.log("")
+ self.log(
+ "Charge to 100% skipped: forced discharge is enabled. "
+ "Use Fill First for optimised overnight charging when discharge is active."
+ )
+ return
if log:
self.log("")
- self.log(str_log)
+ self.log("Charge to 100%")
+ self.log("--------------")
+ self.log("")
+
+ # Identify the cheap-rate window: a contiguous block of slots at the
+ # minimum import price that is long enough to represent a genuine overnight
+ # cheap period (not an isolated Agile bargain slot).
+ MIN_CHEAP_WINDOW_MINUTES = 60
+
+ min_import_price = self.flows["import"].min()
+ median_import_price = self.flows["import"].median()
+
+ # Guard 1: cheap rate must be meaningfully below the median (rules out Agile
+ # where the "cheapest" slot is only marginally cheaper than the rest).
+ if median_import_price > 0 and min_import_price >= 0.5 * median_import_price:
+ if log:
+ self.log(
+ f"Charge to 100% skipped: min import price ({min_import_price:.2f}p/kWh) is not "
+ f"sufficiently below median ({median_import_price:.2f}p/kWh) - tariff does not "
+ f"appear to have a distinct cheap-rate window (e.g. Agile)."
+ )
+ return
+
+ cheap_mask = self.flows["import"] == min_import_price
+
+ # Guard 2: the contiguous cheap block must be long enough to be a real window.
+ # Find the largest contiguous run of cheap slots and use that as the window.
+ cheap_groups = cheap_mask.ne(cheap_mask.shift()).cumsum()
+ largest_group = (
+ cheap_mask[cheap_mask]
+ .groupby(cheap_groups[cheap_mask])
+ .apply(lambda g: g.index)
+ )
+ window_durations = {
+ grp: (idx[-1] - idx[0] + self.flows["dt_hours"].loc[idx[0]] * pd.Timedelta("1h"))
+ for grp, idx in largest_group.items()
+ }
+ best_group = max(window_durations, key=lambda g: window_durations[g])
+ best_duration = window_durations[best_group]
+
+ if best_duration < pd.Timedelta(minutes=MIN_CHEAP_WINDOW_MINUTES):
+ if log:
+ self.log(
+ f"Charge to 100% skipped: longest contiguous cheap-rate block is only "
+ f"{best_duration.total_seconds()/60:.0f} min "
+ f"(minimum is {MIN_CHEAP_WINDOW_MINUTES} min). "
+ f"Enable this feature only with a tariff that has a fixed cheap window(s) e.g. Cosy, Go, Flux, IOG."
+ )
+ return
+
+ # Use only the largest contiguous cheap block as the window.
+ cheap_window_index = largest_group[best_group]
+ cheap_mask = self.flows.index.isin(cheap_window_index)
+ cheap_slots = self.flows[cheap_mask]
+
+ if cheap_slots.empty:
+ if log:
+ self.log("No cheap-rate slots found - skipping.")
+ return
+
+        # What SOC does the optimiser leave us with at the start of the cheap window?
+ soc_start_of_window = cheap_slots["soc"].iloc[0]
+ deficit_pct = 100.0 - soc_start_of_window
+
+ if log:
+ self.log(
+ f"Cheap-rate window: {cheap_slots.index[0].strftime(TIME_FORMAT)} - "
+ f"{cheap_slots.index[-1].strftime(TIME_FORMAT)} "
+ f"({len(cheap_slots)} slots @ {min_import_price:.2f}p/kWh)"
+ )
+ self.log(f"SOC at start of cheap window: {soc_start_of_window:.1f}% Deficit: {deficit_pct:.1f}%")
+
+ if deficit_pct <= 0:
+ if log:
+ self.log("Battery already at 100% at end of cheap window - nothing to do.")
+ return
+
+ # Energy needed to fill from current plan end-SOC to 100% (Wh, at the battery terminals).
+ # We account for charger efficiency: more grid energy is needed than battery energy stored.
+ energy_deficit_wh = (deficit_pct / 100.0) * self.battery.capacity
+
+ # How much extra power can each cheap slot absorb? Cap at (charger_power - solar - existing forced).
+ max_charger = min(self.battery.max_charge_power, self.inverter.charger_power)
+ cheap_headroom = (
+ max_charger
+ - cheap_slots["forced"]
+ - cheap_slots["solar"]
+ ).clip(lower=0)
+
+ total_headroom_wh = (cheap_headroom * cheap_slots["dt_hours"]).sum()
+
+ if total_headroom_wh <= 0:
+ if log:
+ self.log("WARNING: No headroom in cheap-rate slots - cannot add charge.")
+ return
+
+ if log:
+ self.log(f"Energy deficit: {energy_deficit_wh:.0f} Wh Total headroom: {total_headroom_wh:.0f} Wh")
+
+ if total_headroom_wh < energy_deficit_wh:
+ shortfall_pct = ((energy_deficit_wh - total_headroom_wh) / self.battery.capacity) * 100
+ if log:
+ self.log(
+ f"WARNING: Cheap-rate window has insufficient headroom to reach 100%. "
+ f"Maximum achievable SOC is approximately "
+                    f"{soc_start_of_window + (total_headroom_wh / self.battery.capacity * 100):.1f}% "
+ f"({shortfall_pct:.1f}% short). Consider a longer charge window or higher charger power."
+ )
- def _discharging(self, log=True):
+ # Distribute the deficit evenly (in power terms) across all cheap slots,
+ # capped per slot by its individual headroom.
+
+ # Distribute charge iteratively across cheap slots, recalculating after each
+ # slot addition so that consumption within the window is naturally accounted for.
+ # At each step, spread the remaining deficit evenly across remaining slot-hours.
+
+ slots = [slot for slot in self.slots]
+ total_slot_hours = cheap_slots["dt_hours"].sum()
+ flat_power = (energy_deficit_wh / self.inverter.charger_efficiency) / total_slot_hours
+
+ for t in cheap_slots.index:
+ headroom = float(cheap_headroom.loc[t])
+ if headroom <= 0:
+ continue
+ added_power = min(flat_power, headroom)
+ if added_power > 0:
+ slots.append((t, added_power))
+
+ self.calculate_flows(slots=slots)
+
+ soc_end_new = self.flows[cheap_mask]["soc_end"].iloc[-1]
+
+ if soc_end_new < 99.5:
+ shortfall_pct = 100.0 - soc_end_new
+ if log:
+ self.log(
+ f"WARNING: Cheap-rate window has insufficient headroom to reach 100%. "
+ f"Maximum achievable SOC is approximately {soc_end_new:.1f}% "
+ f"({shortfall_pct:.1f}% short). Consider a longer charge window or higher charger power."
+ )
+
+ cost_delta = self.net_cost - self.best_cost
+
+ if log:
+ self.log(
+ f"New SOC at end of cheap window: {soc_end_new:.1f}% "
+ f"Cost delta: +{cost_delta:.1f}p"
+ )
+
+
+ self.slots = slots
+ self.best_cost = self.net_cost
+
+ if log:
+ self.log("Charge to 100% slots accepted.")
+
+
+ def _discharging(self, log=True, fill_first=False):
# -----------
# Discharging
# -----------
+ if log:
+ self.log("")
+ str_log = "Forced Discharging"
+ if fill_first:
+                str_log += " (Fill First)"
+ self.log(str_log)
+ self.log("-" * len(str_log))
+ self.log("")
+
slots = [slot for slot in self.slots]
best_cost = self.best_cost
slots_added = self.slots_added
+ if fill_first:
+ # If we are filling first, we first need to find all the slots we could use for filling
+ # We need to do this iteratively using the same logic as in low cost charging
+
+ slot_length = len(slots)
+ slots = self._low_cost_charging(log=False, test=False)
+ charging_slots_added = len(slots) - slot_length
+ self.calculate_flows(slots=slots)
+ best_cost = self.net_cost
+ if log:
+ self.log(f"Added {charging_slots_added} charging slots. Best cost with charging = {best_cost:6.1f}p")
+
+ if log:
+ self._log_slots(slots=slots)
+ slots_added += charging_slots_added
+
# Check how many slots which aren't full are at an export price less than any import price:
min_import_price = self.flows["import"].min()
- if log:
- self.log("")
- self.log("Forced Discharging")
- self.log("------------------")
- self.log("")
i = 0
available = (self.flows["export"] > min_import_price) & (self.flows["forced"] == 0)
@@ -1329,7 +1644,7 @@ def _discharging(self, log=True):
if len(x[x["export"] == max_price]) > 0:
# self.log("Entered routine successfully")
- start_window = x[x["export"] == max_price].index[0]
+ start_window = x[x["export"] == max_price].index[-1] # Start from the last
available.loc[start_window] = False
str_log = f"{available.sum():>2d} Max export price {max_price:5.2f}p/kWh at {start_window.strftime(TIME_FORMAT)} "
str_log += " "
@@ -1341,7 +1656,8 @@ def _discharging(self, log=True):
-min(
min(
self.battery.max_discharge_power,
- self.inverter.charger_power,
+ # self.inverter.charger_power, (incorrect, discharging could be using solar as well)
+ self.inverter.inverter_power,
)
- x["solar"].loc[start_window],
((x["soc_end"].loc[start_window] - self.battery.max_dod) / 100 * self.battery.capacity)
@@ -1360,17 +1676,19 @@ def _discharging(self, log=True):
net_cost = self.net_cost
str_log += f"Net: {net_cost:5.1f} "
+ str_log += f"New SOC: {self.flows.loc[start_window]['soc']:5.1f}%->{self.flows.loc[start_window]['soc_end']:5.1f}% "
+ str_log += f"Max export: {-self.flows['grid'].min():0.0f}W "
if net_cost < best_cost - self.host.get_config("slot_threshold_p"):
- str_log += f"New SOC: {self.flows.loc[start_window]['soc']:5.1f}%->{self.flows.loc[start_window]['soc_end']:5.1f}% "
- str_log += f"Max export: {-self.flows['grid'].min():0.0f}W "
- best_cost = net_cost
- slots_added += 1
if log:
self.log(str_log)
+ best_cost = net_cost
+ slots_added += 1
+
else:
# done = True
slots = slots[:-1]
self.calculate_flows(slots=slots)
+
else:
done = True
@@ -1391,6 +1709,20 @@ def _discharging(self, log=True):
if log:
self.log("")
self.log(str_log)
+ self._log_slots()
+
+ def _log_slots(self, slots=None):
+ if slots is None:
+ slots = self.slots
+
+ if len(slots) > 0:
+            df = pd.DataFrame(slots).set_axis(["Start", "Power"], axis=1)
+ df = df.set_index("Start")
+ df = df.groupby(level=0).sum()
+ self.log("")
+
+ if self.host.debug and "C" in self.host.debug_cat:
+ self.log(f"Interim Slot Summary:\n{df.to_string()}")
# %%
diff --git a/apps/pv_opt/solis.py b/apps/pv_opt/solis.py
index 9efa8560..27935c7c 100644
--- a/apps/pv_opt/solis.py
+++ b/apps/pv_opt/solis.py
@@ -636,6 +636,10 @@ def _control_charge_discharge(self, direction, enable, **kwargs):
current = 0
target_soc = None
+ # Already disabled - nothing to do
+ if self.status[direction]["start"] == self.status[direction]["end"]:
+ return
+
if target_soc is None and self._hmi_fb00:
self.log("Pv opt is configured for 6 slot firmware, setting initial value of target_soc")
if direction == "charge":
diff --git a/apps/pv_opt/sunsynk.py b/apps/pv_opt/sunsynk.py
index bbbdfcc7..ed5daa9c 100644
--- a/apps/pv_opt/sunsynk.py
+++ b/apps/pv_opt/sunsynk.py
@@ -1,25 +1,29 @@
+import base64
+import hashlib
import json
import time
+from abc import ABC, abstractmethod
+from datetime import datetime, timedelta
from time import sleep
-from typing import final
import numpy as np
import pandas as pd
+import requests
+from cryptography.hazmat.primitives.asymmetric.padding import PKCS1v15
+from cryptography.hazmat.primitives.serialization import load_pem_public_key
TIMEFORMAT = "%H:%M"
-READ_SENSOR_RETRIES = 10
-
INVERTER_DEFS = {
"SUNSYNK_SOLARSYNKV3": {
"online": "sensor.{device_name}_{inverter_sn}_battery_soc",
"default_config": {
"maximum_dod_percent": 20,
"id_battery_soc": "sensor.{device_name}_{inverter_sn}_battery_soc",
- "id_consumption_today": "sensor.{device_name}_{inverter_sn}_day_load_energy",
- "id_consumption": "sensor.{device_name}_{inverter_sn}_load_power",
- "id_grid_import_today": "sensor.{device_name}_{inverter_sn}_day_grid_import",
- "id_grid_export_today": "sensor.{device_name}_{inverter_sn}_day_grid_export",
+ "id_consumption_today": "sensor.{device_name}_{inverter_sn}_load_daily_used",
+ "id_consumption": "sensor.{device_name}_{inverter_sn}_load_power", # not believed used
+ "id_grid_import_today": "sensor.{device_name}_{inverter_sn}_grid_etoday_from",
+ "id_grid_export_today": "sensor.{device_name}_{inverter_sn}_grid_etoday_to",
"id_solar_power": [
"sensor.{device_name}_{inverter_sn}_pv1_power",
"sensor.{device_name}_{inverter_sn}_pv2_power",
@@ -27,22 +31,64 @@
"supports_hold_soc": False,
"update_cycle_seconds": 300,
},
- # Brand Conguration: Exposed as inverter.brand_config and can be over-written using arguments
+ # Brand Configuration: Exposed as inverter.brand_config and can be over-written using arguments
# from the config.yaml file but not required outside of this module
"brand_config": {
"battery_voltage": "sensor.{device_name}_{inverter_sn}_battery_voltage",
- "battery_current": "sensor.{device_name}_{inverter_sn}_battery_current",
- "id_control_helper": "input_text.{device_name}_{inverter_sn}_settings",
- "id_use_timer": "sensor.{device_name}_{inverter_sn}_use_timer",
- "id_priority_load": "sensor.{device_name}_{inverter_sn}_priority_load",
- "id_timed_charge_start": "sensor.{device_name}_{inverter_sn}_prog1_time",
- "id_timed_charge_end": "sensor.{device_name}_{inverter_sn}_prog2_time",
- "id_timed_charge_enable": "sensor.{device_name}_{inverter_sn}_prog1_charge",
- "id_timed_charge_capacity": "sensor.{device_name}_{inverter_sn}_prog1_capacity",
- "id_timed_discharge_start": "sensor.{device_name}_{inverter_sn}_prog3_time",
- "id_timed_discharge_end": "sensor.{device_name}_{inverter_sn}_prog4_time",
- "id_timed_dicharge_enable": "sensor.{device_name}_{inverter_sn}_prog3_charge",
- "id_timed_discharge_capacity": "sensor.{device_name}_{inverter_sn}_prog3_capacity",
+ "battery_current": "sensor.{device_name}_{inverter_sn}_battery_current", # not believed used
+ "id_control_helper": "input_text.{device_name}_{inverter_sn}_settings",
+ "id_use_timer": "sensor.{device_name}_{inverter_sn}_peakandvallery",
+ "id_priority_load": "sensor.{device_name}_{inverter_sn}_energymode",
+ "id_timed_charge_start": "sensor.{device_name}_{inverter_sn}_selltime1",
+ "id_timed_charge_end": "sensor.{device_name}_{inverter_sn}_selltime2",
+ "id_timed_charge_enable": "sensor.{device_name}_{inverter_sn}_gentime1on",
+ "id_timed_charge_target_soc": "sensor.{device_name}_{inverter_sn}_cap1",
+ "id_timed_charge_capacity": "sensor.{device_name}_{inverter_sn}_prog1_capacity", # not believed used
+ "id_timed_discharge_start": "sensor.{device_name}_{inverter_sn}_selltime3",
+ "id_timed_discharge_end": "sensor.{device_name}_{inverter_sn}_selltime4",
+ "id_timed_dicharge_enable": "sensor.{device_name}_{inverter_sn}_gentime3on",
+ "id_timed_discharge_target_soc": "sensor.{device_name}_{inverter_sn}_cap3",
+ "id_timed_discharge_capacity": "sensor.{device_name}_{inverter_sn}_prog3_capacity", # not believed used
+ "json_work_mode": "sysWorkMode",
+ "json_priority_load": "energyMode",
+ "json_grid_charge": "sdChargeOn",
+ "json_use_timer": "peakAndVallery",
+ "json_timed_charge_start": "sellTime1",
+ "json_timed_charge_end": "sellTime2",
+ "json_timed_unused": [f"sellTime{i}" for i in range(5, 7)],
+ "json_timed_charge_enable": "time1on",
+ "json_timed_charge_target_soc": "cap1",
+ "json_charge_current": "sdBatteryCurrent",
+ "json_gen_charge_enable": "genTime1on",
+ "json_timed_discharge_start": "sellTime3",
+ "json_timed_discharge_end": "sellTime4",
+ "json_timed_discharge_enable": "time3on",
+ "json_timed_discharge_target_soc": "cap3",
+ "json_timed_discharge_power": "sellTime3Pac",
+ "json_gen_discharge_enable": "genTime3on",
+ },
+ },
+ "SUNSYNK_SOLARSUNSYNK": {
+ "online": "sensor.state_of_charge",
+ "default_config": {
+ "maximum_dod_percent": 20,
+ "id_battery_soc": "sensor.state_of_charge",
+ "id_consumption_today": "sensor.total_load",
+ "id_consumption": "sensor.instantaneous_load",
+ "id_grid_import_today": "sensor.grid_to_load",
+ "id_grid_export_today": "sensor.solar_to_grid",
+ "id_solar_power": [
+ "sensor.instantaneous_ppv1",
+ "sensor.instantaneous_ppv2",
+ ],
+ "supports_hold_soc": False,
+ "update_cycle_seconds": 60,
+ },
+ # Brand Configuration: Exposed as inverter.brand_config and can be over-written using arguments
+ # from the config.yaml file but not required outside of this module
+ "brand_config": {
+ "battery_voltage": "sensor.instantaneous_battery_i_o",
+ "battery_current": "sensor.instantaneous_battery_i_o",
"json_work_mode": "sysWorkMode",
"json_priority_load": "energyMode",
"json_grid_charge": "sdChargeOn",
@@ -65,13 +111,25 @@
}
-class InverterController:
+def create_inverter_controller(inverter_type: str, host):
+ """Factory function to create the correct inverter controller."""
+ if inverter_type == "SUNSYNK_SOLARSYNKV3":
+ return SolarSynkV3Inverter(inverter_type=inverter_type, host=host)
+ elif inverter_type == "SUNSYNK_SOLARSUNSYNK":
+ return SolarSunsynkInverter(inverter_type=inverter_type, host=host)
+ else:
+ host.log(f"Unknown inverter type {inverter_type}", level="ERROR")
+ return False
+
+
+class SunsynkInverterController(ABC):
+ """Abstract base class for all Sunsynk inverter controllers."""
+
def __init__(self, inverter_type: str, host) -> None:
self._host = host
self.tz = self._host.tz
if host is not None:
self.log = host.log
- # self.config = self._host.config
self._type = inverter_type
self._device_name = self._host.device_name
self._inverter_sn = self._host.inverter_sn
@@ -94,14 +152,6 @@ def __init__(self, inverter_type: str, host) -> None:
self.log(f"Loading controller for inverter type {self._type}")
@property
- # def is_online(self):
- # entity_id = INVERTER_DEFS[self.type].get("online", (None, None))
- # if entity_id is not None:
- # entity_id = entity_id.replace("{device_name}", self._device_name)
- # return self._host.get_state(entity_id) not in [
- # "unknown", "unavailable"]
- # else:
- # return True
def is_online(self):
entity_id = self._online
if entity_id is not None:
@@ -133,218 +183,512 @@ def _unknown_inverter(self):
self._host.status(e)
raise Exception(e)
- def _solarsynk_set_helper(self, **kwargs):
- if self._host.get_config("id_control_helper") is not None:
- self.log("About to read Json")
- current_json = json.loads(self._host.get_config("id_control_helper"))
- else:
- current_json = {}
-
- # Convert numpy/pandas types to native Python types
-
- converted_kwargs = {}
+ def _convert_kwargs(self, kwargs: dict) -> dict:
+ """Convert numpy/pandas types to native Python types."""
+ converted = {}
for key, value in kwargs.items():
if isinstance(value, (np.integer, np.int64)):
- converted_kwargs[key] = int(value)
+ converted[key] = int(value)
elif isinstance(value, (np.floating, np.float64)):
- converted_kwargs[key] = float(value)
+ converted[key] = float(value)
elif isinstance(value, np.ndarray):
- converted_kwargs[key] = value.tolist()
+ converted[key] = value.tolist()
elif isinstance(value, np.datetime64):
- converted_kwargs[key] = pd.Timestamp(value).strftime("%H:%M")
+ converted[key] = pd.Timestamp(value).strftime("%H:%M")
elif isinstance(value, np.timedelta64):
- converted_kwargs[key] = str(value)
- elif isinstance(value, pd.Timestamp): # pd not expected but added for completeness
- converted_kwargs[key] = value.strftime("%H:%M")
+ converted[key] = str(value)
+ elif isinstance(value, pd.Timestamp):
+ converted[key] = value.strftime("%H:%M")
elif isinstance(value, pd.Timedelta):
- converted_kwargs[key] = str(value)
+ converted[key] = str(value)
else:
- converted_kwargs[key] = value
+ converted[key] = value
+ return converted
- updated_json = current_json | converted_kwargs
- new_json = json.dumps(updated_json)
+ @abstractmethod
+ def _set_inverter(self, **kwargs):
+ """Send settings to the inverter. Implemented by each subclass."""
+ pass
- # entity_id = self._host.config("id_control_helper")
- self.log("About to get entity id")
- entity_id = self._host.config.get("id_control_helper", None)
+ @abstractmethod
+ def enable_timed_mode(self):
+ pass
+
+ @abstractmethod
+ def control_charge(self, enable, **kwargs):
+ pass
+
+ @abstractmethod
+ def control_discharge(self, enable, **kwargs):
+ pass
+
+ @abstractmethod
+ def hold_soc(self, enable, soc=None):
+ pass
+
+ @property
+ @abstractmethod
+ def status(self):
+ pass
+
+ def _monitor_target_soc(self, target_soc, mode="charge"):
+ pass
- self.log(f"Setting SolarSynk input helper {entity_id} to {new_json}")
- # self._host.set_state(entity_id=entity_id, state=new_json)
- # Wait until text input helper is empty (i.e is written to the Cloud)
- # Max wait time of 10 seconds
- empty = False
- retries = 0
- while not empty and retries < READ_SENSOR_RETRIES:
- retries += 1
- time.sleep(1)
- self.log(f"Checking for {entity_id} to be empty")
- content = self._host.get_state_retry(entity_id=entity_id)
- empty = content == None
+class SunsynkBaseInverter(SunsynkInverterController):
+ """Shared implementation of charge/discharge/hold logic for all Sunsynk inverters.
+
+ Subclasses implement _set_inverter() to handle the actual write mechanism.
+ """
def enable_timed_mode(self):
self.log("Entered enable_timed_mode")
self.log(f"self._config = {self._config}")
- if self._type == "SUNSYNK_SOLARSYNKV3":
- params = {
- self._brand_config["json_use_timer"]: 1,
- self._brand_config["json_priority_load"]: 1,
- }
- self._solarsynk_set_helper(**params)
- params = {x: "00:00" for x in self._brand_config["json_timed_unused"]}
+ params = {
+ self._brand_config["json_use_timer"]: 1,
+ self._brand_config["json_priority_load"]: 1,
+ }
+ self._set_inverter(**params)
- self._solarsynk_set_helper(**params)
+ params = {x: "00:00" for x in self._brand_config["json_timed_unused"]}
+ self._set_inverter(**params)
- else:
- self._unknown_inverter()
+ def hold_soc(self, enable, soc=None):
+ pass
- def control_charge(self, enable, **kwargs):
- if self._type == "SUNSYNK_SOLARSYNKV3":
- time_now = pd.Timestamp.now(tz=self.tz)
-
- if enable:
- # self.enable_timed_mode()
-
- params = {
- self._brand_config["json_work_mode"]: 2,
- self._brand_config["json_timed_charge_target_soc"]: kwargs.get("target_soc", 100),
- self._brand_config["json_timed_charge_start"]: kwargs.get("start", time_now.strftime(TIMEFORMAT)),
- self._brand_config["json_timed_charge_end"]: kwargs.get(
- "end", time_now.ceil("30min").strftime(TIMEFORMAT)
- ),
- self._brand_config["json_charge_current"]: min(
- round(kwargs.get("power", 0) / self._host.get_config("battery_voltage")),
- self._host.get_config("battery_current_limit_amps"),
- ),
- }
-
- self._solarsynk_set_helper(**params)
-
- params = {
- self._brand_config["json_timed_charge_enable"]: True,
- self._brand_config["json_gen_charge_enable"]: False,
- }
-
- self._solarsynk_set_helper(**params)
- else:
- params = {
- self._brand_config["json_work_mode"]: 2,
- self._brand_config["json_timed_charge_target_soc"]: 100,
- self._brand_config["json_timed_charge_start"]: "00:00",
- self._brand_config["json_timed_charge_end"]: "00:00",
- self._brand_config["json_charge_current"]: self._host.get_config("battery_current_limit_amps"),
- }
+class SolarSynkV3Inverter(SunsynkBaseInverter):
+ """Controller for the martinville/solarsynkv3 Home Assistant Add-On.
+
+ Writes settings via an input_text helper entity in v2# semicolon-delimited
+ format. The Add-On polls the helper on its refresh interval (~5 minutes),
+ sends the accumulated settings to the Sunsynk cloud, then clears the helper.
+ FIFO merging ensures multiple writes between polls are not lost.
+ Multiple _set_inverter calls are required because battery and system mode
+ settings must be sent as separate writes, and the v2# format has a 255
+ character limit per write.
+ """
- self._solarsynk_set_helper(**params)
+ def control_charge(self, enable, **kwargs):
+ time_now = pd.Timestamp.now(tz=self.tz)
- params = {
- self._brand_config["json_timed_charge_enable"]: False,
- self._brand_config["json_gen_charge_enable"]: True,
- }
+ if enable:
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_charge_target_soc"]: kwargs.get("target_soc", 100),
+ self._brand_config["json_timed_charge_start"]: kwargs.get("start", time_now.strftime(TIMEFORMAT)),
+ self._brand_config["json_timed_charge_end"]: kwargs.get(
+ "end", time_now.ceil("30min").strftime(TIMEFORMAT)
+ ),
+ self._brand_config["json_charge_current"]: min(
+ round(kwargs.get("power", 0) / self._host.get_config("battery_voltage")),
+ self._host.get_config("battery_current_limit_amps"),
+ ),
+ }
+ self._set_inverter(**params)
- self._solarsynk_set_helper(**params)
+ params = {
+ self._brand_config["json_timed_charge_enable"]: True,
+ self._brand_config["json_gen_charge_enable"]: False,
+ }
+ self._set_inverter(**params)
else:
- self._unknown_inverter()
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_charge_target_soc"]: 100,
+ self._brand_config["json_timed_charge_start"]: "00:00",
+ self._brand_config["json_timed_charge_end"]: "00:00",
+ self._brand_config["json_charge_current"]: self._host.get_config("battery_current_limit_amps"),
+ }
+ self._set_inverter(**params)
+
+ params = {
+ self._brand_config["json_timed_charge_enable"]: False,
+ self._brand_config["json_gen_charge_enable"]: True,
+ }
+ self._set_inverter(**params)
def control_discharge(self, enable, **kwargs):
- if self._type == "SUNSYNK_SOLARSYNKV3":
- time_now = pd.Timestamp.now(tz=self.tz)
-
- if enable:
- # self.enable_timed_mode()
- params = {
- self._brand_config["json_work_mode"]: 0,
- self._brand_config["json_timed_discharge_target_soc"]: kwargs.get(
- "target_soc", self._host.get_config("maximum_dod_percent")
- ),
- self._brand_config["json_timed_discharge_start"]: kwargs.get(
- "start", time_now.strftime(TIMEFORMAT)
- ),
- self._brand_config["json_timed_discharge_end"]: kwargs.get(
- "end", time_now.ceil("30min").strftime(TIMEFORMAT)
- ),
- }
-
- self._solarsynk_set_helper(**params)
-
- params = {
- self._brand_config["json_timed_discharge_power"]: kwargs.get("power", 0),
- self._brand_config["json_timed_discharge_enable"]: True,
- self._brand_config["json_gen_discharge_enable"]: False,
- }
-
- self._solarsynk_set_helper(**params)
+ time_now = pd.Timestamp.now(tz=self.tz)
- else:
- params = {
- self._brand_config["json_work_mode"]: 2,
- self._brand_config["json_timed_discharge_target_soc"]: 100,
- self._brand_config["json_timed_discharge_start"]: "00:00",
- self._brand_config["json_timed_discharge_end"]: "00:00",
- self._brand_config["json_timed_discharge_power"]: 0,
- }
-
- self._solarsynk_set_helper(**params)
-
- params = {
- self._brand_config["json_timed_discharge_enable"]: False,
- self._brand_config["json_gen_discharge_enable"]: True,
- }
-
- self._solarsynk_set_helper(**params)
- else:
- self._unknown_inverter()
+ if enable:
+ params = {
+ self._brand_config["json_work_mode"]: 0,
+ self._brand_config["json_timed_discharge_target_soc"]: kwargs.get(
+ "target_soc", self._host.get_config("maximum_dod_percent")
+ ),
+ self._brand_config["json_timed_discharge_start"]: kwargs.get("start", time_now.strftime(TIMEFORMAT)),
+ self._brand_config["json_timed_discharge_end"]: kwargs.get(
+ "end", time_now.ceil("30min").strftime(TIMEFORMAT)
+ ),
+ }
+ self._set_inverter(**params)
- def hold_soc(self, enable, soc=None):
- if self._type == "SUNSYNK_SOLARSYNKV3":
- pass
+ params = {
+ self._brand_config["json_timed_discharge_power"]: kwargs.get("power", 0),
+ self._brand_config["json_timed_discharge_enable"]: True,
+ self._brand_config["json_gen_discharge_enable"]: False,
+ }
+ self._set_inverter(**params)
else:
- self._unknown_inverter()
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_discharge_target_soc"]: 100,
+ self._brand_config["json_timed_discharge_start"]: "00:00",
+ self._brand_config["json_timed_discharge_end"]: "00:00",
+ self._brand_config["json_timed_discharge_power"]: 0,
+ }
+ self._set_inverter(**params)
+
+ params = {
+ self._brand_config["json_timed_discharge_enable"]: False,
+ self._brand_config["json_gen_discharge_enable"]: True,
+ }
+ self._set_inverter(**params)
@property
def status(self):
- status = None
+ """Read current inverter status from HA entities exposed by the martinville/solarsynkv3 addon."""
time_now = pd.Timestamp.now(tz=self.tz)
+ charge_start = pd.Timestamp(self._host.get_config("id_timed_charge_start"), tz=self.tz)
+ charge_end = pd.Timestamp(self._host.get_config("id_timed_charge_end"), tz=self.tz)
+ discharge_start = pd.Timestamp(self._host.get_config("id_timed_discharge_start"), tz=self.tz)
+ discharge_end = pd.Timestamp(self._host.get_config("id_timed_discharge_end"), tz=self.tz)
+
+ return {
+ "timer mode": self._host.get_config("id_use_timer"),
+ "priority load": self._host.get_config("id_priority_load"),
+ "charge": {
+ "start": charge_start,
+ "end": charge_end,
+ "active": self._host.get_config("id_timed_charge_enable")
+ and (time_now >= charge_start)
+ and (time_now < charge_end),
+ "target_soc": self._host.get_config("id_timed_charge_target_soc"),
+ },
+ "discharge": {
+ "start": discharge_start,
+ "end": discharge_end,
+ "active": self._host.get_config("id_timed_discharge_enable")
+ and (time_now >= discharge_start)
+ and (time_now < discharge_end),
+ "target_soc": self._host.get_config("id_timed_discharge_target_soc"),
+ },
+ "hold_soc": {
+ "active": False,
+ "soc": 0.0,
+ },
+ }
+
+ def _set_inverter(self, **kwargs):
+ converted = self._convert_kwargs(kwargs)
- if self._type == "SUNSYNK_SOLARSYNKV3":
- charge_start = pd.Timestamp(self._host.get_config("id_timed_charge_start"), tz=self.tz)
- charge_end = pd.Timestamp(self._host.get_config("id_timed_charge_end"), tz=self.tz)
- discharge_start = pd.Timestamp(self._host.get_config("id_timed_charge_start"), tz=self.tz)
- discharge_end = pd.Timestamp(self._host.get_config("id_timed_charge_end"), tz=self.tz)
-
- status = {
- "timer mode": self._host.get_config("id_use_timer"),
- "priority load": self._host.get_config("id_priority_load"),
- "charge": {
- "start": charge_start,
- "end": charge_end,
- "active": self._host.get_config("id_timed_charge_enable")
- and (time_now >= charge_start)
- and (time_now < charge_end),
- "target_soc": self._host.get_config("id_timed_charge_target_soc"),
- },
- "discharge": {
- "start": discharge_start,
- "end": discharge_end,
- "active": self._host.get_config("id_timed_discharge_enable")
- and (time_now >= discharge_start)
- and (time_now < discharge_end),
- "target_soc": self._host.get_config("id_timed_discharge_target_soc"),
- },
- "hold_soc": {
- "active": False,
- "soc": 0.0,
- },
+ entity_id = self._host.config.get("id_control_helper", None)
+ if entity_id is not None:
+ # Force a fresh state read from HA, bypassing AppDaemon's cache
+ self._host.call_service("homeassistant/update_entity", entity_id=entity_id)
+ current_state = self._host.get_state(entity_id)
+ try:
+ # Parse any pending v2# settings already in the helper (FIFO merge)
+ if current_state not in [None, ""] and current_state.startswith("v2#"):
+ current_dict = dict(pair.split(":", 1) for pair in current_state[3:].split(";") if ":" in pair)
+ else:
+ current_dict = {}
+ except Exception:
+ self.log("Error parsing current helper state, starting fresh")
+ current_dict = {}
+ else:
+ self.log(f"Entity not detected, entity_id read was {entity_id}")
+ current_dict = {}
+
+ # Merge pending settings with new ones and serialise to v2# format
+ updated_dict = current_dict | converted
+ new_value = "v2#" + ";".join(f"{k}:{v}" for k, v in updated_dict.items())
+
+ self.log(f"Setting SolarSynk input helper {entity_id} to {new_value}")
+ self._host.call_service("input_text/set_value", entity_id=entity_id, value=new_value)
+
+
+class SolarSunsynkInverter(SunsynkBaseInverter):
+ """Controller for the MorneSaunders360/Solar-Sunsynk HACS integration.
+
+ Writes settings by calling the solar_sunsynk.set_solar_settings HA service
+ directly, providing real-time inverter control with no intermediate helper
+ entity or polling delay. Partial parameter sets are supported — only the
+ fields being changed need to be supplied. All settings for a charge or
+ discharge command are sent in a single service call.
+ """
+
+ def control_charge(self, enable, **kwargs):
+ time_now = pd.Timestamp.now(tz=self.tz)
+
+ if enable:
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_charge_target_soc"]: kwargs.get("target_soc", 100),
+ self._brand_config["json_timed_charge_start"]: kwargs.get("start", time_now.strftime(TIMEFORMAT)),
+ self._brand_config["json_timed_charge_end"]: kwargs.get(
+ "end", time_now.ceil("30min").strftime(TIMEFORMAT)
+ ),
+ self._brand_config["json_charge_current"]: min(
+ round(kwargs.get("power", 0) / self._host.get_config("battery_voltage")),
+ self._host.get_config("battery_current_limit_amps"),
+ ),
+ self._brand_config["json_timed_charge_enable"]: True,
+ self._brand_config["json_gen_charge_enable"]: False,
}
+ else:
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_charge_target_soc"]: 100,
+ self._brand_config["json_timed_charge_start"]: "00:00",
+ self._brand_config["json_timed_charge_end"]: "00:00",
+ self._brand_config["json_charge_current"]: self._host.get_config("battery_current_limit_amps"),
+ self._brand_config["json_timed_charge_enable"]: False,
+ self._brand_config["json_gen_charge_enable"]: True,
+ }
+
+ self._set_inverter(**params)
- return status
+ def control_discharge(self, enable, **kwargs):
+ time_now = pd.Timestamp.now(tz=self.tz)
+ if enable:
+ params = {
+ self._brand_config["json_work_mode"]: 0,
+ self._brand_config["json_timed_discharge_target_soc"]: kwargs.get(
+ "target_soc", self._host.get_config("maximum_dod_percent")
+ ),
+ self._brand_config["json_timed_discharge_start"]: kwargs.get("start", time_now.strftime(TIMEFORMAT)),
+ self._brand_config["json_timed_discharge_end"]: kwargs.get(
+ "end", time_now.ceil("30min").strftime(TIMEFORMAT)
+ ),
+ self._brand_config["json_timed_discharge_power"]: kwargs.get("power", 0),
+ self._brand_config["json_timed_discharge_enable"]: True,
+ self._brand_config["json_gen_discharge_enable"]: False,
+ }
else:
- self._unknown_inverter()
+ params = {
+ self._brand_config["json_work_mode"]: 2,
+ self._brand_config["json_timed_discharge_target_soc"]: 100,
+ self._brand_config["json_timed_discharge_start"]: "00:00",
+ self._brand_config["json_timed_discharge_end"]: "00:00",
+ self._brand_config["json_timed_discharge_power"]: 0,
+ self._brand_config["json_timed_discharge_enable"]: False,
+ self._brand_config["json_gen_discharge_enable"]: True,
+ }
- def _monitor_target_soc(self, target_soc, mode="charge"):
- pass
+ self._set_inverter(**params)
+
+ def _authenticate(self) -> str:
+ """Authenticate with the Sunsynk API and return a Bearer token.
+
+ Uses RSA public key encryption for the password, matching the
+ authentication flow in the Solar-Sunsynk integration.
+
+ Returns:
+ str: Bearer token on success, empty string on failure.
+ """
+ base_url = "https://api.sunsynk.net"
+ username = self._host.get_config("sunsynk_username")
+ password = self._host.get_config("sunsynk_password")
+
+ if not username or not password:
+ self.log("sunsynk_username or sunsynk_password not set in config", level="ERROR")
+ return ""
+
+ try:
+ # Fetch RSA public key
+ nonce = str(int(time.time() * 1000))
+ sign = hashlib.md5(f"nonce={nonce}&source=sunsynkPOWER_VIEW".encode()).hexdigest()
+ resp = requests.get(
+ f"{base_url}/anonymous/publicKey",
+ params={"source": "sunsynk", "nonce": nonce, "sign": sign},
+ timeout=10,
+ )
+ resp.raise_for_status()
+ public_key_string = str(resp.json()["data"])
+
+ # Encrypt password with RSA public key
+ pem = "-----BEGIN PUBLIC KEY-----\n" + public_key_string + "\n-----END PUBLIC KEY-----"
+ public_key = load_pem_public_key(pem.encode())
+ encrypted_password = base64.b64encode(public_key.encrypt(password.encode(), PKCS1v15())).decode()
+
+ # Obtain Bearer token
+ token_nonce = str(int(time.time() * 1000))
+ token_sign = hashlib.md5(
+ f"nonce={token_nonce}&source=sunsynk{public_key_string[:10]}".encode()
+ ).hexdigest()
+
+ resp = requests.post(
+ f"{base_url}/oauth/token/new",
+ json={
+ "client_id": "csp-web",
+ "grant_type": "password",
+ "password": encrypted_password,
+ "source": "sunsynk",
+ "username": username,
+ "nonce": token_nonce,
+ "sign": token_sign,
+ },
+ timeout=10,
+ )
+ resp.raise_for_status()
+ resp_json = resp.json()
+
+ if resp_json.get("msg") != "Success":
+ self.log(f"Sunsynk authentication failed: {resp_json.get('msg')}", level="ERROR")
+ return ""
+
+ return str(resp_json["data"]["access_token"])
+
+ except Exception as e:
+ self.log(f"Sunsynk authentication error: {e}", level="ERROR")
+ return ""
+
+ def _get_inverter_settings(self) -> dict:
+ """Fetch current inverter settings directly from the Sunsynk cloud API.
+
+ Returns:
+ dict: Settings data dict (e.g. sellTime1, cap1, time1on etc.)
+ Returns empty dict on failure.
+ """
+ sn = self._inverter_sn
+ if not sn:
+ self.log("inverter_sn not set in config.yaml", level="ERROR")
+ return {}
+
+ token = self._authenticate()
+ if not token:
+ return {}
+
+ try:
+ resp = requests.get(
+ f"https://api.sunsynk.net/api/v1/common/setting/{sn}/read",
+ headers={
+ "Authorization": f"Bearer {token}",
+ "Content-Type": "application/json",
+ },
+ timeout=10,
+ )
+ resp.raise_for_status()
+ resp_json = resp.json()
+
+ if resp_json.get("msg") != "Success":
+ self.log(f"Sunsynk settings fetch failed: {resp_json.get('msg')}", level="ERROR")
+ return {}
+
+ return resp_json.get("data", {})
+
+ except Exception as e:
+ self.log(f"Sunsynk settings fetch error: {e}", level="ERROR")
+ return {}
+
+ def _set_inverter(self, **kwargs):
+ converted = self._convert_kwargs(kwargs)
+ sn = self._inverter_sn
+ self.log(f"Calling solar_sunsynk.set_solar_settings for inverter {sn} with {converted}")
+ self._host.call_service(
+ "solar_sunsynk/set_solar_settings",
+ sn=sn,
+ **converted,
+ )
+
+ @property
+ def status(self):
+ """Read current inverter status directly from the Sunsynk cloud API."""
+ time_now = pd.Timestamp.now(tz=self.tz)
+ bc = self._brand_config
+
+ self.log("Fetching inverter settings from Sunsynk API for status check")
+ settings = self._get_inverter_settings()
+
+ if not settings:
+ self.log("Unable to fetch inverter settings — returning None status", level="WARNING")
+ return None
+
+ def _parse_time(t):
+ try:
+ return pd.Timestamp(f"{pd.Timestamp.now(tz=self.tz).date()} {t}", tz=self.tz)
+ except Exception:
+ return None
+
+ charge_start = _parse_time(settings.get(bc["json_timed_charge_start"], "00:00"))
+ charge_end = _parse_time(settings.get(bc["json_timed_charge_end"], "00:00"))
+ discharge_start = _parse_time(settings.get(bc["json_timed_discharge_start"], "00:00"))
+ discharge_end = _parse_time(settings.get(bc["json_timed_discharge_end"], "00:00"))
+ charge_enable = settings.get(bc["json_timed_charge_enable"], False)
+ discharge_enable = settings.get(bc["json_timed_discharge_enable"], False)
+
+ return {
+ "timer mode": settings.get(bc["json_use_timer"]),
+ "priority load": settings.get(bc["json_priority_load"]),
+ "charge": {
+ "start": charge_start,
+ "end": charge_end,
+ "active": charge_enable
+ and charge_start is not None
+ and charge_end is not None
+ and (time_now >= charge_start)
+ and (time_now < charge_end),
+ "target_soc": settings.get(bc["json_timed_charge_target_soc"]),
+ },
+ "discharge": {
+ "start": discharge_start,
+ "end": discharge_end,
+ "active": discharge_enable
+ and discharge_start is not None
+ and discharge_end is not None
+ and (time_now >= discharge_start)
+ and (time_now < discharge_end),
+ "target_soc": settings.get(bc["json_timed_discharge_target_soc"]),
+ },
+ "hold_soc": {
+ "active": False,
+ "soc": 0.0,
+ },
+ }
+
+
+# Legacy compatibility: InverterController is kept as a thin delegating wrapper
+# so any existing code that instantiates InverterController directly continues
+# to work. New code should use create_inverter_controller() instead.
+class InverterController(SunsynkInverterController):
+ """Legacy entry point — wraps the correct subclass for the inverter type."""
+
+ def __init__(self, inverter_type: str, host) -> None:
+ self._delegate = create_inverter_controller(inverter_type=inverter_type, host=host)
+ super().__init__(inverter_type=inverter_type, host=host)
+
+ def _set_inverter(self, **kwargs):
+ self._delegate._set_inverter(**kwargs)
+
+ def enable_timed_mode(self):
+ self._delegate.enable_timed_mode()
+
+ def control_charge(self, enable, **kwargs):
+ self._delegate.control_charge(enable, **kwargs)
+
+ def control_discharge(self, enable, **kwargs):
+ self._delegate.control_discharge(enable, **kwargs)
+
+ def hold_soc(self, enable, soc=None):
+ self._delegate.hold_soc(enable, soc=soc)
+
+ @property
+ def status(self):
+ return self._delegate.status
+
+ @property
+ def is_online(self):
+ return self._delegate.is_online
+
+ @property
+ def timed_mode(self):
+ return self._delegate.timed_mode
+
+ def clear_hold_status(self):
+ self._delegate.clear_hold_status()