diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8cd6af05..8d2a7c57 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,7 @@ All notable changes to this project will be documented in this file.
 ### Added
 - Add project-level `CLAUDE.md` file [#329](https://github.com/ie3-institute/pypsdm/issues/329)
 - Adding congestion result handling [#198](https://github.com/ie3-institute/pypsdm/issues/198)
+- Enrich an existing grid container with EVs and EVCS [#355](https://github.com/ie3-institute/pypsdm/issues/355)
 
 ### Changed
 
diff --git a/nbs/enrich_grid_with_ev_and_evcs.ipynb b/nbs/enrich_grid_with_ev_and_evcs.ipynb
new file mode 100644
index 00000000..3fb4a6ce
--- /dev/null
+++ b/nbs/enrich_grid_with_ev_and_evcs.ipynb
@@ -0,0 +1,161 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "initial_id",
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "# Some Jupyter notebook magic to reload modules automatically when they change,\n",
+    "# not necessary for this specific notebook but useful in general\n",
+    "%load_ext autoreload\n",
+    "%autoreload 2\n",
+    "\n",
+    "# Gives you high-resolution images within the notebook\n",
+    "%config InlineBackend.figure_format = 'retina'"
+   ]
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "from definitions import ROOT_DIR\n",
+    "import os\n",
+    "from pypsdm import GridContainer\n",
+    "\n",
+    "# The PSDM-specific input models can be imported from pypsdm.models.input and\n",
+    "# pypsdm.models.result. The `GridWithResults` container is located in pypsdm.models.gwr.\n",
+    "from pypsdm.models.gwr import GridWithResults\n",
+    "\n",
+    "grid_path = os.path.join(ROOT_DIR, \"tests\", \"resources\", \"simple_grid\", \"input\")\n",
+    "# IO data models in general have a from_csv method to parse PSDM files\n",
+    "gwr = GridContainer.from_csv(grid_path)\n",
+    "\n",
+    "# Output directory\n",
+    "target_grid_path = os.path.join(ROOT_DIR, \"output\", \"enriched_grid\")\n",
+    "if not os.path.exists(target_grid_path):\n",
+    "    os.makedirs(target_grid_path)"
+   ],
+   "id": "8a6d151463bebeff"
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "from pypsdm.io.utils import delete_all_files_in_directory\n",
+    "\n",
+    "delete_all_files_in_directory(target_grid_path)"
+   ],
+   "id": "ece5d5686f9eda0c"
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "# Input parameters. So far we only support sampling p_rated of the EVs from a normal distribution.\n",
+    "# EVs will have a storage size of 120.0 kWh and a consumption of 0.18 kWh/km each.\n",
+    "# The EVCS will have a charging power of 22.0 kW and two charging points.\n",
+    "ev_p_rated_mean = 22.0\n",
+    "ev_p_rated_sigma = 0.0\n",
+    "ev_p_rated_min = 22.0\n",
+    "ev_p_rated_max = 22.0\n",
+    "ev_p_rated_params = [ev_p_rated_mean, ev_p_rated_sigma, ev_p_rated_min, ev_p_rated_max]\n",
+    "evcs_v2g = False"
+   ],
+   "id": "dfd54ffa547aa7a6"
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "# Identify nodes to which EVs and EVCS should be connected. For example, customer nodes most likely have an existing load, so we look for nodes in LV grids with an existing load.\n",
+    "lv_nodes = gwr.nodes.data[gwr.nodes.data[\"v_rated\"] <= 1.0]\n",
+    "household_nodes = lv_nodes[lv_nodes.index.isin(gwr.loads.node)]\n",
+    "household_nodes.head()"
+   ],
+   "id": "5e5143be50696313"
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "from pypsdm.models.input.create.enrich_grid import enrich_grid_by_evs_and_evcs\n",
+    "from pypsdm.models.input.create.participants import create_electric_vehicles\n",
+    "from pypsdm.models.input.create.participants import create_ev_charging_stations\n",
+    "\n",
+    "# Get dictionaries for the EVs and EVCS as well as DataFrames for the POIs and the POI mapping\n",
+    "ev_dict, evcs_dict, df_poi, df_poi_map = enrich_grid_by_evs_and_evcs(\n",
+    "    nodes=household_nodes,\n",
+    "    ev_p_rated_params=ev_p_rated_params,\n",
+    "    evcs_v2g=evcs_v2g,\n",
+    "    controlling_em=False,\n",
+    ")\n",
+    "\n",
+    "# With these, the EVs and EVCS can be created\n",
+    "evs = create_electric_vehicles(ev_dict)\n",
+    "evcs = create_ev_charging_stations(evcs_dict)"
+   ],
+   "id": "6e0ba8c8222b49cf"
+  },
+  {
+   "metadata": {},
+   "cell_type": "code",
+   "outputs": [],
+   "execution_count": null,
+   "source": [
+    "from pypsdm.models.input.container.grid import GridContainer\n",
+    "from pypsdm.models.input.create.enrich_grid import add_pois_of_enriched_evcs\n",
+    "\n",
+    "# We can now create an updated SystemParticipantsContainer and an updated node_participants_map\n",
+    "updated_participants = gwr.participants.copy(evs=evs, evcs=evcs)\n",
+    "node_participants_map = updated_participants.build_node_participants_map(\n",
+    "    gwr.raw_grid.nodes\n",
+    ")\n",
+    "\n",
+    "# With this we can build our updated grid containing the updated participants.\n",
+    "updated_grid = GridContainer(\n",
+    "    gwr.raw_grid, updated_participants, gwr.primary_data, node_participants_map\n",
+    ")\n",
+    "\n",
+    "# Finally, we can write the updated grid as well as the updated POI and POI mapping data to our output path\n",
+    "updated_grid.to_csv(target_grid_path, include_primary_data=True)\n",
+    "add_pois_of_enriched_evcs(df_poi, df_poi_map, target_grid_path)"
+   ],
+   "id": "adfcb8863d6ca0b3"
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/pypsdm/io/utils.py b/pypsdm/io/utils.py
index 0d7ee33f..4a181c1d 100644
--- a/pypsdm/io/utils.py
+++ b/pypsdm/io/utils.py
@@ -1,8 +1,11 @@
 import os
+import shutil
 from datetime import datetime
 from enum import Enum
 from pathlib import Path
+from random import normalvariate
 from typing import Optional, Union
+from uuid import UUID
 
 import pandas as pd
 from pandas import DataFrame
@@ -64,6 +67,21 @@ def read_csv(
     return pd.read_csv(full_path, delimiter=delimiter, quotechar='"')
 
 
+def delete_all_files_in_directory(directory_path):
+    if os.path.exists(directory_path) and os.path.isdir(directory_path):
+        for filename in os.listdir(directory_path):
+            file_path = os.path.join(directory_path, filename)
+            try:
+                if os.path.isfile(file_path) or os.path.islink(file_path):
+                    os.unlink(file_path)
+                elif os.path.isdir(file_path):
+                    shutil.rmtree(file_path)
+            except Exception as e:
+                print(f"Failed to delete {file_path}. Reason: {e}")
+    else:
+        print(f"Directory {directory_path} does not exist or is not a directory.")
+
+
 def to_date_time(zoned_date_time: str) -> datetime:
     """
     Converts zoned date time string with format: "yyyy-MM-dd'T'HH:mm:ss[.S[S][S]]'Z'"
@@ -164,3 +182,16 @@ def bool_converter(maybe_bool):
         return maybe_bool.lower() == "true"
     else:
         raise ValueError("Cannot convert to bool: " + str(maybe_bool))
+
+
+def normaldistribution(dist_params):
+    normal_variant = normalvariate(dist_params[0], dist_params[1])
+    return max(min(normal_variant, dist_params[3]), dist_params[2])
+
+
+def is_valid_uuid(uuid_string):
+    try:
+        uuid_obj = UUID(uuid_string)
+        return str(uuid_obj) == uuid_string
+    except ValueError:
+        return False
diff --git a/pypsdm/models/input/create/enrich_grid.py b/pypsdm/models/input/create/enrich_grid.py
new file mode 100644
index 00000000..ac151edb
--- /dev/null
+++ b/pypsdm/models/input/create/enrich_grid.py
@@ -0,0 +1,233 @@
+import os
+from json import loads
+from typing import Optional
+from uuid import uuid4
+
+import pandas as pd
+from pandas.errors import DataError
+
+from pypsdm.io.utils import is_valid_uuid, normaldistribution
+from pypsdm.models.input.participant.evcs import EvcsLocationType
+
+
+def create_dummy_public_pois(node_for_public_pois):
+
+    geo_position_public_pois = loads(node_for_public_pois.geo_position)
+    longitude_public_pois, latitude_public_pois = geo_position_public_pois[
+        "coordinates"
+    ]
+
+    data_poi = {
+        "uuid": [
+            "3ae3a120-a4ab-4057-8b48-022d8b952930",
+            "52c73a61-a600-4e02-8ed0-24822cef0aa4",
+            "523770be-10a6-4c39-929e-2ed4fc82ff19",
+            "8555fdc8-6f8c-40c6-b467-86c3b745e163",
+            "a2fdb223-c1ec-4ad9-b558-0f145e4ffd8d",
+            "1abce734-a83b-476f-a543-795e60bfe0a2",
+            "1a6123e8-3c36-4ffd-a753-234edaaf391e",
+            "a1554878-6129-4434-bc54-7111dd20d7ee",
+            "020d2e8a-c93a-4042-9972-949e8a6ac2ad",
+            "145b3ab5-74a2-4171-b82f-0c7316d45d60",
+        ],
+        "id": [
+            "Culture-poi",
+            "Sports-poi",
+            "RELIGIOUS-poi",
+            "Work-poi",
+            "Supermarket-poi",
+            "Services-poi",
+            "Other_Shop-poi",
+            "Medicical-poi1",
+            "BBPG-poi",
+            "Restaurant-poi",
+        ],
+        "size": [500, 500, 500, 500, 500, 500, 500, 500, 500, 500],
+        "lat": [
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+            latitude_public_pois,
+        ],
+        "lon": [
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+            longitude_public_pois,
+        ],
+        "categoricallocation": [
+            "culture",
+            "sports",
+            "religious",
+            "work",
+            "supermarket",
+            "services",
+            "other_shop",
+            "medicinal",
+            "bbpg",
+            "restaurant",
+        ],
+    }
+    df_poi = pd.DataFrame(data_poi)
+    df_poi.set_index("uuid", inplace=True)
+
+    data_poi_map = {
+        "poi": [],
+        "evcs": [],
+        "evs": [],
+    }
+    df_poi_map = pd.DataFrame(data_poi_map)
+    df_poi_map.set_index("poi", inplace=True)
+
+    # Add Home POI WITHOUT charging
+    no_charge_home_uuid = "bbf19030-9cea-4c49-bec8-acbfe86ca717"
+    new_row_poi = {
+        "id": "HOME_WITHOUT_CHARGING-poi",
+        "size": 500,
+        "lat": latitude_public_pois,
+        "lon": longitude_public_pois,
+        "categoricallocation": "home",
+    }
+    df_poi.loc[no_charge_home_uuid] = new_row_poi
+    return df_poi, df_poi_map
+
+
+def enrich_grid_by_evs_and_evcs(
+    nodes,
+    ev_p_rated_params: list,
+    evcs_v2g: bool = False,
+    controlling_em: bool = False,
+    df_public_poi: Optional[pd.DataFrame] = None,
+    df_public_poi_map: Optional[pd.DataFrame] = None,
+):
+    if df_public_poi is not None and df_public_poi_map is not None:
+        df_poi = df_public_poi
+        df_poi_map = df_public_poi_map
+    else:
+        df_poi, df_poi_map = create_dummy_public_pois(nodes.iloc[0])
+
+    def create_ev_data_dict(this_node):
+        ev_uuid = uuid4()
+        ev_type_uuid = uuid4()
+        ev_p_rated = normaldistribution(ev_p_rated_params)
+
+        return {
+            "id": f"EV_{this_node.id}",
+            "uuid": ev_uuid,
+            "type_uuid": ev_type_uuid,
+            "node": str(this_node.Index),
+            "s_rated": ev_p_rated,  # AC
+            "s_ratedDC": ev_p_rated,  # DC
+            "e_storage": 120.0,
+            "e_cons": 0.18,
+        }
+
+    def create_evcs_data_dict(this_node):
+        evcs_uuid = uuid4()
+        evcs_id = f"EVCS_{this_node.id}"
+
+        return {
+            "id": evcs_id,
+            "uuid": evcs_uuid,
+            "node": str(this_node.Index),
+            "location_type": EvcsLocationType.HOME.value,
+            "s_rated": 22.0,
+            "v2g_support": str(evcs_v2g).lower(),
+            "controlling_em": get_em_uuid(controlling_em),
+        }
+
+    def get_em_uuid(em: bool):
+        em_uuid = "fixme"
+
+        if em:
+            if em_uuid and is_valid_uuid(em_uuid):
+                return em_uuid
+            else:
+                raise DataError(
+                    "EVCS should be connected to EM but no valid UUID of an EM could be found."
+                )
+
+        return ""
+
+    # Initialize dictionaries for EV and EVCS data
+    ev_dict = {
+        "id": [],
+        "uuid": [],
+        "type_uuid": [],
+        "node": [],
+        "s_rated": [],
+        "s_ratedDC": [],
+        "e_storage": [],
+        "e_cons": [],
+    }
+
+    evcs_dict = {
+        "id": [],
+        "uuid": [],
+        "node": [],
+        "location_type": [],
+        "s_rated": [],
+        "v2g_support": [],
+        "controlling_em": [],
+    }
+
+    for node in nodes.itertuples(index=True):
+
+        # Create EV and EVCS data dictionaries and append them to respective lists in dicts
+        current_ev_data = create_ev_data_dict(node)
+        current_evcs_data = create_evcs_data_dict(node)
+
+        for key in ev_dict.keys():
+            ev_dict[key].append(current_ev_data[key])
+
+        for key in evcs_dict.keys():
+            evcs_dict[key].append(current_evcs_data[key])
+
+        geo_position = loads(node.geo_position)
+        longitude, latitude = geo_position["coordinates"]
+
+        # POI Mapping
+        poi_uuid = uuid4()
+
+        df_poi.loc[poi_uuid] = {
+            "uuid": poi_uuid,
+            "id": f"POI_EVCS_{node.id}",
+            "size": 1,
+            "lat": latitude,
+            "lon": longitude,
+            "categoricallocation": "home",
+        }
+
+        df_poi_map.loc[poi_uuid] = {
+            "poi": poi_uuid,
+            **{
+                key: value
+                for key, value in zip(
+                    ["evcs", "evs"],
+                    [current_evcs_data["uuid"], current_ev_data["uuid"]],
+                )
+            },
+        }
+
+    return (ev_dict, evcs_dict, df_poi, df_poi_map)
+
+
+def add_pois_of_enriched_evcs(df_poi, df_poi_map, target_grid_path):
+    pois_path = os.path.join(target_grid_path, "pois")
+
+    if not os.path.exists(pois_path):
+        os.mkdir(pois_path)
+    df_poi.to_csv(pois_path + "/poi.csv", index_label="uuid")
+    df_poi_map.to_csv(pois_path + "/poi_mapping.csv", index=True)
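
# A minimal usage sketch (not part of the patch above), illustrating the parameter
# convention the new helpers expect: ev_p_rated_params is a positional list of
# [mean, sigma, min, max], and normaldistribution clamps every sample to the
# [min, max] interval. The numeric values below are illustrative only.
from pypsdm.io.utils import normaldistribution

ev_p_rated_params = [11.0, 4.0, 3.7, 22.0]  # mean, sigma, min, max (kW)
samples = [normaldistribution(ev_p_rated_params) for _ in range(5)]
assert all(3.7 <= s <= 22.0 for s in samples)  # clamping keeps samples within bounds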
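
# A second sketch (again not part of the patch): reading back the POI files written by
# add_pois_of_enriched_evcs. The pois/poi.csv and pois/poi_mapping.csv layout follows
# the function above; the target path and the printed columns are assumptions based on
# the DataFrames built in enrich_grid_by_evs_and_evcs.
import os

import pandas as pd

target_grid_path = os.path.join("output", "enriched_grid")  # assumed output location
df_poi = pd.read_csv(os.path.join(target_grid_path, "pois", "poi.csv"))
df_poi_map = pd.read_csv(os.path.join(target_grid_path, "pois", "poi_mapping.csv"))
print(df_poi[["uuid", "id", "categoricallocation"]].head())
print(df_poi_map.head())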