diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/CalculateRateStatisticsInRegion.ipynb b/MappingGlobalCarbon/CalculateRatesInRegions/CalculateRateStatisticsInRegion.ipynb new file mode 100644 index 0000000..9152380 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/CalculateRateStatisticsInRegion.ipynb @@ -0,0 +1,260 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Calculate Average Potential Carbon Sequestration in Ecozones, Countries, and Continents\n", + "\n", + "This script calculates statistics on potential carbon sequestration rates over ecozones, countries, and continents. We use per-pixel estimates of the average potential carbon sequestration rate in Mg C/ha/year and per-pixel variance of model estimates from [Cook-Patton et al. 2020](https://www.nature.com/articles/s41586-020-2686-x) to calculate the regional average potential carbon sequestration rate, standard deviation of model variance, and the 95% confidence interval for the average rate in the region.\n", + "\n", + "Ecozones sourced from FAO [Global Ecological Zones (GEZ) mapping](https://www.fao.org/forest-resources-assessment/remote-sensing/global-ecological-zones-gez-mapping/en/) which can be [downloaded here](https://data.apps.fao.org/map/catalog/static/api/records/2fb209d0-fd34-4e5e-a3d8-a13c241eb61b).\n", + "\n", + "Country boundaries are sourced from [GADM version 3.6](https://gadm.org/download_world36.html)\n", + "\n", + "Ecozones and country boundaries were intersected using QGIS.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import ee\n", + "import numpy as np\n", + "import pandas as pd\n", + "import time" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "#Initialize earth engine\n", + "try:\n", + "    ee.Initialize()\n", + "except Exception as e:\n", + "    ee.Authenticate()\n", + "    
ee.Initialize()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "# load in image collection from Data Lab repository\n", + "imageCollection = ee.ImageCollection('projects/wri-datalab/GFWCarbonSequestrationYoungForests')\n", + "\n", + "# save projection information\n", + "projection = imageCollection.first().projection().getInfo()\n", + "projection_gee = imageCollection.first().projection()\n", + "crs = projection.get('crs')\n", + "crs_transform = projection.get('transform')\n", + "\n", + "# convert image collection to image\n", + "image = imageCollection.toBands()\n", + "# select bands and rename them\n", + "image = image.select(['AboveGround_CoefVariation_b1', 'AboveGround_Lower_CI_b1', 'AboveGround_Mean_b1', \n", + " 'AboveGround_StandardDeviation_b1', 'AboveGround_Upper_CI_b1', 'AboveGround_Variance_b1', \n", + " 'BelowGround_Mean_b1', 'BelowGround_Variance_b1'])\n", + "image = image.rename(['AboveGround_CoefVariation', 'AboveGround_Lower_CI', 'AboveGround_Mean', \n", + " 'AboveGround_StandardDeviation', 'AboveGround_Upper_CI', 'AboveGround_Variance', \n", + " 'BelowGround_Mean', 'BelowGround_Variance'])\n", + "\n", + "def calculate_rate_statistics(feature_collection, region_name, file_name, folder_name):\n", + " \"\"\"\n", + " Function to calculate statistics on potential carbon sequestion rate over a given feature collection,\n", + " including the mean, minimum, and maximum of rates in each region, the 95% confidence interval for the \n", + " mean rate in each region, and the standard deviation of the rate in the region. 
\n", + "\n", + " Parameters:\n", + " feature_collection (ee.FeatureCollection): feature collection over which to calculate statistics\n", + " region_name (String): name to be included in column names (e.g., 'Average Rate in X' where X is region_name)\n", + " file_name (String): file name of CSV to export to in Google Drive\n", + " folder_name (String): folder name to export to in Google Drive\n", + "\n", + " Returns:\n", + " ee.FeatureCollection of regions with calculated potential carbon sequestration rates\n", + " \"\"\"\n", + " # create combined reducer for calculating the mean, minimum, and maximum of rates in region\n", + " min_max_reducers = ee.Reducer.mean().unweighted().combine(ee.Reducer.min().unweighted(), sharedInputs=True)\n", + " min_max_reducers = min_max_reducers.combine(ee.Reducer.max().unweighted(), sharedInputs=True)\n", + " \n", + " # create combined reducer for calculating sum of variance and count of pixels, which will be used\n", + " # to calculate confidence intervals for the average rate in the region\n", + " sum_count_reducers = ee.Reducer.sum().unweighted().combine(ee.Reducer.count().unweighted(), sharedInputs=True)\n", + "\n", + " # calculate mean, minimum, and maximum of rates over regions\n", + " reduce_region_results = image.select(['AboveGround_Mean']).reduceRegions(\n", + " feature_collection, \n", + " reducer=min_max_reducers.setOutputs(['AboveGround_Mean_mean','AboveGround_Mean_min','AboveGround_Mean_max']), \n", + " crs=crs, crsTransform=crs_transform, \n", + " tileScale=16)\n", + " # calculate sum of model variance and count of pixels over regions\n", + " reduce_region_results = image.select(['AboveGround_Variance']).reduceRegions(\n", + " reduce_region_results, \n", + " reducer=sum_count_reducers.setOutputs(['AboveGround_Variance_sum','AboveGround_Variance_count']), \n", + " crs=crs, crsTransform=crs_transform, \n", + " tileScale=16)\n", + " \n", + " # function to format results from reduce regions and calculate confidence 
intervals from\n", + "    # regional variance sum and pixel count\n", + "    def format_results(feature):\n", + "        feature = ee.Feature(feature)\n", + "        # select statistics\n", + "        mean = ee.Number(feature.get('AboveGround_Mean_mean'))\n", + "        minimum = ee.Number(feature.get('AboveGround_Mean_min'))\n", + "        maximum = ee.Number(feature.get('AboveGround_Mean_max'))\n", + "        variance_sum = ee.Number(feature.get('AboveGround_Variance_sum'))\n", + "        variance_count = ee.Number(feature.get('AboveGround_Variance_count'))\n", + "        \n", + "        # calculate standard deviation and 95% confidence intervals\n", + "        # with GEE, Null values can cause errors in calculations, so use If statement to catch\n", + "        # Null values and otherwise return value\n", + "        \n", + "        # calculate variance\n", + "        variance = ee.Algorithms.If(variance_sum, variance_sum.divide(variance_count.pow(2)), None)\n", + "        \n", + "        # calculate standard deviation taking the square root of the variance\n", + "        standard_deviation = ee.Algorithms.If(variance_sum, ee.Number(variance).sqrt(), None)\n", + "        \n", + "        # calculate 95% confidence intervals\n", + "        lower_bound = ee.Algorithms.If(mean, mean.subtract(ee.Number(standard_deviation).multiply(1.96)), None)\n", + "        upper_bound = ee.Algorithms.If(mean, mean.add(ee.Number(standard_deviation).multiply(1.96)), None)\n", + "        \n", + "        # save to features with formatted names\n", + "        feature = feature.set({\n", + "            'Above Ground Rate Average in '+region_name: mean,\n", + "            'Above Ground Rate Minimum in '+region_name: minimum,\n", + "            'Above Ground Rate Maximum in '+region_name: maximum,\n", + "            'AboveGroundSequestration_Unit'+region_name: 'Mg C/ha/year',\n", + "            'Variance of Above Ground Rate Average in '+region_name:variance,\n", + "            'Standard Deviation of Above Ground Rate Average in '+region_name:standard_deviation,\n", + "            'Lower Bound of 95% Confidence Interval of Above Ground Rate Average in '+region_name:lower_bound,\n", + "            'Upper Bound of 95% Confidence Interval of Above 
Ground Rate Average in '+region_name:upper_bound,\n", + " 'Number of Pixels in ': variance_count\n", + " })\n", + " \n", + " # remove property names from reduce regions calculation\n", + " property_names = feature.propertyNames()\n", + " property_names = property_names.removeAll(ee.List(['AboveGround_Mean_mean',\n", + " 'AboveGround_Mean_min',\n", + " 'AboveGround_Mean_max',\n", + " 'AboveGround_Variance_sum',\n", + " 'AboveGround_Variance_count']))\n", + " # drop geometry so file can be easily read in Excel\n", + " feature = feature.select(property_names,retainGeometry=False)\n", + " return feature\n", + " \n", + " # loop over results to format and return\n", + " results = reduce_region_results.map(format_results)\n", + " results = ee.FeatureCollection(results)\n", + " # export to Google Drive\n", + " export_results_task = ee.batch.Export.table.toDrive(\n", + " collection = results, \n", + " folder = folder_name,\n", + " description = file_name, \n", + " fileNamePrefix = file_name)\n", + "\n", + " export_results_task.start()\n", + " \n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [], + "source": [ + "# define regions asset\n", + "regions = ee.FeatureCollection('projects/wri-datalab/GFWCarbonSequestrationYoungForestsBoundaries/FAO_GEZ_Ecozones_By_Country')\n", + "# define name for columns\n", + "region_name = 'Ecozone and Country'\n", + "# define file name to export to\n", + "output_name = 'CarbonSequestration_AboveGroundAverage_Ecozone_Country_GEE'\n", + "# define folder to export to\n", + "folder_name = 'CarbonRateCalculationsForIPCC'\n", + "\n", + "# rename columns from FAO Ecozones and GADM country boundaries to be easily readable\n", + "regions = ee.FeatureCollection(regions.map(lambda x: ee.Feature(x).select(['GID_0','NAME_0','gez_abbrev','gez_code','gez_name','RootShoot'], \n", + " ['ISO Code','Country','Ecozone Abbreviation','Ecozone Code','Ecozone Name','RootShoot'], True)))\n", + "\n", + "# calculate rates 
for regions\n", + "calculate_rate_statistics(regions,region_name,output_name,folder_name)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": {}, + "outputs": [], + "source": [ + "# define regions asset\n", + "regions = ee.FeatureCollection('projects/wri-datalab/GFWCarbonSequestrationYoungForestsBoundaries/FAO_GEZ_Ecozones_By_Continent')\n", + "# define name for columns\n", + "region_name = 'Ecozone and Continent'\n", + "# define file name to export to\n", + "output_name = 'CarbonSequestration_AboveGroundAverage_Ecozone_Continent_GEE'\n", + "# define folder to export to\n", + "folder_name = 'CarbonRateCalculationsForIPCC'\n", + "\n", + "#rename columns from FAO Ecozones and continent boundaries to be easily readable\n", + "regions = ee.FeatureCollection(regions.map(lambda x: ee.Feature(x).select(['REGION','gez_abbrev','gez_code','gez_name','RootShoot'], \n", + " ['Continent','Ecozone Abbreviation','Ecozone Code','Ecozone Name','RootShoot'], True)))\n", + "\n", + "#calculate rates for regions\n", + "calculate_rate_statistics(regions,region_name,output_name,folder_name)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# define regions asset\n", + "regions = ee.FeatureCollection('projects/wri-datalab/GFWCarbonSequestrationYoungForestsBoundaries/FAO_GEZ_Ecozones')\n", + "# define name for columns\n", + "region_name = 'Ecozone'\n", + "# define file name to export to\n", + "output_name = 'CarbonSequestration_AboveGroundAverage_Ecozone_GEE'\n", + "# define folder to export to\n", + "folder_name = 'CarbonRateCalculationsForIPCC'\n", + "\n", + "#rename columns from FAO Ecozones and continent boundaries to be easily readable\n", + "regions = ee.FeatureCollection(regions.map(lambda x: ee.Feature(x).select(['gez_abbrev','gez_code','gez_name','RootShoot'], \n", + " ['Ecozone Abbreviation','Ecozone Code','Ecozone Name','RootShoot'], True)))\n", + "\n", + "#calculate rates for regions\n", 
+ "calculate_rate_statistics(regions,region_name,output_name,folder_name)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.0" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/README.md b/MappingGlobalCarbon/CalculateRatesInRegions/README.md new file mode 100644 index 0000000..9f31848 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/README.md @@ -0,0 +1,7 @@ +## Calculate Average Carbon Sequestration Rate and Other Statistics in Areas of Interest + +This folder contains code to calculate the average carbon sequestration rate and other statistics in areas of interest. Other statistics include the lower and upper confidence interval bounds for the average rate in a region, the variance in modeled rates over the region, and the minimum and maximum observed rate in the region. + +Contents: +1. CalculateRateStatisticsInRegion.ipynb: Jupyter notebook using [Google Earth Engine's Python API](https://developers.google.com/earth-engine/guides/python_install) to calculate rate statistics in ecozones by country and continent. +2. SreamlitApp-CarbonRate-GriscomAreas: Python code for a locally deployable [Streamlit](https://streamlit.io/) based web-app that allows users to calculate carbon sequestration rate and accumulation statistics for inputted regions. 
diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/README.md b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/README.md new file mode 100644 index 0000000..3efbd0b --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/README.md @@ -0,0 +1,7 @@ +## Calculate Carbon Sequestration Rate and Other Statistics in Areas of Interest over [Griscom et al. 2017](https://www.pnas.org/doi/full/10.1073/pnas.1710465114) Potential Restoration Areas +This folder contains code for locally deploying a Python based [Streamlit](https://streamlit.io/) app for calculating the average carbon sequestration rate and total accumulation for an inputted area of interest over potential restoration areas defined in [Griscom et al. 2017](https://www.pnas.org/doi/full/10.1073/pnas.1710465114). + +To deploy the app, install required modules by running +1. `pip install -r requirements.txt` to install required modules +2. 
`streamlit run carbon-app.py` to run the streamlit app + diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/Result_Column_Names.csv b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/Result_Column_Names.csv new file mode 100644 index 0000000..e7d9dd4 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/Result_Column_Names.csv @@ -0,0 +1,24 @@ +Column Name,Description +Griscom_Restoration_Area_Hectares,"Area of restoration opportunity from Griscom et al 2017, in hectares" +Modeled_AboveGround_Restoration_Area_Hectares,"Area of restoration opportunity from Griscom et al 2017 and included in modeled sequestration rates, in hectares" +AboveGround_Sequestration_Rate_Mean,"Estimated average rate of aboveground carbon sequestration in polygon(s), in Mg C/ha/year" +AboveGround_Sequestration_Rate_Variance,"Variance of the estimate of average rate of aboveground carbon sequestration in polygon(s), in (Mg C/ha/year)^2" +AboveGround_Sequestration_Rate_LowerBound,"Lower bound of estimate of average rate of aboveground carbon sequestration in polygon(s), in Mg C/ha/year" +AboveGround_Sequestration_Rate_UpperBound,"Upper bound of estimate of average rate of aboveground carbon sequestration in polygon(s), in Mg C/ha/year" +AboveGround_Sequestration_Rate_Max,"Maximum observed rate of aboveground carbon sequestration in polygon(s), in Mg C/ha/year" +AboveGround_Sequestration_Rate_Min,"Minimum observed rate of aboveground carbon sequestration in polygon(s), in Mg C/ha/year" +AboveGround_Carbon_Accumulation,"Estimated total aboveground carbon accumulation per year in polygon(s), in Mg C/year" +AboveGround_Carbon_Accumulation_Variance,"Variance of the estimate of total aboveground carbon accumulation per year in polygon(s), in (Mg C/year)^2" +AboveGround_Carbon_Accumulation_LowerBound,"Lower bound of estimate of total aboveground carbon accumulation per year in 
polygon(s), in Mg C/year" +AboveGround_Carbon_Accumulation_UpperBound,"Upper bound of estimate of total aboveground carbon accumulation per year in polygon(s), in Mg C/year" +Modeled_BelowGround_Restoration_Area_Hectares,"Area of restoration opportunity from Griscom et al 2017 and included in modeled sequestration rates, in hectares" +BelowGround_Sequestration_Rate_Mean,"Estimated average rate of belowground carbon sequestration in polygon(s), in Mg C/ha/year" +BelowGround_Sequestration_Rate_Variance,"Variance of the estimate of average rate of belowground carbon sequestration in polygon(s), in (Mg C/ha/year)^2" +BelowGround_Sequestration_Rate_LowerBound,"Lower bound of estimate of average rate of belowground carbon sequestration in polygon(s), in Mg C/ha/year" +BelowGround_Sequestration_Rate_UpperBound,"Upper bound of estimate of average rate of belowground carbon sequestration in polygon(s), in Mg C/ha/year" +BelowGround_Sequestration_Rate_Max,"Maximum observed rate of belowground carbon sequestration in polygon(s), in Mg C/ha/year" +BelowGround_Sequestration_Rate_Min,"Minimum observed rate of belowground carbon sequestration in polygon(s), in Mg C/ha/year" +BelowGround_Carbon_Accumulation,"Estimated total belowground carbon accumulation per year in polygon(s), in Mg C/year" +BelowGround_Carbon_Accumulation_Variance,"Variance of the estimate of total belowground carbon accumulation per year in polygon(s), in (Mg C/year)^2" +BelowGround_Carbon_Accumulation_LowerBound,"Lower bound of estimate of total belowground carbon accumulation per year in polygon(s), in Mg C/year" +BelowGround_Carbon_Accumulation_UpperBound,"Upper bound of estimate of total belowground carbon accumulation per year in polygon(s), in Mg C/year" \ No newline at end of file diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/carbon-app.py b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/carbon-app.py new file mode 100644 
index 0000000..75fa7bb --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/carbon-app.py @@ -0,0 +1,341 @@ +import numpy as np +import pandas as pd +import streamlit as st +import geopandas +import geojson +import zipfile +import tempfile +import folium +import base64 +import requests +import json +from osgeo import ogr +from ast import literal_eval +import rasterio +from rasterio.enums import Resampling +from rasterio.vrt import WarpedVRT +#from rio_tiler import * +from rio_tiler.utils import render +from rio_tiler.colormap import cmap +from rio_tiler.utils import linear_rescale +from rio_tiler.reader import read as rio_preview +from rio_tiler import reader +import os +from streamlit_folium import folium_static + +from time import sleep +import logging +# from flask import * #Flask, request, abort +# from ai4e_app_insights_wrapper import AI4EAppInsights +# from ai4e_service import APIService + +from os import getenv +import sys +import glob +from rasterstats import zonal_stats +import fiona +from geojson import Point, Feature, FeatureCollection, dump +from fiona.crs import from_epsg + +#st.set_option('deprecation.showfileUploaderEncoding', False) + +target_wkt = 'PROJCS["World_Cylindrical_Equal_Area",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0],UNIT["degree",0.0174532925199433],AUTHORITY["EPSG","4326"]],PROJECTION["Cylindrical_Equal_Area"],PARAMETER["standard_parallel_1",0],PARAMETER["central_meridian",0],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]]]' + +url = 'https://gfw-files.s3.amazonaws.com/ai4e/{}'#.format(geotiff_name) + +COG_SOURCE = 'Sequestration_Rate_Map_Griscom_Restoration_Areas_cog.tif'#'rcog.tif' +rate_tif = 'Sequestration_Rate_Map_Griscom_Restoration_Areas.tif' +variance_tif = 'Sequestration_Rate_Map_Griscom_Restoration_Areas_Variance.tif' 
+belowground_rate_tif = 'Sequestration_Rate_Map_Griscom_Restoration_Areas_BelowGround_Rate.tif' +belowground_variance_tif = 'Sequestration_Rate_Map_Griscom_Restoration_Areas_BelowGround_Variance.tif' +griscom_tif = 'Griscom.tif' + +files_to_download = [rate_tif, variance_tif, belowground_rate_tif, belowground_variance_tif,COG_SOURCE,griscom_tif] + + +pixel_size_m = 739.34*739.34 #Pixel area in meters +pixel_size_ha = pixel_size_m*0.0001 #Pixel area in hectares + + + + +def download_tif_from_aws(geotiff_name, download_url): + r = requests.get(download_url, stream=True) + chunk_size = 20000000 + with open(os.path.join(os.getcwd(),geotiff_name), 'wb') as fd: + for chunk in r.iter_content(chunk_size): + fd.write(chunk) + +def caclulate_carbon_stats(user_gdf): + griscom_stats = zonal_stats(user_gdf, griscom_tif, stats=['sum']) + griscom_stats = pd.DataFrame(griscom_stats).fillna(0) + for i,row in griscom_stats.iterrows(): + griscom_stats.at[i,'Griscom_Restoration_Area_Hectares'] = row['sum']*pixel_size_ha + griscom_stats = griscom_stats[['Griscom_Restoration_Area_Hectares']] + + rate_stats = None + #try: + rate_stats = zonal_stats(user_gdf, rate_tif, stats=['mean','sum','max','min','count']) + rate_stats = pd.DataFrame(rate_stats).fillna(0) + for i,row in rate_stats.iterrows(): + rate_stats.at[i,'Modeled_AboveGround_Restoration_Area_Hectares'] = row['count']*pixel_size_ha + rate_stats.at[i,'AboveGround_Carbon_Accumulation'] = row['sum']*pixel_size_ha + + rate_stats = rate_stats.rename(columns={ + "mean": "AboveGround_Sequestration_Rate_Mean", + "max": "AboveGround_Sequestration_Rate_Max", + "min": "AboveGround_Sequestration_Rate_Min", + "count": "Pixel_Count" + }) + rate_stats = rate_stats[['Modeled_AboveGround_Restoration_Area_Hectares','AboveGround_Sequestration_Rate_Mean', + 'AboveGround_Carbon_Accumulation', + 'AboveGround_Sequestration_Rate_Max','AboveGround_Sequestration_Rate_Min']] + + belowground_rate_stats = zonal_stats(user_gdf, belowground_rate_tif, 
stats=['mean','sum','max','min','count']) + belowground_rate_stats = pd.DataFrame(belowground_rate_stats).fillna(0) + for i,row in belowground_rate_stats.iterrows(): + belowground_rate_stats.at[i,'Modeled_BelowGround_Restoration_Area_Hectares'] = row['count']*pixel_size_ha + belowground_rate_stats.at[i,'BelowGround_Carbon_Accumulation'] = row['sum']*pixel_size_ha + + belowground_rate_stats = belowground_rate_stats.rename(columns={ + "mean": "BelowGround_Sequestration_Rate_Mean", + "max": "BelowGround_Sequestration_Rate_Max", + "min": "BelowGround_Sequestration_Rate_Min" + }) + belowground_rate_stats = belowground_rate_stats[['Modeled_BelowGround_Restoration_Area_Hectares','BelowGround_Sequestration_Rate_Mean','BelowGround_Carbon_Accumulation', + 'BelowGround_Sequestration_Rate_Max','BelowGround_Sequestration_Rate_Min']] + + variance_stats = zonal_stats(user_gdf, variance_tif, stats=['sum','count']) + variance_stats = pd.DataFrame(variance_stats).fillna(0) + for i,row in variance_stats.iterrows(): + variance_of_accumulation = row['sum']*pixel_size_ha**2 #variance of accumulation is sum of pixel level variances multiplied by conversion to hectares squared + if row['count'] !=0: + variance_of_rate = row['sum']/(row['count']**2) #variance of rate is sum of pixel level variances, divided by n^2 (n=number of pixels) + else: + variance_of_rate = 0 + + std_of_accumulation = np.sqrt(variance_of_accumulation) + std_of_rate = np.sqrt(variance_of_rate) + + variance_stats.at[i,'AboveGround_Sequestration_Rate_Variance'] = variance_of_rate + variance_stats.at[i,'AboveGround_Sequestration_Rate_LowerBound'] = rate_stats.at[i,'AboveGround_Sequestration_Rate_Mean']-1.96*std_of_rate + variance_stats.at[i,'AboveGround_Sequestration_Rate_UpperBound'] = rate_stats.at[i,'AboveGround_Sequestration_Rate_Mean']+1.96*std_of_rate + + variance_stats.at[i,'AboveGround_Carbon_Accumulation_Variance'] = variance_of_accumulation + variance_stats.at[i,'AboveGround_Carbon_Accumulation_LowerBound'] = 
rate_stats.at[i,'AboveGround_Carbon_Accumulation']-1.96*std_of_accumulation + variance_stats.at[i,'AboveGround_Carbon_Accumulation_UpperBound'] = rate_stats.at[i,'AboveGround_Carbon_Accumulation']+1.96*std_of_accumulation + + variance_stats = variance_stats[['AboveGround_Sequestration_Rate_Variance','AboveGround_Sequestration_Rate_LowerBound', + 'AboveGround_Sequestration_Rate_UpperBound','AboveGround_Carbon_Accumulation_Variance', + 'AboveGround_Carbon_Accumulation_LowerBound','AboveGround_Carbon_Accumulation_UpperBound']] + + below_variance_stats = zonal_stats(user_gdf, belowground_variance_tif, stats=['sum','count']) + below_variance_stats = pd.DataFrame(below_variance_stats).fillna(0) + for i,row in below_variance_stats.iterrows(): + variance_of_accumulation = row['sum']*pixel_size_ha**2 #variance of accumulation is sum of pixel level variances multiplied by conversion to hectares squared + if row['count'] !=0: + variance_of_rate = row['sum']/(row['count']**2) #variance of rate is sum of pixel level variances, divided by n^2 (n=number of pixels) + else: + variance_of_rate = 0 + + std_of_accumulation = np.sqrt(variance_of_accumulation) + std_of_rate = np.sqrt(variance_of_rate) + + below_variance_stats.at[i,'BelowGround_Sequestration_Rate_Variance'] = variance_of_rate + below_variance_stats.at[i,'BelowGround_Sequestration_Rate_LowerBound'] = belowground_rate_stats.at[i,'BelowGround_Sequestration_Rate_Mean']-1.96*std_of_rate + below_variance_stats.at[i,'BelowGround_Sequestration_Rate_UpperBound'] = belowground_rate_stats.at[i,'BelowGround_Sequestration_Rate_Mean']+1.96*std_of_rate + + below_variance_stats.at[i,'BelowGround_Carbon_Accumulation_Variance'] = variance_of_accumulation + below_variance_stats.at[i,'BelowGround_Carbon_Accumulation_LowerBound'] = belowground_rate_stats.at[i,'BelowGround_Carbon_Accumulation']-1.96*std_of_accumulation + below_variance_stats.at[i,'BelowGround_Carbon_Accumulation_UpperBound'] = 
belowground_rate_stats.at[i,'BelowGround_Carbon_Accumulation']+1.96*std_of_accumulation + + below_variance_stats = below_variance_stats[['BelowGround_Sequestration_Rate_Variance','BelowGround_Sequestration_Rate_LowerBound', + 'BelowGround_Sequestration_Rate_UpperBound','BelowGround_Carbon_Accumulation_Variance', + 'BelowGround_Carbon_Accumulation_LowerBound','BelowGround_Carbon_Accumulation_UpperBound']] + + + #user_gdf = geopandas.read_file(user_user_gdf,encoding='utf-8') + user_gdf = user_gdf[[x for x in list(user_gdf) if x!='geometry']] + stats = pd.concat([user_gdf,griscom_stats, rate_stats,variance_stats, belowground_rate_stats, below_variance_stats],axis=1) + + stat_columns = ['Griscom_Restoration_Area_Hectares', 'Modeled_AboveGround_Restoration_Area_Hectares', + 'AboveGround_Sequestration_Rate_Mean','AboveGround_Sequestration_Rate_Variance', + 'AboveGround_Sequestration_Rate_LowerBound','AboveGround_Sequestration_Rate_UpperBound', + 'AboveGround_Sequestration_Rate_Max','AboveGround_Sequestration_Rate_Min', + 'AboveGround_Carbon_Accumulation','AboveGround_Carbon_Accumulation_Variance', + 'AboveGround_Carbon_Accumulation_LowerBound','AboveGround_Carbon_Accumulation_UpperBound', + 'Modeled_BelowGround_Restoration_Area_Hectares', + 'BelowGround_Sequestration_Rate_Mean','BelowGround_Sequestration_Rate_Variance', + 'BelowGround_Sequestration_Rate_LowerBound','BelowGround_Sequestration_Rate_UpperBound', + 'BelowGround_Sequestration_Rate_Max','BelowGround_Sequestration_Rate_Min', + 'BelowGround_Carbon_Accumulation','BelowGround_Carbon_Accumulation_Variance', + 'BelowGround_Carbon_Accumulation_LowerBound','BelowGround_Carbon_Accumulation_UpperBound'] + + stats = stats[list(user_gdf)+stat_columns] + return stats + + +def download_link(object_to_download, download_filename, download_link_text): + """ + Generates a link to download the given object_to_download. + object_to_download (str, pd.DataFrame): The object to be downloaded. 
+ download_filename (str): filename and extension of file. e.g. mydata.csv, some_txt_output.txt + download_link_text (str): Text to display for download link. + """ + if isinstance(object_to_download,pd.DataFrame): + object_to_download = object_to_download.to_csv(index=False) + b64 = base64.b64encode(object_to_download.encode()).decode() + return f'{download_link_text}' + + +def make_folium_map(geo_in, w,s,e,n): + """ + function to render a folium map + geo_in: inpit geojson with coordinates in degrees latitude and longitude + w, s, e, n: bounding coordinates, e.g, resulting from GeoDataFrame.total_bounds + """ + m = folium.Map(tiles='CartoDB Positron', ) + input_shape_style = {'fillColor': '#00aacc', 'color': '#0000bb','fillOpacity':0.08,'width':3,'opacity':0.5} + folium.GeoJson(geo_in, name='Uploaded shape preview', style_function=lambda x: input_shape_style).add_to(m) + folium.raster_layers.ImageOverlay('preview.png',name='Carbon raster',bounds=[[s,w],[n,e]],zindex=99,opacity=0.9,interactive=True).add_to(m) + m.fit_bounds([[s,w],[n,e]]) + folium.LayerControl().add_to(m) + return m + +#upload rio-compliant colormap, which is fun +with open('rio-colormap.json') as json_data: + cmap_data = json.load(json_data, object_hook=lambda d: {int(k): [int(i) for i in v] if isinstance(v, list) else v for k, v in d.items()}) + +#@st.cache +def generate_preview_tile(cog_tif, input_gdf): + """ + Hodge podge function to pass windowed GDAL virtual format warp args to the + rio_tiler preview function to create a quick png image to overlay on the + preview map. + + cog_tif: Filepath + input_gdf: GeoDataFrame with a valid crs + ----- + output: writes 'preview.png' file + """ + with rasterio.open(cog_tif) as src: + with WarpedVRT(src, crs='EPSG:3857',resampling=Resampling.bilinear) as vrt: + # Determine the destination tile and its mercator bounds (3857) + left, bottom, right, top = input_gdf.to_crs('EPSG:3857').total_bounds + # Determine the window to use in reading from the dataset. 
+ dst_window = vrt.window(left, bottom, right, top) + tile,mask = rio_preview(vrt, window=dst_window,width=1024,height=1024) + out_data = np.where( + # mask, linear_rescale(tile, in_range=(np.nanmin(tile), np.nanmax(tile)), out_range=[0, 255]), 0 + mask, linear_rescale(tile, in_range=(0.3, 3), out_range=[0, 255]), 0 + ).astype(np.uint8) + #cm = cmap.get("viridis") + cm = cmap_data + buffer = render(out_data, mask=mask, colormap=cm) + with open('preview.png', "wb") as f: + f.write(buffer) + +st.title("Web Application for Calculating Potential Carbon Sequestration Due to Natural Forest Regrowth in Your Area(s) of Interest") +st.markdown("This demo allows the user to query our predicted map of carbon sequestration rates in young, naturally regenerating forests. \ + Given an area/areas of interest, the API will return summary statistics of the area(s) including, the area of restoration potential, the average rate of \ + aboveground and belowground carbon sequestration and accumulation, error bounds for those estimates, and the minimum and maximum observed \ + aboveground and belowground rates in the area of interest. \n\nRestoration areas are reported in hectares,\ + sequestration rates are reported in megagrams of carbon per hectare per year (Mg C/ha/year), and carbon accumulation is reported in \ + megagrams of carbon per year (Mg C/year).") +st.subheader('Instructions:') + +st.markdown('Read through the title for a description of web application above and in the left sidebar. Then scroll to see "Upload a zipped shapefile to initiate analysis" \ + where you can upload a zip file of a shapefile describing your area(s) of interest. A preview of our map and your area(s) of interest will appear \ + then click the "Submit Request" check-box to start the results. 
Once the results are calculated, you will see a preview of the table of results \ +    and a download link to download the results as a CSV, which can be opened in Excel, Pages, or by any text editor.') +st.sidebar.title('About:') +st.sidebar.markdown("We spatially estimated aboveground and belowground rates of carbon sequestration for young, \ +    naturally regenerating forests, where the rates can be applied linearly for the first 30 years of forest regrowth. \ +    The map was made by training a Random Forest based machine learning model \ +    on forest inventory data using about 80 covariates, with variables covering soil, topography, and climate. \n\nOur map is available at 1-kilometer resolution. \ +    \n\nTo estimate belowground carbon sequestration rates, we applied a conversion factor based on \ +    root:shoot ratios defined by the [IPCC](https://www.ipcc.ch/report/2019-refinement-to-the-2006-ipcc-guidelines-for-national-greenhouse-gas-inventories/). \ +    \n\nWe clipped our prediction map to restoration areas defined by \ +    [Griscom et al 2017](https://www.pnas.org/content/114/44/11645). Only certain biomes from [RESOLVE Ecoregions](https://ecoregions2017.appspot.com/) \ +    were included in our areas of model predictions, and the root:shoot ratios were only available for certain biomes. 
Therefore we also provide in the results the \ + restoration potential area with modeled aboveground rates and modeled belowground ratese.\n\n This map and web app can be \ + used by governments, land use planners, and restoration organizations to assess the potential carbon sequestration potential from naturally \ + regenerating forests in a given region.\n\nThis web app was put together by the [World Resources Institute](https://www.wri.org/) and [Global Forest Watch](globalforestwatch.org).") +st.sidebar.markdown('This application is maintained by Kristine Lister (kristine.lister@wri.org) with many thanks to David Gibbs and Alex Kovac for help in developing.') + + +## UPLOADER PROCESS +uploaded_file = st.file_uploader("Upload a zipped shapefile to initiate analysis", type=['zip']) ### TO DO: ADD geojson file upload and error handling + + +if uploaded_file: + st.subheader('Downloading data') + st.markdown('Starting downloading geotiffs') + for file_name in files_to_download: + if os.path.exists(file_name): + st.markdown('- File '+file_name+' already downloaded') + else: + st.markdown('- Downloading file '+file_name) + download_tif_from_aws(file_name, url.format(file_name)) + + st.markdown('Finished downloading geotiffs') + with tempfile.TemporaryDirectory() as tmpdir: + z = zipfile.ZipFile(uploaded_file) #z.namelist() printed later... + z.extractall(tmpdir) + filenames = [y for y in sorted(z.namelist()) for ending in ['shp'] if y.endswith(ending)] + shf = tmpdir+'/'+filenames[0] # TO DO: If multiple .shp files in zip, current script will only open up the first in the directory + driver = ogr.GetDriverByName('ESRI Shapefile') + shape = driver.Open(shf) + layer= shape.GetLayer() + crs = layer.GetSpatialRef() + gdf = geopandas.GeoDataFrame.from_file(shf, crs=crs.ExportToWkt()) + gdf = gdf.to_crs(target_wkt) + df_attrs = pd.DataFrame(gdf.drop(columns='geometry')) + #target_geojson = geojson.loads(gdf.to_json()) #may not be needed? 
Should reproject to target source + gdf_4326 = gdf.to_crs("EPSG:4326") + w,s,e,n= gdf_4326.total_bounds + geo_4326 = geojson.loads(gdf_4326.to_json()) + + try: + generate_preview_tile(COG_SOURCE, gdf_4326) + + ### DISPLAY UPLOADED FILE ON PREVIEW MAP + st.subheader("Map preview") + m = make_folium_map(geo_4326,w,s,e,n) + + #m.fit_bounds([[s,w],[n,e]]) + #st.markdown(m._repr_html_(), unsafe_allow_html=True) + folium_static(m) + except Exception as e: + logging.info("Folium preview failed with exception") + logging.info(e) + st.markdown('Folium preview failed') + st.markdown(e) + some_space = st.markdown('---') + + + ### BUTTON TO APPROVE AREA + st.subheader('Check checkbox above to initiate analysis') + agree = st.checkbox("Submit request") + # if not agree: + # + # #st.json(z.namelist()) + + if agree: + ### PASS GEOJSON TO API + + result = caclulate_carbon_stats(gdf) + legend_df = pd.read_csv('Result_Column_Names.csv') + st.header('Results shown below.') + ### MAKE DOWNLOADABLE CSV + tmp_download_link = download_link(result, 'carbon_data.csv', 'Click to download csv of results!') + st.markdown(tmp_download_link, unsafe_allow_html=True) + tmp_download_legend_link = download_link(legend_df, 'carbon_data_legend.csv', 'Click to download csv of legend for results!') + st.markdown(tmp_download_legend_link, unsafe_allow_html=True) + #st.write(result) + st.table(result) + st.header('Legend: ') + st.table(legend_df) + + #st.balloons() # uncomment for cheeky balloon animation + diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/config.toml b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/config.toml new file mode 100644 index 0000000..bf94bd7 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/config.toml @@ -0,0 +1,133 @@ +# Below are all the sections and options you can have in ~/.streamlit/config.toml. 
+ +[global] + +# By default, Streamlit checks if the Python watchdog module is available and, if not, prints a warning asking for you to install it. The watchdog module is not required, but highly recommended. It improves Streamlit's ability to detect changes to files in your filesystem. +# If you'd like to turn off this warning, set this to True. +# Default: false +disableWatchdogWarning = false + +# Configure the ability to share apps to the cloud. +# Should be set to one of these values: - "off" : turn off sharing. - "s3" : share to S3, based on the settings under the [s3] section of this config file. +# Default: "off" +sharingMode = "off" + +# If True, will show a warning when you run a Streamlit-enabled script via "python my_script.py". +# Default: true +showWarningOnDirectExecution = true + +# Level of logging: 'error', 'warning', 'info', or 'debug'. +# Default: 'info' +logLevel = "debug" + + +[client] + +# Whether to enable st.cache. +# Default: true +caching = true + +# If false, makes your Streamlit script not draw to a Streamlit app. +# Default: true +displayEnabled = true + + +[runner] + +# Allows you to type a variable or string by itself in a single line of Python code to write it to the app. +# Default: true +magicEnabled = true + +# Install a Python tracer to allow you to stop or pause your script at any point and introspect it. As a side-effect, this slows down your script's execution. +# Default: false +installTracer = false + +# Sets the MPLBACKEND environment variable to Agg inside Streamlit to prevent Python crashing. +# Default: true +fixMatplotlib = true + + +[server] + +# List of folders that should not be watched for changes. Relative paths will be taken as relative to the current working directory. +# Example: ['/home/user1/env', 'relative/path/to/folder'] +# Default: [] +folderWatchBlacklist = [''] + + + +# If false, will attempt to open a browser window on start. 
+# Default: false unless (1) we are on a Linux box where DISPLAY is unset, or (2) server.liveSave is set. +headless = true + +# Immediately share the app in such a way that enables live monitoring, and post-run analysis. +# Default: false +liveSave = false + +# Automatically rerun script when the file is modified on disk. +# Default: false +runOnSave = false + +# The port where the server will listen for client and browser connections. +# Default: 8501 +port = 8509 + +# Enables support for Cross-Origin Resource Sharing, for added security. +# Default: true +enableCORS = false + + +[browser] + +# Internet address of the server that the browser should connect to. Can be IP address or DNS name. +# Default: 'localhost' +serverAddress = "172.17.0.3" #"0.0.0.0" + +# Whether to send usage statistics to Streamlit. +# Default: true +gatherUsageStats = true + +# Port that the browser should use to connect to the server when in liveSave mode. +# Default: whatever value is set in server.port. +serverPort = 80 + +[s3] + +# Name of the AWS S3 bucket to save apps. +# Default: (unset) +# bucket = + +# URL root for external view of Streamlit apps. +# Default: (unset) +# url = "kristine.com" + +# Access key to write to the S3 bucket. +# Leave unset if you want to use an AWS profile. +# Default: (unset) +# accessKeyId = + +# Secret access key to write to the S3 bucket. +# Leave unset if you want to use an AWS profile. +# Default: (unset) +#secretAccessKey = + +# Make the shared app visible only to users who have been granted view permission. If you are interested in this option, contact us at support@streamlit.io. +# Default: false +# requireLoginToView = false + +# The "subdirectory" within the S3 bucket where to save apps. +# S3 calls paths "keys" which is why the keyPrefix is like a subdirectory. Use "" to mean the root directory. +# Default: "" +keyPrefix = "" + +# AWS region where the bucket is located, e.g. "us-west-2". 
+# Default: (unset) +# region = + +# AWS credentials profile to use. +# Leave unset to use your default profile. +# Default: (unset) +# profile = + +[deprecation] +showfileUploaderEncoding = false \ No newline at end of file diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/credentials.toml b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/credentials.toml new file mode 100644 index 0000000..bf20fa1 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/credentials.toml @@ -0,0 +1,3 @@ +[general] +email="" + diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/requirements.txt b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/requirements.txt new file mode 100644 index 0000000..312bdc8 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/requirements.txt @@ -0,0 +1,12 @@ +streamlit +pandas +numpy +folium +geojson +geopandas +GDAL +rasterio +rasterstats +fiona +streamlit-folium +rio-tiler \ No newline at end of file diff --git a/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/rio-colormap.json b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/rio-colormap.json new file mode 100644 index 0000000..2d0dec0 --- /dev/null +++ b/MappingGlobalCarbon/CalculateRatesInRegions/SreamlitApp-CarbonRate-GriscomAreas/rio-colormap.json @@ -0,0 +1 @@ +{"0": [255, 255, 204, 255], "1": [255, 254, 202, 255], "2": [255, 254, 200, 255], "3": [255, 253, 199, 255], "4": [255, 252, 197, 255], "5": [255, 252, 195, 255], "6": [254, 251, 193, 255], "7": [254, 250, 192, 255], "8": [254, 250, 190, 255], "9": [254, 249, 188, 255], "10": [254, 249, 186, 255], "11": [254, 248, 184, 255], "12": [254, 247, 183, 255], "13": [254, 247, 181, 255], "14": [254, 246, 179, 255], "15": [253, 245, 177, 255], 
"16": [253, 245, 175, 255], "17": [253, 244, 174, 255], "18": [253, 243, 172, 255], "19": [253, 243, 170, 255], "20": [253, 242, 168, 255], "21": [253, 241, 166, 255], "22": [252, 240, 165, 255], "23": [252, 240, 163, 255], "24": [252, 239, 161, 255], "25": [252, 238, 159, 255], "26": [252, 238, 157, 255], "27": [252, 237, 156, 255], "28": [251, 236, 154, 255], "29": [251, 235, 152, 255], "30": [251, 234, 150, 255], "31": [251, 234, 148, 255], "32": [251, 233, 146, 255], "33": [250, 232, 144, 255], "34": [250, 231, 143, 255], "35": [250, 230, 141, 255], "36": [250, 229, 139, 255], "37": [250, 228, 137, 255], "38": [249, 227, 135, 255], "39": [249, 226, 133, 255], "40": [249, 225, 132, 255], "41": [249, 224, 130, 255], "42": [248, 223, 128, 255], "43": [248, 222, 126, 255], "44": [248, 221, 124, 255], "45": [248, 220, 123, 255], "46": [247, 218, 121, 255], "47": [247, 217, 119, 255], "48": [247, 216, 117, 255], "49": [246, 215, 116, 255], "50": [246, 214, 114, 255], "51": [246, 212, 112, 255], "52": [246, 211, 111, 255], "53": [245, 210, 109, 255], "54": [245, 208, 108, 255], "55": [245, 207, 106, 255], "56": [244, 206, 105, 255], "57": [244, 204, 104, 255], "58": [244, 203, 102, 255], "59": [243, 202, 101, 255], "60": [243, 200, 100, 255], "61": [243, 199, 99, 255], "62": [242, 197, 98, 255], "63": [242, 196, 97, 255], "64": [242, 195, 96, 255], "65": [241, 193, 95, 255], "66": [241, 192, 94, 255], "67": [241, 191, 93, 255], "68": [241, 189, 92, 255], "69": [240, 188, 91, 255], "70": [240, 187, 91, 255], "71": [240, 185, 90, 255], "72": [239, 184, 89, 255], "73": [239, 183, 89, 255], "74": [239, 181, 88, 255], "75": [238, 180, 88, 255], "76": [238, 179, 87, 255], "77": [238, 178, 87, 255], "78": [238, 176, 87, 255], "79": [237, 175, 86, 255], "80": [237, 174, 86, 255], "81": [237, 173, 86, 255], "82": [237, 171, 85, 255], "83": [236, 170, 85, 255], "84": [236, 169, 85, 255], "85": [236, 168, 85, 255], "86": [236, 167, 84, 255], "87": [235, 165, 84, 255], "88": 
[235, 164, 84, 255], "89": [235, 163, 84, 255], "90": [235, 162, 84, 255], "91": [234, 161, 83, 255], "92": [234, 159, 83, 255], "93": [234, 158, 83, 255], "94": [234, 157, 83, 255], "95": [233, 156, 83, 255], "96": [233, 155, 83, 255], "97": [233, 154, 83, 255], "98": [233, 152, 82, 255], "99": [232, 151, 82, 255], "100": [232, 150, 82, 255], "101": [232, 149, 82, 255], "102": [231, 148, 82, 255], "103": [231, 147, 82, 255], "104": [231, 145, 82, 255], "105": [231, 144, 82, 255], "106": [230, 143, 82, 255], "107": [230, 142, 82, 255], "108": [230, 141, 81, 255], "109": [230, 140, 81, 255], "110": [229, 138, 81, 255], "111": [229, 137, 81, 255], "112": [229, 136, 81, 255], "113": [228, 135, 81, 255], "114": [228, 134, 81, 255], "115": [228, 132, 81, 255], "116": [227, 131, 81, 255], "117": [227, 130, 81, 255], "118": [227, 129, 80, 255], "119": [226, 128, 80, 255], "120": [226, 126, 80, 255], "121": [225, 125, 80, 255], "122": [225, 124, 80, 255], "123": [224, 123, 80, 255], "124": [224, 121, 80, 255], "125": [223, 120, 80, 255], "126": [223, 119, 79, 255], "127": [222, 118, 79, 255], "128": [222, 116, 79, 255], "129": [221, 115, 79, 255], "130": [221, 114, 79, 255], "131": [220, 112, 79, 255], "132": [219, 111, 79, 255], "133": [218, 110, 78, 255], "134": [218, 108, 78, 255], "135": [217, 107, 78, 255], "136": [216, 106, 78, 255], "137": [215, 105, 78, 255], "138": [214, 103, 78, 255], "139": [213, 102, 77, 255], "140": [212, 101, 77, 255], "141": [211, 99, 77, 255], "142": [210, 98, 77, 255], "143": [208, 97, 76, 255], "144": [207, 95, 76, 255], "145": [206, 94, 76, 255], "146": [205, 93, 76, 255], "147": [203, 92, 75, 255], "148": [202, 91, 75, 255], "149": [200, 89, 75, 255], "150": [199, 88, 75, 255], "151": [197, 87, 74, 255], "152": [196, 86, 74, 255], "153": [194, 85, 74, 255], "154": [193, 84, 73, 255], "155": [191, 83, 73, 255], "156": [189, 82, 72, 255], "157": [188, 81, 72, 255], "158": [186, 80, 72, 255], "159": [184, 79, 71, 255], "160": [182, 78, 71, 
255], "161": [181, 77, 70, 255], "162": [179, 77, 70, 255], "163": [177, 76, 70, 255], "164": [175, 75, 69, 255], "165": [174, 74, 69, 255], "166": [172, 74, 68, 255], "167": [170, 73, 68, 255], "168": [168, 72, 67, 255], "169": [166, 71, 67, 255], "170": [165, 71, 66, 255], "171": [163, 70, 66, 255], "172": [161, 70, 65, 255], "173": [159, 69, 65, 255], "174": [157, 68, 64, 255], "175": [156, 68, 63, 255], "176": [154, 67, 63, 255], "177": [152, 67, 62, 255], "178": [150, 66, 62, 255], "179": [148, 66, 61, 255], "180": [147, 65, 60, 255], "181": [145, 65, 60, 255], "182": [143, 64, 59, 255], "183": [141, 64, 58, 255], "184": [139, 63, 58, 255], "185": [138, 63, 57, 255], "186": [136, 62, 56, 255], "187": [134, 62, 55, 255], "188": [132, 61, 55, 255], "189": [131, 61, 54, 255], "190": [129, 60, 53, 255], "191": [127, 59, 52, 255], "192": [125, 59, 52, 255], "193": [123, 58, 51, 255], "194": [122, 58, 50, 255], "195": [120, 57, 49, 255], "196": [118, 57, 49, 255], "197": [116, 56, 48, 255], "198": [115, 56, 47, 255], "199": [113, 55, 46, 255], "200": [111, 55, 45, 255], "201": [110, 54, 45, 255], "202": [108, 54, 44, 255], "203": [106, 53, 43, 255], "204": [104, 53, 42, 255], "205": [103, 52, 41, 255], "206": [101, 51, 40, 255], "207": [99, 51, 40, 255], "208": [97, 50, 39, 255], "209": [96, 50, 38, 255], "210": [94, 49, 37, 255], "211": [92, 49, 36, 255], "212": [91, 48, 36, 255], "213": [89, 48, 35, 255], "214": [87, 47, 34, 255], "215": [86, 47, 33, 255], "216": [84, 46, 32, 255], "217": [82, 46, 32, 255], "218": [81, 45, 31, 255], "219": [79, 44, 30, 255], "220": [78, 44, 29, 255], "221": [76, 43, 29, 255], "222": [74, 43, 28, 255], "223": [73, 42, 27, 255], "224": [71, 42, 26, 255], "225": [69, 41, 26, 255], "226": [68, 41, 25, 255], "227": [66, 40, 24, 255], "228": [65, 40, 23, 255], "229": [63, 39, 23, 255], "230": [62, 39, 22, 255], "231": [60, 38, 21, 255], "232": [58, 38, 21, 255], "233": [57, 37, 20, 255], "234": [55, 37, 19, 255], "235": [54, 36, 19, 
255], "236": [52, 36, 18, 255], "237": [51, 35, 17, 255], "238": [49, 35, 17, 255], "239": [48, 34, 16, 255], "240": [46, 34, 15, 255], "241": [45, 33, 14, 255], "242": [43, 33, 13, 255], "243": [42, 32, 13, 255], "244": [41, 31, 12, 255], "245": [39, 31, 11, 255], "246": [38, 30, 10, 255], "247": [36, 30, 9, 255], "248": [35, 29, 8, 255], "249": [34, 29, 7, 255], "250": [32, 28, 6, 255], "251": [31, 28, 5, 255], "252": [30, 27, 4, 255], "253": [28, 27, 3, 255], "254": [27, 26, 2, 255], "255": [26, 26, 1, 255]} \ No newline at end of file