@@ -922,6 +922,146 @@ def graph_curtailment_per_tech(tools):
922922 df .plot (ax = ax , kind = "line" , xlabel = "Period" , marker = "x" , ** kwargs )
923923
924924 # Set the y-axis to use percent
925- ax .yaxis .set_major_formatter (tools .mplt .ticker .PercentFormatter (1.0 ))
925+ ax .yaxis .set_major_formatter (tools .plt .ticker .PercentFormatter (1.0 ))
926926 # Horizontal line at 100%
927927 # ax.axhline(y=1, linestyle="--", color='b')
928+
929+
@graph(
    "energy_balance_2",
    title="Balance between demand, generation and storage for last period",
    note="Dashed green and red lines are total generation and total demand (incl. transmission losses),"
    " respectively.\nDotted line is the total state of charge (scaled for readability)."
    "\nWe used a 14-day rolling mean to smoothen out values.",
)
def graph_energy_balance_2(tools):
    """Plot per-technology generation against total demand and storage state of
    charge for the last period, smoothed with a centered 14-day rolling window.

    Surplus (generation above demand) is shaded green, deficit shaded red.
    """

    def _last_period(df):
        # Keep only rows belonging to the newest period, indexed by datetime.
        df = df.astype({"period": int})
        df = df[df["period"] == df["period"].max()].drop(columns="period")
        return df.set_index("datetime")

    # Generation dispatched per technology and timepoint (storage excluded,
    # since charging/discharging is captured separately via state of charge).
    gen = tools.get_dataframe(
        "dispatch.csv",
        usecols=[
            "timestamp",
            "gen_tech",
            "gen_energy_source",
            "DispatchGen_MW",
            "scenario_name",
        ],
    ).rename({"DispatchGen_MW": "value"}, axis=1)
    gen = tools.transform.gen_type(gen)
    gen = gen.groupby(["timestamp", "gen_type"], as_index=False).sum()
    gen = gen[gen["gen_type"] != "Storage"]

    # Zonal demand, aggregated across load zones; transmission losses are
    # folded into demand and the sign flipped so demand reads positive.
    demand = tools.get_dataframe(
        "load_balance.csv",
        usecols=["timestamp", "zone_demand_mw", "TXPowerNet", "scenario_name"],
    )
    demand = demand.groupby(["timestamp"], as_index=False).sum()
    demand["value"] = (demand["zone_demand_mw"] + demand["TXPowerNet"]) * -1
    demand = demand[["timestamp", "value"]]

    # Attach timestamp metadata and restrict both frames to the last period.
    gen = _last_period(tools.transform.timestamp(gen))
    demand = _last_period(tools.transform.timestamp(demand))

    # MW-per-timepoint -> TWh (duration-weighted).
    gen["value"] *= gen["tp_duration"] / 1e6
    demand["value"] *= demand["tp_duration"] / 1e6

    window_days = 14
    freq = str(window_days) + "D"
    half_window = tools.pd.Timedelta(freq) / 2

    def _smooth(df):
        # Centered rolling sum rescaled to an average day; trim the window
        # edges where the rolling window is only partially populated.
        df = df.rolling(freq, center=True).value.sum().reset_index()
        df["value"] /= window_days
        keep = (df.datetime.min() + half_window < df.datetime) & (
            df.datetime < df.datetime.max() - half_window
        )
        return df[keep]

    gen = _smooth(gen.groupby("gen_type", as_index=False))
    demand = _smooth(demand).set_index("datetime")["value"]

    # Total storage state of charge per timepoint, in TWh.
    charge = tools.get_dataframe(
        "StateOfCharge.csv", dtype={"STORAGE_GEN_TPS_1": str}
    ).rename(columns={"STORAGE_GEN_TPS_2": "timepoint", "StateOfCharge": "value"})
    charge = charge.groupby(["timepoint"], as_index=False).sum()
    charge["value"] /= 1e6  # MWh -> TWh
    peak_charge = charge["value"].max()

    charge = _last_period(
        tools.transform.timestamp(charge, use_timepoint=True, key_col="timepoint")
    )
    charge = charge.rolling(freq, center=True)["value"].mean().reset_index()
    trim = (charge.datetime.min() + half_window < charge.datetime) & (
        charge.datetime < charge.datetime.max() - half_window
    )
    charge = charge[trim].set_index("datetime")["value"]

    # Wide format, columns ordered by variability so steadier technologies
    # stack first; drop all-zero series to keep the legend readable.
    gen = gen[gen["value"] != 0]
    gen = gen.pivot(columns="gen_type", index="datetime", values="value")
    gen = gen[gen.std().sort_values().index].rename_axis("Technology", axis=1)
    total_gen = gen.sum(axis=1)

    top = max(total_gen.max(), demand.max())

    # Rescale state of charge so its peak matches the top of the plot.
    charge *= top / peak_charge

    ax = tools.get_axes(ylabel="Average Daily Generation (TWh)")
    ax.set_ylim(0, top * 1.05)
    gen.plot(ax=ax, color=tools.get_colors())
    charge.plot(ax=ax, color="black", linestyle="dotted")
    demand.plot(ax=ax, color="red", linestyle="dashed")
    total_gen.plot(ax=ax, color="green", linestyle="dashed")
    # Shade surplus in green and deficit in red.
    for region, shade in ((demand < total_gen, "green"), (demand > total_gen, "red")):
        ax.fill_between(
            total_gen.index,
            total_gen.values,
            demand.values,
            alpha=0.2,
            where=region,
            facecolor=shade,
        )
1056+
1057+
@graph("dispatch_map", title="Dispatched electricity per load zone")
def dispatch_map(tools):
    """Draw a map of pie charts showing annual dispatched energy per load zone,
    broken down by generation technology."""
    annual = tools.get_dataframe("dispatch_zonal_annual_summary.csv").rename(
        {"Energy_GWh_typical_yr": "value"}, axis=1
    )
    annual = tools.transform.gen_type(annual)
    # Aggregate dispatched energy per (technology, load zone) pair.
    per_zone = annual.groupby(["gen_type", "gen_load_zone"], as_index=False)[
        "value"
    ].sum()
    tools.maps.graph_pie_chart(per_zone)
0 commit comments