/
tools.jl
1121 lines (895 loc) · 49.4 KB
/
tools.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# XXX prints dataframe to csv file
"""
```julia
printObject(print_df::DataFrame, model_object::anyModel)
```
Writes a DataFrame of parameters, constraints, or variables to a `.csv` file in readable format (strings instead of ids). See [Individual elements](@ref).
"""
function printObject(print_df::DataFrame,anyM::anyModel; fileName::String = "", rtnDf::Tuple{Vararg{Symbol,N} where N} = (:csv,), filterFunc::Function = x -> true)
sets = anyM.sets
options = anyM.options
colNam_arr = namesSym(print_df)
cntCol_int = size(colNam_arr,1)
# filters values according to filter function,
print_df = copy(filter(filterFunc,print_df))
# converts variable column to value of variable
if :var in colNam_arr
print_df[!,:var] = value.(print_df[!,:var])
end
for i = 1:cntCol_int
lookUp_sym = Symbol(split(String(colNam_arr[i]),"_")[1])
if !(lookUp_sym in keys(sets)) && lookUp_sym == :eqn
print_df[!,i] = string.(print_df[!,i])
elseif lookUp_sym in keys(sets)
print_df[!,i] = map(x -> createFullString(x,sets[lookUp_sym]),print_df[!,i])
end
end
# rename columns
colName_dic = Dict(:Ts_dis => :timestep_dispatch, :Ts_exp => :timestep_expansion, :Ts_expSup => :timestep_superordinate_expansion, :Ts_disSup => :timestep_superordinate_dispatch,
:R => :region, :R_dis => :region_dispatch, :R_exp => :region_expansion, :R_to => :region_to, :R_from => :region_from, :C => :carrier, :Te => :technology,
:cns => :constraint, :var => :variable)
rename!(print_df,map(x -> x in keys(colName_dic) ? colName_dic[x] : x, namesSym(print_df)) )
if :csv in rtnDf
CSV.write("$(options.outDir)/$(fileName)_$(options.outStamp).csv", print_df)
end
if :csvDf in rtnDf return print_df end
end
# <editor-fold desc="report results to csv files"
"""
```julia
reportResults(reportType::Symbol, model_object::anyModel; rtnOpt::Tuple = (:csv,))
```
Writes results to `.csv` file with content depending on `reportType`. Available types are `:summary`, `:exchange`, and `:costs`. See [Analysed results](@ref).
"""
reportResults(reportType::Symbol,anyM::anyModel; kwargs...) = reportResults(Val{reportType}(),anyM::anyModel; kwargs...)
# XXX summary of all capacity and dispatch results
function reportResults(objGrp::Val{:summary},anyM::anyModel; wrtSgn::Bool = true, rtnOpt::Tuple{Vararg{Symbol,N} where N} = (:csv,))
techSym_arr = collect(keys(anyM.parts.tech))
allData_df = DataFrame(Ts_disSup = Int[], R_dis = Int[], Te = Int[], C = Int[], variable = Symbol[], value = Float64[])
# XXX get demand values
dem_df = copy(anyM.parts.bal.par[:dem].data)
if !isempty(dem_df)
dem_df[!,:lvlR] = map(x -> anyM.cInfo[x].rDis, :C in namesSym(dem_df) ? dem_df[!,:C] : filter(x -> x != 0,getfield.(values(anyM.sets[:C].nodes),:idx)))
# aggregates demand values
# artificially add dispatch dimensions, if none exist
if :Ts_dis in namesSym(dem_df)
ts_dic = Dict(x => anyM.sets[:Ts].nodes[x].lvl == anyM.supTs.lvl ? x : getAncestors(x,anyM.sets[:Ts],:int,anyM.supTs.lvl)[end] for x in unique(dem_df[!,:Ts_dis]))
dem_df[!,:Ts_disSup] = map(x -> ts_dic[x],dem_df[!,:Ts_dis])
else
dem_df[!,:Ts_disSup] .= anyM.supTs.step
dem_df = flatten(dem_df,:Ts_disSup)
end
dem_df[!,:val] = dem_df[!,:val] .* getResize(dem_df,anyM.sets[:Ts],anyM.supTs) ./ anyM.options.redStep
allR_arr = :R_dis in namesSym(dem_df) ? unique(dem_df[!,:R_dis]) : getfield.(getNodesLvl(anyM.sets[:R],1),:idx)
allLvlR_arr = unique(dem_df[!,:lvlR])
r_dic = Dict((x[1], x[2]) => (anyM.sets[:R].nodes[x[1]].lvl < x[2] ? getDescendants(x[1], anyM.sets[:R],false,x[2]) : getAncestors(x[1],anyM.sets[:R],:int,x[2])[end]) for x in Iterators.product(allR_arr,allLvlR_arr))
if :R_dis in namesSym(dem_df)
dem_df[!,:R_dis] = map(x -> r_dic[x.R_dis,x.lvlR],eachrow(dem_df[!,[:R_dis,:lvlR]]))
else
dem_df[!,:R_dis] .= 0
end
dem_df = combine(groupby(dem_df,[:Ts_disSup,:R_dis,:C]),:val => ( x -> sum(x) / 1000) => :value)
dem_df[!,:Te] .= 0
dem_df[!,:variable] .= :demand
if wrtSgn dem_df[!,:value] = dem_df[!,:value] .* -1 end
allData_df = vcat(allData_df,dem_df)
end
# XXX get expansion and capacity variables
for t in techSym_arr
part = anyM.parts.tech[t]
tech_df = DataFrame(Ts_disSup = Int[], R_dis = Int[], Te = Int[], C = Int[], variable = Symbol[], value = Float64[])
# get installed capacity values
for va in intersect(keys(part.var),(:expConv, :expStIn, :expStOut, :expStSize, :expExc, :capaConv, :capaStIn, :capaStOut, :capaStSize, :oprCapaConv, :oprCapaStIn, :oprCapaStOut, :oprCapaStSize))
capa_df = copy(part.var[va])
if va in (:expConv, :expStIn, :expStOut, :expStSize)
capa_df = flatten(capa_df,:Ts_expSup)
select!(capa_df,Not(:Ts_disSup))
rename!(capa_df,:Ts_expSup => :Ts_disSup)
end
# set carrier column to zero for conversion capacities and add a spatial dispatch column
if va in (:expConv,:capaConv,:oprCapaConv)
capa_df[!,:C] .= 0
capa_df[!,:R_dis] = map(x -> getAncestors(x,anyM.sets[:R],:int,part.balLvl.ref[2])[end],capa_df[!,:R_exp])
else
capa_df[!,:R_dis] = map(x -> getAncestors(x.R_exp,anyM.sets[:R],:int,anyM.cInfo[x.C].rDis)[end],eachrow(capa_df))
end
select!(capa_df,Not(:R_exp))
# aggregate values and add to tech data frame
capa_df = combine(groupby(capa_df,[:Ts_disSup,:R_dis,:C,:Te]),:var => ( x -> value.(sum(x))) => :value)
capa_df[!,:variable] .= va
tech_df = vcat(tech_df,capa_df)
end
# add tech dataframe to overall data frame
allData_df = vcat(allData_df,tech_df)
end
# XXX get dispatch variables
for va in (:use, :gen, :stIn, :stOut, :stExtIn, :stExtOut, :stIntIn, :stIntOut, :emission, :crt, :lss, :trdBuy, :trdSell)
# get all variables, group them and get respective values
allVar_df = getAllVariables(va,anyM)
if isempty(allVar_df) continue end
disp_df = combine(groupby(allVar_df,intersect(intCol(allVar_df),[:Ts_disSup,:R_dis,:C,:Te])),:var => (x -> value(sum(x))) => :value)
# scales values to twh (except for emissions of course)
if va != :emission disp_df[!,:value] = disp_df[!,:value] ./ 1000 end
disp_df[!,:variable] .= va
# add empty values for non-existing columns
for dim in (:Te,:C)
if !(dim in namesSym(disp_df))
disp_df[:,dim] .= 0
end
end
# adjust sign, if enabled
if wrtSgn && va in (:use,:stIn,:stIntIn,:stExtIn,:crt,:trdSell) disp_df[!,:value] = disp_df[!,:value] .* -1 end
allData_df = vcat(allData_df,disp_df)
end
# XXX get exchange variables aggregated by import and export
allExc_df = getAllVariables(:exc,anyM)
if !isempty(allExc_df)
# add losses to all exchange variables
allExc_df = getExcLosses(convertExcCol(allExc_df),anyM.parts.exc.par,anyM.sets)
# compute export and import of each region, losses are considered at import
excFrom_df = rename(combine(groupby(allExc_df,[:Ts_disSup,:R_a,:C]),:var => ( x -> value(sum(x))/1000) => :value),:R_a => :R_dis)
excFrom_df[!,:variable] .= :export; excFrom_df[!,:Te] .= 0
if wrtSgn excFrom_df[!,:value] = excFrom_df[!,:value] .* -1 end
excTo_df = rename(combine(x -> (value = value(dot(x.var,(1 .- x.loss)))/1000,),groupby(allExc_df,[:Ts_disSup,:R_b,:C])),:R_b => :R_dis)
excTo_df[!,:variable] .= :import; excTo_df[!,:Te] .= 0
allData_df = vcat(allData_df,vcat(excFrom_df,excTo_df))
end
# XXX get full load hours for conversion, storage input and storage output
if anyM.options.decomm == :none
flh_dic = Dict(:capaConv => :flhConv, :capaStIn => :flhStIn, :capaStOut => :flhStOut)
else
flh_dic = Dict(:oprCapaConv => :flhConv, :oprCapaStIn => :flhStIn, :oprCapaStOut => :flhStOut)
end
flhAss_dic = Dict(:capaConv => [:use,:stIntOut,:gen,:stIntIn],:oprCapaConv => [:use,:stIntOut,:gen,:stIntIn], :capaStIn => [:stIntIn,:stExtIn],:oprCapaStIn => [:stIntIn,:stExtIn], :capaStOut => [:stIntOut,:stExtOut],:oprCapaStOut => [:stIntOut,:stExtOut])
for flhCapa in collect(keys(flh_dic))
# gets relevant capacity variables
capaFlh_df = rename(filter(x -> x.variable == flhCapa && x.value > 0.0, allData_df),:variable => :varCapa, :value => :valCapa)
if isempty(capaFlh_df) continue end
# expand with relevant dispatch variables
capaFlh_df[!,:varDisp] = map(x -> flhAss_dic[x.varCapa],eachrow(capaFlh_df))
capaFlh_df = flatten(capaFlh_df,:varDisp)
intCol(capaFlh_df,:varDisp)
# omit carrier in case of conversion capacity
if flhCapa in (:capaConv,:oprCapaConv) select!(capaFlh_df,Not([:C])) end
# joins capacity and with relevant dispatch variables
allFLH_df = innerjoin(capaFlh_df,rename(allData_df,:variable => :varDisp), on = intCol(capaFlh_df,:varDisp))
# remove row for gen variables in case gen exists as well and compute full load hours for conversion
allFLH_df = combine(combine(y -> filter(x -> :use in y[!,:varDisp] ? x.varDisp != :gen : true,y), groupby(allFLH_df, intCol(capaFlh_df,:varCapa)), ungroup = false), AsTable([:value,:valCapa]) => (x -> 1000*abs(sum(x.value)/x.valCapa[1])) => :value)
allFLH_df[!,:variable] = map(x -> flh_dic[x], allFLH_df[!,:varCapa])
# adds carrier again in case of conversion capacity
allFLH_df[!,:C] .= 0
allData_df = vcat(allData_df, select(allFLH_df,Not([:varCapa])))
end
# XXX comptue storage cycles
if anyM.options.decomm == :none
cyc_dic = Dict(:capaStIn => :cycStIn, :capaStOut => :cycStOut)
else
cyc_dic = Dict(:oprCapaStIn => :cycStIn, :oprCapaStOut => :cycStOut)
end
cycAss_dic = Dict(:oprCapaStIn => [:stIntIn,:stExtIn], :capaStIn => [:stIntIn,:stExtIn], :oprCapaStOut => [:stIntOut,:stExtOut], :capaStOut => [:stIntOut,:stExtOut])
for cycCapa in collect(keys(cyc_dic))
capaCyc_df = rename(filter(x -> x.variable == :capaStSize && x.value > 0.0, allData_df),:variable => :varCapa, :value => :valCapa)
if isempty(capaCyc_df) continue end
# expand with relevant dispatch variables
capaCyc_df[!,:varDisp] = map(x -> cycAss_dic[cycCapa],eachrow(capaCyc_df))
capaCyc_df = flatten(capaCyc_df,:varDisp)
# joins capacity and with relevant dispatch variables
capaCyc_df = innerjoin(capaCyc_df,rename(allData_df,:variable => :varDisp), on = intCol(capaCyc_df,:varDisp))
# compute cycling value and add to overall
capaCyc_df = combine(groupby(capaCyc_df, intCol(capaCyc_df,:varCapa)), AsTable([:value,:valCapa]) => (x -> 1000*abs(sum(x.value)/x.valCapa[1])) => :value)
capaCyc_df[!,:variable] = map(x -> cyc_dic[cycCapa], capaCyc_df[!,:varCapa])
allData_df = vcat(allData_df,select(capaCyc_df,Not([:varCapa])))
end
# return dataframes and write csv files based on specified inputs
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(allData_df,anyM, fileName = "results_summary",rtnDf = rtnOpt)
end
if :raw in rtnOpt
CSV.write("$(anyM.options.outDir)/results_summary_$(anyM.options.outStamp).csv", allData_df)
end
if :rawDf in rtnOpt && :csvDf in rtnOpt
return allData_df, csvData_df
else
if :rawDf in rtnOpt return allData_df end
if :csvDf in rtnOpt return csvData_df end
end
end
# XXX results for costs
function reportResults(objGrp::Val{:costs},anyM::anyModel; rtnOpt::Tuple{Vararg{Symbol,N} where N} = (:csv,))
# prepare empty dataframe
allData_df = DataFrame(Ts_disSup = Int[], R = Int[], Te = Int[], C = Int[], variable = Symbol[], value = Float64[])
# loops over all objective variables with keyword "cost" in it
for cst in filter(x -> occursin("cost",string(x)),keys(anyM.parts.obj.var))
cost_df = copy(anyM.parts.obj.var[cst])
# rename all dispatch and expansion regions simply to region
if !isempty(intersect([:R_dis,:R_exp],namesSym(cost_df)))
rename!(cost_df,:R_dis in namesSym(cost_df) ? :R_dis : :R_exp => :R)
end
# add empty column for non-existing dimensions
for dim in (:Te,:C,:R)
if !(dim in namesSym(cost_df))
cost_df[:,dim] .= 0
end
end
# obtain values and write to dataframe
cost_df[:,:variable] .= string(cst)
cost_df[:,:value] = value.(cost_df[:,:var])
if :Ts_exp in namesSym(cost_df) cost_df = rename(cost_df,:Ts_exp => :Ts_disSup) end
allData_df = vcat(allData_df,cost_df[:,Not(:var)])
end
# return dataframes and write csv files based on specified inputs
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(allData_df,anyM, fileName = "results_costs", rtnDf = rtnOpt)
end
if :raw in rtnOpt
CSV.write("$(anyM.options.outDir)/results_costs_$(anyM.options.outStamp).csv", allData_df)
end
if :rawDf in rtnOpt && :csvDf in rtnOpt
return allData_df, csvData_df
else
if :rawDf in rtnOpt return allData_df end
if :csvDf in rtnOpt return csvData_df end
end
end
# XXX results for exchange
function reportResults(objGrp::Val{:exchange},anyM::anyModel; rtnOpt::Tuple{Vararg{Symbol,N} where N} = (:csv,))
allData_df = DataFrame(Ts_disSup = Int[], R_from = Int[], R_to = Int[], C = Int[], variable = Symbol[], value = Float64[])
if isempty(anyM.parts.exc.var) error("No exchange data found") end
# XXX expansion variables
if :expExc in keys(anyM.parts.exc.var)
exp_df = copy(anyM.parts.exc.var[:expExc]) |> (x -> vcat(x,rename(x,:R_from => :R_to, :R_to => :R_from)))
exp_df = flatten(exp_df,:Ts_expSup)
select!(exp_df,Not(:Ts_disSup))
rename!(exp_df,:Ts_expSup => :Ts_disSup)
exp_df = combine(groupby(exp_df,[:Ts_disSup,:R_from,:R_to,:C]), :var => (x -> value.(sum(x))) => :value)
exp_df[!,:variable] .= :expExc
else
exp_df = DataFrame(Ts_disSup = Int[], R_from = Int[], R_to = Int[], C = Int[], variable = Symbol[], value = Float64[])
end
# XXX capacity variables
capa_df = copy(anyM.parts.exc.var[:capaExc])
capa_df = vcat(capa_df,rename(filter(x -> x.dir == 0, capa_df),:R_from => :R_to, :R_to => :R_from))
capa_df = combine(groupby(capa_df,[:Ts_disSup,:R_from,:R_to,:C]), :var => (x -> value.(sum(x))) => :value)
capa_df[!,:variable] .= :capaExc
if anyM.options.decomm != :none
oprCapa_df = copy(anyM.parts.exc.var[:oprCapaExc])
oprCapa_df = vcat(oprCapa_df,rename(filter(x -> x.dir == 0, oprCapa_df),:R_from => :R_to, :R_to => :R_from))
oprCapa_df = combine(groupby(oprCapa_df,[:Ts_disSup,:R_from,:R_to,:C]), :var => (x -> value.(sum(x))) => :value)
oprCapa_df[!,:variable] .= :oprCapaExc
capa_df = vcat(capa_df,oprCapa_df)
end
# XXX dispatch variables
disp_df = getAllVariables(:exc,anyM)
disp_df = combine(groupby(disp_df,[:Ts_disSup,:R_from,:R_to,:C]), :var => (x -> value.(sum(x)) ./ 1000) => :value)
disp_df[!,:variable] .= :exc
# XXX get full load hours
capaExt_df = replCarLeafs(copy(capa_df),anyM.sets[:C])
flh_df = innerjoin(rename(select(capaExt_df,Not(:variable)),:value => :capa),rename(select(disp_df,Not(:variable)),:value => :disp),on = [:Ts_disSup,:R_from,:R_to,:C])
flh_df[!,:value] = flh_df[!,:disp] ./ flh_df[!,:capa] .* 1000
flh_df[!,:variable] .= :flhExc
# XXX merge and print all data
allData_df = vcat(exp_df,capa_df,disp_df,select(flh_df,Not([:capa,:disp])))
# return dataframes and write csv files based on specified inputs
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(allData_df,anyM, fileName = "results_exchange", rtnDf = rtnOpt)
end
if :raw in rtnOpt
CSV.write("$(anyM.options.outDir)/results_exchange_$(anyM.options.outStamp).csv", allData_df)
end
if :rawDf in rtnOpt && :csvDf in rtnOpt
return allData_df, csvData_df
else
if :rawDf in rtnOpt return allData_df end
if :csvDf in rtnOpt return csvData_df end
end
end
# XXX print time series for in and out into separate tables
"""
```julia
reportTimeSeries(car_sym::Symbol, model_object::anyModel)
```
Writes elements of energy balance for carrier specified by `car_sym` to `.csv` file. See [Time-series](@ref).
"""
function reportTimeSeries(car_sym::Symbol, anyM::anyModel; filterFunc::Function = x -> true, unstck::Bool = true, signVar::Tuple = (:in,:out), minVal::Number = 1e-3, mergeVar::Bool = true, rtnOpt::Tuple{Vararg{Symbol,N} where N} = (:csv,))
# XXX converts carrier named provided to index
node_arr = filter(x -> x.val == string(car_sym),collect(values(anyM.sets[:C].nodes)))
if length(node_arr) != 1
error("no carrier named $car_sym defined")
return
end
c_int = node_arr[1].idx
# XXX initialize dictionary to save data
allData_dic = Dict{Symbol,DataFrame}()
for signItr in signVar
allData_dic[signItr] = DataFrame(Ts_disSup = Int[], Ts_dis = Int[], R_dis = Int[], variable = String[], value = Float64[])
end
# XXX initialize relevant dimensions and carriers
relDim_df = filter(filterFunc,createPotDisp([c_int],anyM))
relC_arr = unique([c_int,getDescendants(c_int,anyM.sets[:C])...])
cRes_tup = anyM.cInfo[c_int] |> (x -> (Ts_dis = x.tsDis, R_dis = x.rDis, C = anyM.sets[:C].nodes[c_int].lvl))
# XXX add demand and size it
if :out in signVar
dem_df = matchSetParameter(relDim_df,anyM.parts.bal.par[:dem],anyM.sets,newCol = :value)
dem_df[!,:value] = dem_df[!,:value] .* getResize(dem_df,anyM.sets[:Ts],anyM.supTs) .* -1
dem_df[!,:variable] .= "demand"
filter!(x -> abs(x.value) > minVal, dem_df)
allData_dic[:out] = vcat(allData_dic[:out],select!(dem_df,Not(:C)))
end
# XXX adds all technology related variables
cBalRes_tup = anyM.cInfo[c_int] |> (x -> (x.tsDis, x.rDis))
relType_tup = map(x -> x in signVar ? (x == :in ? (:use, :stExtIn) : (:gen,:stExtOut)) : tuple(),(:in,:out)) |> (x -> tuple(vcat(collect.(x)...)...))
for c in relC_arr
# gets technologies relevant for respective filterCarrier
relTech_arr = getRelTech(c,anyM.parts.tech,anyM.sets[:C])
if isempty(relTech_arr) continue end
for x in relTech_arr
# gets resolution and adjusts add_df in case of an agggregated technology
add_df = select(filter(r -> r.C == c,anyM.parts.tech[x[1]].var[x[2]]),[:Ts_disSup,:Ts_dis,:R_dis,:var])
tRes_tup = anyM.parts.tech[x[1]].disAgg ? (cRes_tup[1], anyM.parts.tech[x[1]].balLvl.exp[2]) : (cRes_tup[1], cRes_tup[2])
checkTechReso!(tRes_tup,cBalRes_tup,add_df,anyM.sets)
# filter values based on filter function and minimum value reported
add_df = combine(groupby(add_df,[:Ts_disSup,:Ts_dis,:R_dis]), :var => (x -> sum(x)) => :var)
filter!(filterFunc,add_df)
if isempty(add_df) continue end
add_df[!,:value] = value.(add_df[!,:var]) .* (x[2] in (:use,:stExtIn) ? -1.0 : 1.0)
add_df[!,:variable] .= string(x[2],"; ", x[1])
filter!(x -> abs(x.value) > minVal, add_df)
# add to dictionary of dataframe for in or out
sign_sym = x[2] in (:use,:stExtIn) ? :out : :in
allData_dic[sign_sym] = vcat(allData_dic[sign_sym] ,select(add_df,Not(:var)))
end
end
# XXX add import and export variables
if :exc in keys(anyM.parts.exc.var)
exc_df = filterCarrier(anyM.parts.exc.var[:exc],relC_arr)
if :out in signVar
excFrom_df = combine(groupby(filter(filterFunc,rename(copy(exc_df),:R_from => :R_dis)), [:Ts_disSup,:Ts_dis,:R_dis]), :var => (x -> value(sum(x)) * -1) => :value)
excFrom_df[!,:variable] .= :export
filter!(x -> abs(x.value) > minVal, excFrom_df)
if !isempty(excFrom_df)
allData_dic[:out] = vcat(allData_dic[:out],excFrom_df)
end
end
if :in in signVar
addLoss_df = rename(getExcLosses(convertExcCol(exc_df),anyM.parts.exc.par,anyM.sets),:R_b => :R_dis)
excTo_df = combine(x -> (value = value(dot(x.var,(1 .- x.loss))),),groupby(filter(filterFunc,addLoss_df), [:Ts_disSup,:Ts_dis,:R_dis]))
excTo_df[!,:variable] .= :import
filter!(x -> abs(x.value) > minVal, excTo_df)
if !isempty(excTo_df)
allData_dic[:in] = vcat(allData_dic[:in],excTo_df)
end
end
end
# XXX add trade
agg_arr = [:Ts_dis, :R_dis, :C]
if !isempty(anyM.parts.trd.var)
for trd in intersect(keys(anyM.parts.trd.var),(:trdBuy,:trdSell))
trdVar_df = copy(relDim_df)
trdVar_df[!,:value] = value.(filterCarrier(anyM.parts.trd.var[trd],relC_arr) |> (x -> aggUniVar(x,relDim_df,agg_arr,cRes_tup,anyM.sets))) .* (trd == :trdBuy ? 1.0 : -1.0)
trdVar_df[!,:variable] .= trd
filter!(x -> abs(x.value) > minVal, trdVar_df)
sign_sym = :trdBuy == trd ? :in : :out
allData_dic[sign_sym] = vcat(allData_dic[sign_sym],select(trdVar_df,Not(:C)))
end
end
# XXX add curtailment
if :crt in keys(anyM.parts.bal.var)
crt_df = copy(relDim_df)
crt_df[!,:value] = value.(filterCarrier(anyM.parts.bal.var[:crt],relC_arr) |> (x -> aggUniVar(x,crt_df,agg_arr, cRes_tup,anyM.sets))) .* -1.0
crt_df[!,:variable] .= :crt
filter!(x -> abs(x.value) > minVal, crt_df)
allData_dic[:out] = vcat(allData_dic[:out],select(crt_df,Not(:C)))
end
# XXX add losted load
if :lss in keys(anyM.parts.bal.var)
lss_df = copy(relDim_df)
lss_df[!,:value] = value.(filterCarrier(anyM.parts.bal.var[:lss],relC_arr) |> (x -> aggUniVar(x,lss_df,agg_arr, cRes_tup,anyM.sets)))
lss_df[!,:variable] .= :lss
filter!(x -> abs(x.value) > minVal, lss_df)
allData_dic[:in] = vcat(allData_dic[:in],select(lss_df,Not(:C)))
end
# XXX unstack data and write to csv
if mergeVar
# merges in and out files and writes to same csv file
data_df = vcat(values(allData_dic)...)
if unstck && !isempty(data_df)
data_df[!,:variable] = CategoricalArray(data_df[!,:variable])
data_df = unstack(data_df,:variable,:value)
end
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(data_df,anyM, fileName = string("timeSeries_",car_sym,), rtnDf = rtnOpt)
end
if :raw in rtnOpt
CSV.write("$(anyM.options.outDir)/$(string("timeSeries_",car_sym,))_$(anyM.options.outStamp).csv", data_df)
end
else
# loops over different signs and writes to different csv files
for signItr in signVar
data_df = allData_dic[signItr]
if unstck && !isempty(data_df)
data_df[!,:variable] = CategoricalArray(data_df[!,:variable])
data_df = unstack(data_df,:variable,:value)
end
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(data_df,anyM, fileName = string("timeSeries_",car_sym,"_",signItr), rtnDf = rtnOpt)
end
if :raw in rtnOpt
CSV.write("$(anyM.options.outDir)/$(string("timeSeries_",car_sym,"_",signItr))_$(anyM.options.outStamp).csv", data_df)
end
end
end
# return dataframes based on specified inputs
if :rawDf in rtnOpt && :csvDf in rtnOpt
return data_df, csvData_df
else
if :rawDf in rtnOpt return data_df end
if :csvDf in rtnOpt return csvData_df end
end
end
# XXX write dual values for constraint dataframe
"""
```julia
printDuals(print_df::DataFrame, model_object::anyModel)
```
Writes duals of a constraint DataFrame to a `.csv` file in readable format (strings instead of ids). See [Individual elements](@ref).
"""
function printDuals(cns_df::DataFrame,anyM::anyModel;filterFunc::Function = x -> true, fileName::String = "", rtnOpt::Tuple{Vararg{Symbol,N} where N} = (:csv,))
if !(:cns in namesSym(cns_df)) error("No constraint column found!") end
cns_df = copy(filter(filterFunc,cns_df))
cns_df[!,:dual] = dual.(cns_df[!,:cns])
if :csv in rtnOpt || :csvDf in rtnOpt
csvData_df = printObject(select(cns_df,Not(:cns)),anyM;fileName = string("dual",fileName != "" ? "_" : "",fileName), rtnDf = rtnOpt)
end
if :rawDf in rtnOpt
CSV.write("$(anyM.options.outDir)/$(string("dual",fileName != "" ? "_" : "",fileName))_$(anyM.options.outStamp).csv", data_df)
end
# return dataframes based on specified inputs
if :rawDf in rtnOpt && :csvDf in rtnOpt
return select(cns_df,Not(:cns)), csvData_df
else
if :rawDf in rtnOpt return data_df end
if :csvDf in rtnOpt return csvData_df end
end
end
# </editor-fold>
# <editor-fold desc="plotting tools"
# XXX plots tree graph for input set
"""
```julia
plotTree(tree_sym::Symbol, model_object::anyModel)
```
Plots the hierarchical tree of nodes for the set specified by `tree_sym`. See [Node trees](@ref).
"""
function plotTree(tree_sym::Symbol, anyM::anyModel; plotSize::Tuple{Float64,Float64} = (8.0,4.5), fontSize::Int = 12, useColor::Bool = true, wide::Array{Float64,1} = fill(1.0,30))
netw = pyimport("networkx")
plt = pyimport("matplotlib.pyplot")
PyCall.fixqtpath()
# <editor-fold desc="initialize variables"
treeName_dic = Dict(:region => :R,:timestep => :Ts,:carrier => :C,:technology => :Te)
# convert tree object into a data frame
tree_obj = anyM.sets[treeName_dic[tree_sym]]
data_arr = filter(x -> x.idx != 0,collect(values(tree_obj.nodes))) |> (y -> map(x -> getfield.(y,x),(:idx,:val,:lvl,:down,:subIdx)))
tree_df = DataFrame(idx = data_arr[1], val = data_arr[2], lvl = data_arr[3], down = data_arr[4], subIdx = data_arr[5], up =map(x -> tree_obj.up[x],data_arr[1]))
# sets options
col_dic = Dict(:region => (0.133, 0.545, 0.133),:timestep => (0.251,0.388,0.847),:carrier => (0.584, 0.345, 0.698),:technology => (0.796,0.235,0.2))
# </editor-fold>
# <editor-fold desc="computes positon of nodes"
# adds a new dummy top node
push!(tree_df,(0,"",0,tree_obj.nodes[0].down ,0,1))
nodes_int = nrow(tree_df)
idxPos_dic = Dict(zip(tree_df[:,:idx], 1:(nodes_int)))
# create vertical position and labels from input tree
locY_arr = float(tree_df[!,:lvl]) .+ 1.2
# horizontal position is computed in a two step process
locX_arr = zeros(Float64, nodes_int)
# first step, filter all nodes at end of a respective branch and sort them correctly
lowLvl_df = tree_df[isempty.(tree_df[!,:down]),:]
lowLvl_df = lowLvl_df[map(y -> findall(x -> x == y, lowLvl_df[:,:idx])[1],sortSiblings(convert(Array{Int64,1},lowLvl_df[:,:idx]),tree_obj)),:]
# sets distance from next node on the left depending on if they are part of the same subtree
for (idx2, lowNode) in Iterators.drop(enumerate(eachrow(lowLvl_df)),1)
if lowNode[:up] == lowLvl_df[idx2-1,:up] distance_fl = wide[lowNode[:lvl]] else distance_fl = 1 end
locX_arr[idxPos_dic[lowNode[:idx]]] = locX_arr[idxPos_dic[lowLvl_df[idx2-1,:idx]]] + distance_fl
end
# second step, remaining horizontal nodes are placed in the middle of their children
highLvl_df = tree_df[false .== isempty.(tree_df[!,:down]),:]
highLvl_df = highLvl_df[map(y -> findall(x -> x == y, highLvl_df[:,:idx])[1],sortSiblings(convert(Array{Int64,1},highLvl_df[:,:idx]),tree_obj)),:]
for highNode in reverse(eachrow(highLvl_df))
locX_arr[idxPos_dic[highNode[:idx]]] = Statistics.mean(locX_arr[map(x -> idxPos_dic[x],highNode.down)])
end
locX_arr[end] = Statistics.mean(locX_arr[map(x -> idxPos_dic[x],tree_df[findall(tree_df[:,:lvl] .== 1),:idx])])
locY_arr = abs.(locY_arr .- maximum(locY_arr))
# compute dictionary of final node positions
pos_dic = Dict(x => (locX_arr[x]/maximum(locX_arr),locY_arr[x]/maximum(locY_arr)) for x in 1:nodes_int)
posIdx_dic = collect(idxPos_dic) |> (z -> Dict(Pair.(getindex.(z,2),getindex.(z,1))))
# </editor-fold>
# <editor-fold desc="determine node colors and labels"
name_dic = anyM.graInfo.names
label_dic = Dict(x[1] => x[2] == "" ? "" : name_dic[x[2]] for x in enumerate(tree_df[!,:val]))
if useColor
col_arr = [col_dic[tree_sym]]
else
col_arr = getNodeColors(collect(1:nodes_int),label_dic,anyM)
end
# </editor-fold>
# <editor-fold desc="draw final tree"
# draw single nodes
edges_arr = Array{Tuple{Int,Int},1}()
for rowTree in eachrow(tree_df)[1:end-1]
# 0 node in tree_df becomes last node in graph, because there is 0 node within the plots
if rowTree[:up] == 0 pare_int = nodes_int else pare_int = idxPos_dic[rowTree[:up]] end
push!(edges_arr, (idxPos_dic[rowTree[:idx]], pare_int))
end
# draw graph object
plt.clf()
graph_obj = netw.Graph()
netw.draw_networkx_nodes(graph_obj, pos_dic; nodelist = collect(1:nodes_int), node_color = col_arr)
netw.draw_networkx_edges(graph_obj, pos_dic; edgelist = edges_arr)
posLabOff_dic = netw.draw_networkx_labels(graph_obj, pos_dic, font_family = "arial", font_size = fontSize, labels = label_dic)
figure = plt.gcf()
figure.set_size_inches(plotSize[1],plotSize[2])
r = figure.canvas.get_renderer()
trans = plt.gca().transData.inverted()
for x in collect(posLabOff_dic)
down_boo = isempty(tree_obj.nodes[posIdx_dic[x[1]]].down)
bb = x[2].get_window_extent(renderer=r)
bbdata = bb.transformed(trans)
# computes offset of label for leaves and non-leaves by first moving according to size auf letters itself (bbdata) and then by size of the node
# (node-size in pixel is devided by dpi and plot size to get relative offset)
offset_arr = [down_boo ? 0.0 : (bbdata.width/2.0 + (150/plotSize[1]/600)), down_boo ? (-bbdata.height/2.0 - 150/plotSize[2]/600) : 0.0]
x[2].set_position([x[2]."_x" + offset_arr[1],x[2]."_y" + offset_arr[2]])
x[2].set_clip_on(false)
end
# size plot and save
plt.axis("off")
plt.savefig("$(anyM.options.outDir)/$(tree_sym)_$(anyM.options.outStamp)", dpi = 600, bbox_inches="tight")
# </editor-fold>
end
"""
```julia
plotEnergyFlow(plotType::Symbol, model_object::anyModel)
```
Plots the energy flow in a model. Set `plotType` to `:graph` for a qualitative node graph or to `:sankey` for a quantitative Sankey diagram. See [Energy flow](@ref).
"""
plotEnergyFlow(plotType::Symbol,anyM::anyModel; kwargs...) = plotEnergyFlow(Val{plotType}(),anyM::anyModel; kwargs...)
# XXX plot qualitative energy flow graph (applies python modules networkx and matplotlib via PyCall package)
# Draws a qualitative node graph of the energy flows in the model: carriers appear as colored
# squares, technologies as circles, and edges indicate which technology uses or generates which
# carrier. The result is written to '<outDir>/energyFlowGraph_<outStamp>' at 600 dpi.
# Keyword arguments:
#   plotSize                   - size of the figure in inches
#   fontSize                   - font size of the node labels
#   replot                     - recompute node positions even if positions were stored before
#   scaDist, maxIter, initTemp - layout parameters forwarded to the flowLayout algorithm
#   useTeColor                 - if true, technology nodes get individual colors instead of uniform grey
function plotEnergyFlow(objGrp::Val{:graph},anyM::anyModel; plotSize::Tuple{Number,Number} = (16.0,9.0), fontSize::Int = 12, replot::Bool = true, scaDist::Number = 0.5, maxIter::Int = 5000, initTemp::Number = 2.0, useTeColor::Bool = false)
# XXX import python function
netw = pyimport("networkx")
plt = pyimport("matplotlib.pyplot")
# NOTE(review): PyCall helper, presumably adjusts Qt library paths for the matplotlib backend — confirm
PyCall.fixqtpath()
# <editor-fold desc="create graph and map edges"
# build a directed graph from carrier-carrier and technology-carrier edges
graph_obj = netw.DiGraph()
flowGrap_obj = anyM.graInfo.graph
edges_arr = vcat(collect.(flowGrap_obj.edgeC),collect.(flowGrap_obj.edgeTe))
for x in edges_arr
graph_obj.add_edge(x[1],x[2])
end
# </editor-fold>
# <editor-fold desc="obtain and order graph properties (colors, names, etc.)"
# get carriers that should be plotted, because they are connected with a technology
relNodeC1_arr = filter(x -> x[2] in vcat(getindex.(flowGrap_obj.edgeTe,1),getindex.(flowGrap_obj.edgeTe,2)), collect(flowGrap_obj.nodeC))
# get carriers that should be plotted, because they are connected with another carrier that should be plotted
relNodeC2_arr = filter(x -> any(map(y -> x[2] in y && !isempty(intersect(getindex.(relNodeC1_arr,2),y)) , collect.(flowGrap_obj.edgeC))), collect(flowGrap_obj.nodeC))
# maps node id to node names
idToC_arr = map(x -> x[2] => anyM.sets[:C].nodes[x[1]].val, filter(y -> y[2] in union(edges_arr...), intersect(flowGrap_obj.nodeC, union(relNodeC1_arr,relNodeC2_arr))))
idToTe_arr = map(x -> x[2] => anyM.sets[:Te].nodes[x[1]].val, filter(y -> y[2] in union(edges_arr...), collect(flowGrap_obj.nodeTe)))
idToName_dic = Dict(vcat(idToC_arr,idToTe_arr))
# obtain colors of nodes, ordered by appearance in the edge list
ordC_arr = intersect(unique(vcat(edges_arr...)), getindex.(idToC_arr,1))
ordTe_arr = intersect(unique(vcat(edges_arr...)), getindex.(idToTe_arr,1))
nodeC_arr = getNodeColors(ordC_arr,idToName_dic,anyM)
# technologies share a uniform grey unless useTeColor is set
nodeTe_arr = useTeColor ? getNodeColors(ordTe_arr,idToName_dic,anyM) : [(0.85,0.85,0.85)]
nodesCnt_int = length(idToName_dic)
# converts edges to sparse matrix for flowLayout function
# (matrix is indexed by position within id_arr, not by the node ids themselves)
id_arr = vcat(getindex.(idToC_arr,1), getindex.(idToTe_arr,1))
edges_mat = convert(Array{Int64,2},zeros(nodesCnt_int,nodesCnt_int))
foreach(x -> edges_mat[findall(id_arr .== x[1])[1],findall(id_arr .== x[2])[1]] = 1, filter(x -> x[1] in id_arr && x[2] in id_arr,edges_arr))
edges_smat = SparseArrays.sparse(edges_mat)
# compute position of nodes, only when requested or when no positions were stored before
if replot || !(isdefined(flowGrap_obj,:nodePos))
pos_dic = flowLayout(nodesCnt_int,edges_smat; scaDist = scaDist, maxIter = maxIter, initTemp = initTemp)
# translate matrix positions back to node ids before storing
flowGrap_obj.nodePos = Dict(id_arr[x] => pos_dic[x] for x in keys(pos_dic))
end
# separate into edges between technologies and carriers and between carriers, then get respective colors
cEdges_arr = filter(x -> x[1] in ordC_arr && x[2] in ordC_arr, collect(graph_obj.edges))
edgeColC_arr = map(x -> anyM.graInfo.colors[idToName_dic[x[1]]], cEdges_arr)
teEdges_arr = filter(x -> x[1] in ordTe_arr || x[2] in ordTe_arr, collect(graph_obj.edges))
# technology edges inherit the color of the carrier they connect to
edgeColTe_arr = map(x -> x[1] in ordC_arr ? anyM.graInfo.colors[idToName_dic[x[1]]] : anyM.graInfo.colors[idToName_dic[x[2]]], teEdges_arr)
# </editor-fold>
# <editor-fold desc="draw and save graph with python"
# plot final graph object: carriers as squares, technologies as circles
plt.clf()
netw.draw_networkx_nodes(graph_obj, flowGrap_obj.nodePos, nodelist = ordC_arr, node_shape="s", node_size = 300, node_color = nodeC_arr)
netw.draw_networkx_nodes(graph_obj, flowGrap_obj.nodePos, nodelist = ordTe_arr, node_shape="o", node_size = 185,node_color = nodeTe_arr)
netw.draw_networkx_edges(graph_obj, flowGrap_obj.nodePos, edgelist = cEdges_arr, edge_color = edgeColC_arr, arrowsize = 16.2, width = 1.62)
netw.draw_networkx_edges(graph_obj, flowGrap_obj.nodePos, edgelist = teEdges_arr, edge_color = edgeColTe_arr)
posLabC_dic = netw.draw_networkx_labels(graph_obj, flowGrap_obj.nodePos, font_size = fontSize, labels = Dict(y[1] => anyM.graInfo.names[y[2]] for y in filter(x -> x[1] in ordC_arr,idToName_dic)), font_weight = "bold", font_family = "arial")
posLabTe_dic = netw.draw_networkx_labels(graph_obj, flowGrap_obj.nodePos, font_size = fontSize, font_family = "arial", labels = Dict(y[1] => anyM.graInfo.names[y[2]] for y in filter(x -> !(x[1] in ordC_arr),idToName_dic)))
# adjusts position of carrier labels so that they are right from node, uses code provided by ImportanceOfBeingErnest from here https://stackoverflow.com/questions/43894987/networkx-node-labels-relative-position
figure = plt.gcf()
figure.set_size_inches(plotSize[1],plotSize[2])
r = figure.canvas.get_renderer()
trans = plt.gca().transData.inverted()
for x in vcat(collect(posLabC_dic),collect(posLabTe_dic))
cNode_boo = x[1] in ordC_arr
# bounding box of the rendered label text, converted into data coordinates
bb = x[2].get_window_extent(renderer=r)
bbdata = bb.transformed(trans)
# computes offset of label for leaves and non-leaves by first moving according to size of letters itself (bbdata) and then by size of the node
# (node-size in pixel is divided by dpi and plot size to get relative offset)
offset_arr = [cNode_boo ? (bbdata.width/2.0 + (500/plotSize[1]/600)) : 0.0, cNode_boo ? 0.0 : (bbdata.height/2.0 + 200/plotSize[2]/600)]
# NOTE(review): relies on private matplotlib Text fields _x/_y to shift labels — confirm on matplotlib upgrades
x[2].set_position([x[2]."_x" + offset_arr[1],x[2]."_y" + offset_arr[2]])
x[2].set_clip_on(false)
end
plt.axis("off")
# size plot and save
plt.savefig("$(anyM.options.outDir)/energyFlowGraph_$(anyM.options.outStamp)", dpi = 600)
# </editor-fold>
end
# XXX plot quantitative energy flow sankey diagram (applies python module plotly via PyCall package)
# Creates an interactive html Sankey diagram of the model's quantitative energy flows and saves it
# as 'energyFlowSankey_<dropDown>_<outStamp>.html' in the output directory.
# Keyword arguments:
#   plotSize   - size of the plot in inches (scaled by factor 125 to obtain pixels)
#   minVal     - flows with an absolute value below this threshold are dropped
#   filterFunc - row-wise predicate applied to the summarised results to filter flows
#   dropDown   - dimensions of the dropdown menu, any subset of (:region,:timestep)
#   rmvNode    - names of nodes to remove; "node; carrier" only removes flows of that carrier
#   useTeColor - if true, technologies are plotted with their individual colors
function plotEnergyFlow(objGrp::Val{:sankey},anyM::anyModel; plotSize::Tuple{Number,Number} = (16.0,9.0), minVal::Float64 = 0.1, filterFunc::Function = x -> true, dropDown::Tuple{Vararg{Symbol,N} where N} = (:region,:timestep), rmvNode::Tuple{Vararg{String,N} where N} = tuple(), useTeColor = true)
    plt = pyimport("plotly")
    flowGrap_obj = anyM.graInfo.graph

    # <editor-fold desc="initialize data"
    if !isempty(setdiff(dropDown,[:region,:timestep]))
        error("dropDown only accepts array :region and :timestep as content")
    end

    # get mappings to create buttons of dropdown menu
    drop_dic = Dict(:region => :R_dis, :timestep => :Ts_disSup)
    dropDim_arr = collect(map(x -> drop_dic[x], dropDown))

    # get summarised data and filter dispatch variables
    data_df = reportResults(:summary,anyM,rtnOpt = (:rawDf,))
    filter!(x -> x.variable in (:demand,:gen,:use,:stIn,:stOut,:trdBuy,:trdSell,:import,:export,:lss,:crt),data_df)

    # filter non relevant entries
    filter!(x -> abs(x.value) > minVal, data_df)
    filter!(filterFunc, data_df)

    # create dictionaries for nodes that are neither technology nor carrier (demand, import, losses, etc.);
    # their ids start right after the largest technology node id
    othNode_dic = maximum(values(flowGrap_obj.nodeTe)) |> (z -> Dict((x[2].C,x[2].variable) => x[1] + z for x in enumerate(eachrow(unique(filter(x -> x.Te == 0,data_df)[!,[:variable,:C]])))))
    othNodeId_dic = collect(othNode_dic) |> (z -> Dict(Pair.(getindex.(z,2),getindex.(z,1))))
    # </editor-fold>

    # <editor-fold desc="prepare labels and colors"
    # prepare name and color assignment
    names_dic = anyM.graInfo.names
    revNames_dic = collect(names_dic) |> (z -> Dict(Pair.(getindex.(z,2),getindex.(z,1))))
    col_dic = anyM.graInfo.colors
    sortTe_arr = getindex.(sort(collect(flowGrap_obj.nodeTe),by = x -> x[2]),1)
    # carrier color is looked up by the original name first, by the mapped name second
    # (fixed: the fallback previously indexed col_dic with col_dic[z] instead of names_dic[z], raising a KeyError)
    cColor_dic = Dict(x => anyM.sets[:C].nodes[x].val |> (z -> z in keys(col_dic) ? col_dic[z] : (names_dic[z] in keys(col_dic) ? col_dic[names_dic[z]] : (0.85,0.85,0.85))) for x in sort(collect(keys(flowGrap_obj.nodeC))))

    # create array of node labels
    cLabel_arr = map(x -> names_dic[anyM.sets[:C].nodes[x].val],sort(collect(keys(flowGrap_obj.nodeC))))
    teLabel_arr = map(x -> names_dic[anyM.sets[:Te].nodes[x].val],sortTe_arr)
    othLabel_arr = map(x -> names_dic[String(othNodeId_dic[x][2])],sort(collect(keys(othNodeId_dic))))
    nodeLabel_arr = vcat(cLabel_arr, teLabel_arr, othLabel_arr)
    revNodelLabel_arr = map(x -> revNames_dic[x],nodeLabel_arr)

    # create array of node colors
    cColor_arr = map(x -> anyM.sets[:C].nodes[x].val |> (z -> z in keys(col_dic) ? col_dic[z] : (names_dic[z] in keys(col_dic) ? col_dic[names_dic[z]] : (0.85,0.85,0.85))),sort(collect(keys(flowGrap_obj.nodeC))))
    teColor_arr = map(x -> anyM.sets[:Te].nodes[x].val |> (z -> useTeColor && z in keys(col_dic) ? col_dic[z] : (useTeColor && names_dic[z] in keys(col_dic) ? col_dic[names_dic[z]] : (0.85,0.85,0.85))),sortTe_arr)
    othColor_arr = map(x -> anyM.sets[:C].nodes[othNodeId_dic[x][1]].val |> (z -> z in keys(col_dic) ? col_dic[z] : (names_dic[z] in keys(col_dic) ? col_dic[names_dic[z]] : (0.85,0.85,0.85))),sort(collect(keys(othNodeId_dic))))
    nodeColor_arr = vcat(map(x -> replace.(string.("rgb",string.(map(z -> z .* 255.0,x)))," " => ""),[cColor_arr, teColor_arr, othColor_arr])...)
    dropData_arr = Array{Dict{Symbol,Any},1}()
    # </editor-fold>

    # XXX loop over potential buttons in dropdown menu
    for drop in eachrow(unique(data_df[!,dropDim_arr]))
        # <editor-fold desc="filter data and create flow array"
        dropData_df = copy(data_df)
        if :region in dropDown subR_arr = [drop.R_dis, getDescendants(drop.R_dis,anyM.sets[:R],true)...] end
        for d in dropDown
            filter!(x -> d == :region ? x.R_dis in subR_arr : x.Ts_disSup == drop.Ts_disSup, dropData_df)
        end

        flow_arr = Array{Tuple,1}()

        # write flows reported in data summary; each flow is (source node, target node, value)
        for x in eachrow(dropData_df)
            a = Array{Any,1}(undef,3)
            if x.variable in (:demand,:export,:trdSell,:crt) # carrier to non-technology node
                a[1] = flowGrap_obj.nodeC[x.C]
                a[2] = othNode_dic[(x.C,x.variable)]
            elseif x.variable in (:import,:trdBuy,:lss) # non-technology node to carrier
                a[1] = othNode_dic[(x.C,x.variable)]
                a[2] = flowGrap_obj.nodeC[x.C]
            elseif x.variable in (:gen,:stOut) # technology to carrier
                if x.Te in keys(flowGrap_obj.nodeTe)
                    a[1] = flowGrap_obj.nodeTe[x.Te]
                else # if technology is not directly part of the graph, use its smallest parent that is
                    a[1] = flowGrap_obj.nodeTe[minimum(intersect(keys(flowGrap_obj.nodeTe),getAncestors(x.Te,anyM.sets[:Te],:int)))]
                end
                a[2] = flowGrap_obj.nodeC[x.C]
            else # carrier to technology
                a[1] = flowGrap_obj.nodeC[x.C]
                if x.Te in keys(flowGrap_obj.nodeTe)
                    a[2] = flowGrap_obj.nodeTe[x.Te]
                else
                    a[2] = flowGrap_obj.nodeTe[minimum(intersect(keys(flowGrap_obj.nodeTe),getAncestors(x.Te,anyM.sets[:Te],:int)))]
                end
            end
            a[3] = abs(x.value)
            push!(flow_arr,tuple(a...))
        end

        # create flows connecting different carriers
        for x in filter(x -> anyM.sets[:C].up[x] != 0,intersect(union(getindex.(flow_arr,1),getindex.(flow_arr,2)),values(flowGrap_obj.nodeC)))
            a = Array{Any,1}(undef,3)
            a[1] = flowGrap_obj.nodeC[x]
            a[2] = flowGrap_obj.nodeC[anyM.sets[:C].up[x]]
            # net flow from child to parent carrier: ingoing minus outgoing
            a[3] = (getindex.(filter(y -> y[2] == x,flow_arr),3) |> (z -> isempty(z) ? 0.0 : sum(z))) - (getindex.(filter(y -> y[1] == x,flow_arr),3) |> (z -> isempty(z) ? 0.0 : sum(z)))
            push!(flow_arr,tuple(a...))
        end

        # merges flows for different regions that connect the same nodes
        flow_arr = map(unique(map(x -> x[1:2],flow_arr))) do fl
            allFl = filter(y -> y[1:2] == fl[1:2],flow_arr)
            return (allFl[1][1],allFl[1][2],sum(getindex.(allFl,3)))
        end

        # removes nodes according to function input provided
        for rmv in rmvNode
            # splits remove expression by semicolon and searches for first part
            rmvStr_arr = split(rmv,"; ")
            relNodes_arr = findall(nodeLabel_arr .== rmvStr_arr[1])
            if isempty(relNodes_arr) relNodes_arr = findall(revNodelLabel_arr .== rmvStr_arr[1]) end
            if isempty(relNodes_arr) continue end

            if length(rmvStr_arr) == 2 # if rmv contains two strings separated by a semicolon, the second one should relate to a carrier, carrier is searched for and all related flows are removed
                relC_arr = findall(nodeLabel_arr .== rmvStr_arr[2])
                # fixed: fallback search previously checked relNodes_arr (always non-empty here) instead of relC_arr
                if isempty(relC_arr) relC_arr = findall(revNodelLabel_arr .== rmvStr_arr[2]) end
                if isempty(relC_arr)
                    produceMessage(anyM.options,anyM.report, 1," - Remove string contained a carrier not found in graph, check for typos: "*rmv)
                    continue
                else
                    c_int = relC_arr[1]
                end
                filter!(x -> !((x[1] in relNodes_arr || x[2] in relNodes_arr) && (x[1] == c_int || x[2] == c_int)),flow_arr)
            elseif length(rmvStr_arr) > 2
                error("one remove string contained more than one semicolon, this is not supported")
            else # if rmv only contains one string, only remove nodes where in- and outgoing flow are equal or only one of both exists
                out_tup = filter(x -> x[1] == relNodes_arr[1],flow_arr)
                in_tup = filter(x -> x[2] == relNodes_arr[1],flow_arr)

                if length(out_tup) == 1 && length(in_tup) == 1 && out_tup[1][3] == in_tup[1][3] # in- and outgoing are the same, bridge the removed node
                    filter!(x -> !(x in (out_tup[1],in_tup[1])),flow_arr)
                    push!(flow_arr,(in_tup[1][1],out_tup[1][2],in_tup[1][3]))
                elseif length(out_tup) == 0 # only ingoing flows
                    filter!(x -> !(x in in_tup),flow_arr)
                elseif length(in_tup) == 0 # only outgoing flows
                    filter!(x -> !(x in out_tup),flow_arr)
                end
            end
        end
        # </editor-fold>

        # <editor-fold desc="create dictionaries for later plotting"
        # links inherit the color of the involved carrier; opacity is reduced for flows from or to non-carrier nodes
        linkColor_arr = map(x -> collect(x[1] in keys(cColor_dic) ? cColor_dic[x[1]] : cColor_dic[x[2]]) |>
                                        (z -> replace(string("rgba",string(tuple([255.0 .*z..., (x[1] in keys(cColor_dic) && x[2] in keys(cColor_dic) ? 0.8 : 0.5)]...)))," " => "")), flow_arr)
        # plotly expects zero-based node indices
        link_dic = Dict(:source => getindex.(flow_arr,1) .- 1, :target => getindex.(flow_arr,2) .- 1, :value => getindex.(flow_arr,3), :color => linkColor_arr)
        fullData_arr = [Dict(:link => link_dic, :node => Dict(:label => nodeLabel_arr, :color => nodeColor_arr))]

        # pushes dictionary to overall array, button label shows the selected region/timestep
        label_str = string("<b>",join(map(y -> anyM.sets[Symbol(split(String(y),"_")[1])].nodes[drop[y]].val,dropDim_arr),", "),"</b>")
        push!(dropData_arr,Dict(:args => fullData_arr, :label => label_str, :method => "restyle"))
        # </editor-fold>
    end

    # <editor-fold desc="create various dictionaries to define format and create plot"
    menus_dic = [Dict(:buttons => dropData_arr, :direction => "down", :pad => Dict(:l => 10, :t => 10), :font => Dict(:size => 16, :family => "Arial"), :showactive => true, :x => 0.01, :xanchor => "center", :y => 1.1, :yanchor => "middle")]
    data_dic = Dict(:type => "sankey", :orientation => "h", :valueformat => ".0f", :textfont => Dict(:family => "Arial"), :node => Dict(:pad => 8, :thickness => 36, :line => Dict(:color => "white",:width => 0.01), :hoverinfo => "skip"))
    layout_dic = Dict(:width => 125*plotSize[1], :height => 125*plotSize[2], :updatemenus => menus_dic, :font => Dict(:size => 32, :family => "Arial"))
    fig = Dict(:data => [data_dic], :layout => layout_dic)
    plt.offline.plot(fig, filename="$(anyM.options.outDir)/energyFlowSankey_$(join(string.(dropDown),"_"))_$(anyM.options.outStamp).html")
    # </editor-fold>
end
# XXX define postions of nodes in energy flow graph
# function is mostly taken from [GraphPlot.jl](https://github.com/JuliaGraphs/GraphPlot.jl), which in turn references [IainNZ](https://github.com/IainNZ)'s [GraphLayout.jl](https://github.com/IainNZ/GraphLayout.jl)
function flowLayout(nodesCnt_int::Int,edges_smat::SparseMatrixCSC{Int64,Int64}, locsX_arr::Array{Float64,1} = 2*rand(nodesCnt_int).-1.0, locsY_arr::Array{Float64,1} = 2*rand(nodesCnt_int).-1.0; scaDist::Number = 0.5, maxIter::Int=5000, initTemp::Number=2.0)
# optimal distance bewteen vertices
k = scaDist * sqrt(4.0 / nodesCnt_int)
k² = k * k
# store forces and apply at end of iteration all at once
force_x = zeros(nodesCnt_int)
force_y = zeros(nodesCnt_int)
# iterate maxIter times
@inbounds for iter = 1:maxIter
# Calculate forces
for i = 1:nodesCnt_int
force_vec_x = 0.0
force_vec_y = 0.0
for j = 1:nodesCnt_int
i == j && continue
d_x = locsX_arr[j] - locsX_arr[i]
d_y = locsY_arr[j] - locsY_arr[i]
dist² = (d_x * d_x) + (d_y * d_y)
dist = sqrt(dist²)
if !( iszero(edges_smat[i,j]) && iszero(edges_smat[j,i]) )
# Attractive + repulsive force