From 17a1e660c43c20649385055ab74e6947254a6497 Mon Sep 17 00:00:00 2001 From: Risto0211 <2533895673@qq.com> Date: Tue, 8 Apr 2025 16:35:01 -0400 Subject: [PATCH 1/3] leaderboard exps for lptm --- config/lptm.json | 11 ++++ leaderboard.py | 23 +++++++- leaderboard/lptm.csv | 56 ++++++++++++++++++ leaderboard/moment.csv | 126 ++++++++++++++++++---------------------- leaderboard/timesfm.csv | 110 +++++++++++++++++------------------ leaderboard/ttm.csv | 110 +++++++++++++++++------------------ src/samay/dataset.py | 68 +++++++++++++++------- src/samay/metric.py | 2 +- src/samay/model.py | 30 +++++++++- 9 files changed, 328 insertions(+), 208 deletions(-) create mode 100644 config/lptm.json create mode 100644 leaderboard/lptm.csv diff --git a/config/lptm.json b/config/lptm.json new file mode 100644 index 0000000..148444c --- /dev/null +++ b/config/lptm.json @@ -0,0 +1,11 @@ +{ + "config": { + "task_name": "forecasting2", + "forecast_horizon": 192, + "head_dropout": 0.1, + "weight_decay": 0, + "freeze_encoder": true, + "freeze_embedder": true, + "freeze_head": false + } +} diff --git a/leaderboard.py b/leaderboard.py index 783c931..ce70fe9 100644 --- a/leaderboard.py +++ b/leaderboard.py @@ -3,13 +3,14 @@ import numpy as np import pandas as pd import time +import torch src_path = os.path.abspath(os.path.join("src")) if src_path not in sys.path: sys.path.insert(0, src_path) -from samay.model import TimesfmModel, MomentModel, ChronosModel, ChronosBoltModel, TinyTimeMixerModel, MoiraiTSModel -from samay.dataset import TimesfmDataset, MomentDataset, ChronosDataset, ChronosBoltDataset, TinyTimeMixerDataset, MoiraiDataset +from samay.model import TimesfmModel, MomentModel, ChronosModel, ChronosBoltModel, TinyTimeMixerModel, MoiraiTSModel, LPTMModel +from samay.dataset import TimesfmDataset, MomentDataset, ChronosDataset, ChronosBoltDataset, TinyTimeMixerDataset, MoiraiDataset, LPTMDataset from samay.utils import load_args, get_gifteval_datasets from samay.metric import * @@ -35,7 +36,7 @@ print(f"Time taken to load datasets: {end-start:.2f} seconds") -MODEL_NAMES = ["moirai", "chronos", "chronosbolt", "timesfm", "moment", "ttm"] +MODEL_NAMES = ["moirai", "chronos", "chronosbolt", "timesfm", "moment", "ttm", "lptm"] MONASH_NAMES = { # "weather": "1D", "tourism_yearly": ["1YE"], @@ -171,6 +172,9 @@ def calc_pred_and_context_len(freq): elif model_name == "moirai": arg_path = "config/moirai.json" args = load_args(arg_path) + elif model_name == "lptm": + arg_path = "config/lptm.json" + args = load_args(arg_path) for fname, freq, fs in filesizes: print(f"Evaluating {fname} ({freq})") @@ -255,6 +259,7 @@ def calc_pred_and_context_len(freq): start = time.time() metrics = model.evaluate(dataset) end = time.time() + print("Metrics: ", metrics) print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") @@ -268,6 +273,18 @@ def calc_pred_and_context_len(freq): end = time.time() print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + + elif model_name == "lptm": + args["config"]["task_name"] = "forecasting2" + dataset = LPTMDataset(name=fname, datetime_col='timestamp', task_name="forecasting2", + path=dataset_path, mode='test', seq_len=context_len, horizon=pred_len) + args["config"]["forecast_horizon"] = dataset.forecast_horizon + model = LPTMModel(**args) + start = time.time() + metrics = model.evaluate(dataset, task_name="forecasting2") + end = time.time() + print(f"Size of dataset: {fs:.2f} MB") + 
print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") print("Evaluation done!") diff --git a/leaderboard/lptm.csv b/leaderboard/lptm.csv new file mode 100644 index 0000000..8c912ef --- /dev/null +++ b/leaderboard/lptm.csv @@ -0,0 +1,56 @@ +dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps +us_births (M),0.0,0.19s,0.055124827,0.15998736,9.530034,14372.004,0.23478676,0.0700507946770209,1.9974595,16067.119,9.685108923182485,, +ett1 (W),0.01,0.15s,0.0488535314798355,0.1674365997314453,16743.66015625,16743.66015625,0.2210283428430557,22102.834284305573,1.999502420425415,16743.66015625,16743.65997314453,, +ett2 (W),0.01,0.15s,0.0428696312010288,0.1690398752689361,16903.986328125,16903.986328125,0.2070498317480087,20704.98317480088,1.9995646476745603,16903.986328125,16903.987526893616,, +saugeenday (M),0.02,0.15s,0.0786341056227684,0.218505322933197,38.17536163330078,21294.53125,0.2804177403450012,0.0732781777464692,1.995750069618225,21904.072265625,-1621.458096894606,, +us_births (W),0.02,0.15s,0.0783709809184074,0.1206122413277626,5.576417446136475,9935.3740234375,0.2799481749534607,0.0683924208324503,1.9988937377929688,12065.775390625,-5.670217406018904,, +ett1 (D),0.05,0.15s,0.3522150814533233,0.2378393411636352,1.850512146949768,13859.9970703125,0.5934771299362183,0.0933311141338214,1.9892314672470093,0.0815224200487136,-2.6057982964047604,, +ett2 (D),0.05,0.15s,0.1101309061050415,0.1997541040182113,3.6726791858673096,15349.4443359375,0.3318597674369812,0.0605107324174662,1.980210423469544,0.5620905160903931,-10.68909426314578,, +solar (W),0.06,0.29s,0.0408655889332294,0.1665971279144287,16659.712890625,16659.712890625,0.2021523863077163,20215.23863077164,1.9998334646224976,16659.712890625,16659.71279144287,, +saugeenday (W),0.07,0.15s,0.043898481875658,0.1654233038425445,36.71079635620117,16095.8466796875,0.2095196396112442,0.0532047780719569,1.9951443672180176,16556.044921875,292.12913297641086,, +jena_weather (D),0.08,0.15s,1.1020350456237793,0.5094773173332214,1.7396596670150757,20313.896484375,1.0497785806655884,0.076471509717966,1.9494208097457888,0.0722709000110626,2.2042390728710592,, +us_births (D),0.13,0.15s,0.1322969645261764,0.2508893311023712,20.383893966674805,23872.9609375,0.3637264966964721,0.0904531567871558,1.9978073835372925,25110.4140625,23.47373915911368,, +hierarchical_sales (W),0.15,0.22s,0.6720020174980164,0.5001465082168579,1.0001691579818726,8923.009765625,0.8197572827339172,0.0446911505279487,1.731558918952942,0.1060729026794433,-13.37932130534372,, +bizitobs_l2c (H),0.18,0.16s,0.0814106613397598,0.2050499469041824,7.534029483795166,17090.337890625,0.2853255271911621,0.0990571243007222,1.9766982793807983,0.3998841047286987,-14.275825010002904,, +M_DENSE (D),0.21,0.15s,0.471742033958435,0.391443133354187,2.440467834472656,14404.3330078125,0.6868348121643066,0.0892926741198714,1.9019259214401243,0.0964324325323104,-1.7703457783058258,, +covid_deaths (D),0.27,0.43s,130.1488494873047,2.7232394218444824,0.8122357130050659,18173.26171875,11.408279418945312,0.0572881135474539,1.977840662002564,0.0461527332663536,1.0850551050704809,, +bizitobs_application (10s),0.33,0.15s,108.9468994140625,1.1500740051269531,1.178505539894104,21815.099609375,10.437763214111328,0.0764751087938118,1.9990524053573608,115007.3984375,1.3150540833773166,, +solar (D),0.35,0.29s,0.1216193586587905,0.2379303574562072,1.517604112625122,8038.330078125,0.3487396836280823,0.1201081473073024,1.8227678537368768,0.1086352318525314,2.985067356124781,, 
+hospital (ME),0.35,0.91s,0.0339378900825977,0.1612531393766403,16125.314453125,16125.314453125,0.184222400188446,18422.240018844604,1.999804735183716,16125.314453125,16125.31393766403,, +saugeenday (D),0.38,0.15s,0.0456870310008525,0.1747212260961532,21.31578254699707,16723.7890625,0.2137452512979507,0.1715811729333612,1.99677836894989,17587.24609375,-21.707514060173256,, +car_parts_with_missing (ME),0.58,2.99s,0.024675590917468,0.1338994950056076,13389.9482421875,13389.9482421875,0.157084658741951,15708.465874195095,1.9996201992034912,13389.9482421875,13389.94950056076,, +electricity (W),0.66,0.5s,84294088.0,670.1434326171875,0.6281120181083679,16702.03515625,9181.181640625,0.0277522243026582,1.9066163301467896,205.3894958496093,1.0005484827255255,, +hierarchical_sales (D),0.9,0.22s,0.4668669998645782,0.4229911267757416,1.190596342086792,8809.7080078125,0.6832766532897949,0.0667929249380931,1.8036607503890991,0.1055179387331008,-3.976076737422468,, +kdd_cup_2018_with_missing (D),1.08,0.43s,0.4801678359508514,0.406242161989212,1.4104912281036377,10150.95703125,0.692941427230835,0.0340770804953835,1.8425010442733765,0.1734642833471298,-2.280918302705925,, +LOOP_SEATTLE (D),1.13,0.5s,0.6134179830551147,0.5059168338775635,3.0927038192749023,10673.720703125,0.783210039138794,0.0331757751805067,1.8612024784088133,0.1058104261755943,-7.301689346182965,, +SZ_TAXI (H),1.14,0.29s,0.4677931070327759,0.4296532273292541,1.002686858177185,9919.7001953125,0.6839540004730225,0.0596565185897341,1.8430001735687256,0.0948102101683616,85.13545565039159,, +ett1 (H),1.22,0.15s,0.1355438977479934,0.1979636400938034,3.063187837600708,14486.1767578125,0.3681628704071045,0.0496718389514035,1.983390212059021,0.512479841709137,12.939105197520448,, +ett2 (H),1.26,0.15s,0.355162501335144,0.2562243044376373,2.777869462966919,14471.5625,0.5959551334381104,0.0861516062501054,1.9884074926376345,0.0735671669244766,2.782024553505087,, +jena_weather (H),1.65,0.15s,0.7593508362770081,0.3370769023895263,1.182328462600708,7531.78662109375,0.8714073896408081,0.0646120192046349,1.9661555290222168,0.0549071468412876,2.077227526390641,, +bizitobs_l2c (5T),1.68,0.15s,0.033086035400629,0.1349213719367981,13492.1376953125,13492.1376953125,0.1818956732749939,18189.56732749939,1.999726414680481,13492.1376953125,13492.137193679808,, +restaurant (D),1.77,0.98s,179.8831024169922,5.821052551269531,0.787813127040863,10800.744140625,13.412050247192385,0.040878555871032,1.907026290893555,0.0863948315382003,1.050822686913354,, +m4_hourly (h),2.43,0.58s,0.5834542512893677,0.5132453441619873,1.715920090675354,9203.9140625,0.7638417482376099,0.0745608099488633,1.8717085123062127,0.1191538497805595,7.795115736164987,, +bizitobs_service (10s),3.07,0.15s,6179767.5,223.1941070556641,0.9511830806732178,9392.5517578125,2485.913818359375,0.0775583295382262,1.967041015625,1.13612961769104,1.007010146282544,, +M_DENSE (H),3.7,0.15s,0.4351095259189605,0.4425162076950073,3.800527334213257,18049.697265625,0.6596283316612244,0.093136951281485,1.935481071472168,0.1026388704776763,7.144371597596367,, +ett1 (15T),4.4,0.15s,0.4200723171234131,0.2857143580913543,3.627429246902466,15582.3154296875,0.6481298804283142,0.1209688026848626,1.991615653038025,0.0603000782430172,2.325986454182912,, +ett2 (15T),4.57,0.15s,0.0720074847340583,0.1562635004520416,3.2718117237091064,12088.7197265625,0.2683421075344085,0.0441081987228431,1.9748386144638064,0.5689333081245422,-41.01642600107952,, +SZ_TAXI 
(15T),4.58,0.29s,0.5773839950561523,0.441036969423294,0.8938093185424805,8282.0107421875,0.7598578929901123,0.0459518365525932,1.8631014823913568,0.0888485759496688,-26.884405549314884,, +electricity (D),4.63,0.5s,1.3991117477416992,0.604572594165802,1.6717993021011353,19415.26171875,1.1828405857086182,0.0465663763242852,1.961073994636536,0.1235982030630111,1.8149414684473515,, +solar (H),5.97,0.29s,0.3431311249732971,0.3517597615718841,3.298314094543457,10524.7236328125,0.5857739448547363,0.1483445932118295,1.8236889839172363,0.0906566008925437,-60.15128201697731,, +bitbrains_rnd (H),6.1,1.19s,2.707172393798828,0.4226620495319366,1.4640172719955444,7555.45751953125,1.6453486680984497,0.0085185260899419,1.860836625099182,0.0959804356098175,8.61793117536629,, +m4_weekly (W-SUN),7.18,0.5s,5.342000961303711,1.0753824710845947,1.2239097356796265,9670.59375,2.311276912689209,0.0747243814591452,1.8840149641036987,0.0883786529302597,1.757597133946465,, +jena_weather (10T),7.18,0.15s,0.8216581344604492,0.3969688415527344,1.2204697132110596,12125.37890625,0.9064536094665528,0.0739428990339585,1.969690203666687,0.0305895842611789,-809.8966612421868,, +kdd_cup_2018_with_missing (H),14.28,0.43s,47.05148696899414,0.9063844084739684,1.1563570499420166,13170.6767578125,6.859408855438232,0.0352711236766285,1.859691143035889,0.1635989248752594,24.882323397960413,, +bitbrains_fast_storage (H),15.64,2.87s,73.87757110595703,0.7784671187400818,1.2445563077926636,11203.3740234375,8.595206260681152,0.0057955758779222,1.8235962390899656,0.1379421949386596,2.738425337938606,, +LOOP_SEATTLE (H),27.05,0.5s,0.7402505278587341,0.4042249023914337,2.762101650238037,9959.802734375,0.8603781461715698,0.041880195075153,1.7668596506118774,0.0780623778700828,-3.436187412442018,, +solar (10T),33.4,0.29s,5.66863489151001,1.317663311958313,2.003763198852539,11768.8837890625,2.380889415740967,0.1866724966185662,1.949769139289856,0.1504184901714325,1.120214692282918,, +m4_yearly (YE-DEC),51.4,25.31s,0.2163411974906921,0.3483390808105469,2.7536509037017822,9430.61328125,0.4651249349117279,0.02422227170925,1.692229986190796,0.2797471880912781,1.901229521756186,, +bitbrains_rnd (5T),63.69,1.2s,12389.15234375,4.439082622528076,1.0363279581069946,11291.8369140625,111.30657196044922,0.0112550285476126,1.822264671325684,1.3631213903427124,1.0846484127269294,, +electricity (H),110.58,0.51s,0.2479172348976135,0.3492029905319214,3.229201316833496,16960.265625,0.4979128837585449,0.099414130399371,1.917153000831604,0.100257471203804,11.516904002443791,, +temperature_rain_with_missing (D),113.99,35.5s,1382.498046875,1.788382887840271,1.0194227695465088,10131.83203125,37.18195724487305,0.0014061144000396,1.7418212890625,0.1595958620309829,1.2831563627999287,, +bitbrains_fast_storage (5T),160.06,2.88s,6986.3017578125,2.822540283203125,1.0647568702697754,10402.6728515625,83.58409881591797,0.0240544837785082,1.887996912002564,0.8851537704467773,1.20631119139654,, +m4_quarterly (QE-DEC),163.93,26.59s,1.3572735786437988,0.5758647918701172,1.5053298473358154,7050.94091796875,1.1650208234786987,0.0116367779707989,1.845426678657532,0.1332866549491882,1.401992660844506,, +m4_daily (D),316.28,4.8s,243545.21875,5.504239559173584,0.5589375495910645,4336.58935546875,493.5030212402344,0.0057169779041742,1.9301851987838743,0.542895495891571,1.1884039907109825,, +LOOP_SEATTLE (5T),324.08,0.52s,0.5425701141357422,0.4436022043228149,1.4039080142974854,10133.951171875,0.7365936040878296,0.0408843181593714,1.835789561271668,0.0997983217239379,-3.574192900911173,, 
+electricity (15T),442.39,0.5s,1.5613300800323486,0.395835131406784,1.560932755470276,9775.2841796875,1.2495319843292236,0.0295343845566381,1.9416948556900024,0.0691113844513893,2.4643661361904665,, +m4_monthly (ME),1025.34,53.02s,11.853350639343262,1.0829262733459473,1.1910291910171509,9881.9052734375,3.4428694248199463,0.0018059994365167275,1.8517600297927856,0.09637115150690079,1.8456140286162748,, diff --git a/leaderboard/moment.csv b/leaderboard/moment.csv index bdccd3b..be212d1 100644 --- a/leaderboard/moment.csv +++ b/leaderboard/moment.csv @@ -1,72 +1,56 @@ dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -us_births,0.13,16.53s,0.0166749041527509,0.0132494084537029,0.6622852683067322,0.0336879082024097,0.1291313469409942,0.0200039861313061,0.2919767796993255,2393.22509765625,1.7653433739843682,, -ett1,0.05,0.49s,0.0845161378383636,0.0771553590893745,0.818662166595459,0.1507003307342529,0.2907165884971618,0.0408028170847522,0.3720319271087646,0.0229524467140436,37.06777924404701,, -ett2,0.05,0.48s,0.1525356918573379,0.0985602214932441,0.8158197999000549,0.159540593624115,0.3905581831932068,0.0520415722981005,0.3430469930171966,0.0324768535792827,-0.7515478683683955,, -saugeenday,0.38,57.42s,0.0181519500911235,0.0106288585811853,0.8184567093849182,0.0984068512916565,0.1347291767597198,0.0080451322495082,0.2162542045116424,856.370361328125,65.20783161905682,, -solar,0.35,0.69s,1.0479254722595217,0.6552554965019226,1.2421728372573853,1.644118309020996,1.0236823558807373,0.2602073408719523,1.2416865825653076,0.1769601106643676,-1.5701247442115474,, -jena_weather,0.08,0.26s,0.4467118084430694,0.290574699640274,,-0.3770225942134857,0.6683650016784668,0.0439291477903074,0.626776397228241,0.0621072389185428,-6.0627986557802815,, -hierarchical_sales,0.9,6.09s,0.9766348600387572,0.6037952899932861,1.2904167175292969,-0.0605682358145713,0.9882484078407288,0.0296427630370565,1.2487094402313232,0.1234115213155746,-14.845645767332355,, -bizitobs_l2c,0.18,5.12s,0.1141704395413398,0.094519555568695,0.5653119683265686,0.0437845885753631,0.337891161441803,0.0411253008501759,0.2735987901687622,0.0017722472548484,-43.61411877196251,, -M_DENSE,0.21,0.48s,0.65444016456604,0.4488910138607025,0.7423524856567383,0.2140763998031616,0.8089747428894043,0.0746180797828536,0.8750887513160706,0.0955005586147308,-12.196748869327852,, -covid_deaths,0.27,1.13s,2532.86962890625,15.704353332519531,0.5370470285415649,0.5348979234695435,50.32762145996094,0.0688400358056701,0.8886440992355347,0.0817901268601417,0.8951871516715,, -bizitobs_application,0.33,20.28s,0.031650137156248,0.0232451893389225,0.8000913262367249,-9.096251487731934,0.1779048591852188,0.0292253176385202,0.1688371747732162,2183.644775390625,-7.934246776719986,, -hospital,0.35,2.76s,1.871889352798462,1.0410010814666748,0.5708366632461548,38.29072570800781,1.3681700229644775,0.0623355435952419,1.2547467947006226,0.1276404559612274,1.871985084498025,, -car_parts_with_missing,0.58,9.95s,3.5560872554779053,0.8787520527839661,0.7763674855232239,2191.994140625,1.885759115219116,0.0130401857953503,1.6353957653045654,0.1159390807151794,10.414244728513925,, -electricity,0.66,1.37s,231504363520.0,33604.7578125,0.5374543070793152,36700.98828125,481149.0,0.0324357633701603,1.2939711809158323,0.7841228246688843,0.8005288085222292,, 
-kdd_cup_2018_with_missing,1.08,1.12s,0.8444492816925049,0.5074900984764099,0.636409342288971,-0.1625679284334182,0.9189392328262328,0.0264241123643264,0.993318736553192,0.1114950478076934,-4.843327060262989,, -LOOP_SEATTLE,1.13,1.37s,1.1636507511138916,0.7680737972259521,0.962784230709076,-0.2038627117872238,1.0787264108657837,0.0758186484268039,1.0627912282943726,0.1333916038274765,-3.549600403287944,, -us_births (M),0.0,0.26s,0.0490848012268543,0.025092938914895,,0.198994442820549,0.2215508967638015,0.0619808429064959,0.332131415605545,2407.629638671875,1.0232207619288771,, -ett1 (W),0.01,0.26s,0.0661440715193748,0.0679000094532966,,0.1340852975845337,0.2571848928928375,0.0600888141994269,0.3639847040176391,0.0118538662791252,13.56712316983709,, -ett2 (W),0.01,0.5s,0.1784712672233581,0.1021255776286125,,0.3991040885448456,0.4224585890769958,0.0620690916861361,0.4547058641910553,0.0119766816496849,-0.6570217851294525,, -saugeenday (M),0.02,0.52s,0.0146793872117996,0.0103598264977335,1.1410484313964844,0.114974558353424,0.121158517897129,0.0245601202086958,0.2466829866170883,1111.4794921875,-15.320425227975118,, -us_births (W),0.02,1.36s,0.0194058045744895,0.0143667459487915,1.4129470586776731,0.1352113783359527,0.1393047124147415,0.0343529098623171,0.2357538044452667,2704.560546875,0.7064739408163458,, -ett1 (D),0.05,0.49s,0.0856200754642486,0.0776782855391502,0.8242107033729553,0.1468994617462158,0.2926090955734253,0.0410684352954762,0.3727368116378784,0.0231287106871604,37.319009002160186,, -ett2 (D),0.05,0.57s,0.1532271057367324,0.0985804125666618,0.8159868717193604,0.1755074113607406,0.3914423286914825,0.0521593839939005,0.3719439208507538,0.0325935669243335,-0.751701830666341,, -solar (W),0.06,0.69s,0.3245261907577514,0.3878287076950073,0.8207972049713135,-1.0662380456924438,0.5696719884872437,0.1840309984690182,0.8310236930847168,0.1476865410804748,-0.8024131291804086,, -saugeenday (W),0.07,11.41s,0.0171893555670976,0.0098273605108261,0.7497754096984863,0.1031473726034164,0.1311081796884536,0.0117120778330479,0.20797760784626,1062.8035888671875,31.996620891779777,, -jena_weather (D),0.08,0.5s,0.4404400885105133,0.2876468002796173,,-0.4747923910617828,0.6636565923690796,0.0436196815437357,0.6286620497703552,0.0604314506053924,-6.001708463378722,, -us_births (D),0.13,16.44s,0.0165644250810146,0.0131850000470876,0.6590656042098999,0.0071459645405411,0.1287028491497039,0.0199376067116119,0.2973404228687286,2406.93896484375,1.7567616358453075,, -hierarchical_sales (W),0.15,0.49s,1.111017107963562,0.6712689399719238,0.6610993146896362,-0.2276024222373962,1.05404794216156,0.0589132884568094,1.2139064073562622,0.1194732338190078,-4.8134139784308285,, -bizitobs_l2c (H),0.18,5.16s,0.1132991909980773,0.0941098034381866,0.5628612637519836,0.042415402829647,0.3365994393825531,0.040968083188504,0.2739208936691284,0.0018358565866947,-43.42504701872018,, -M_DENSE (D),0.21,0.54s,0.6656246781349182,0.4525275528430938,0.7483664155006409,0.2164407223463058,0.8158582448959351,0.0752529991130594,0.8902981877326965,0.0965064167976379,-12.29555671210528,, -covid_deaths (D),0.27,1.13s,2528.41943359375,15.66344928741455,0.5356481671333313,0.5312768220901489,50.283390045166016,0.0687795343933266,0.8796680569648743,0.0815204679965972,0.8928555194893874,, -bizitobs_application (10s),0.33,22.71s,0.0317411497235298,0.0232481099665164,0.8001918196678162,-9.959293365478516,0.1781604588031768,0.0292673062613005,0.1536118537187576,2209.325439453125,-7.935243670302386,, -solar 
(D),0.35,1.33s,1.047087788581848,0.6567782163619995,1.2450594902038574,1.7552998065948486,1.0232731103897097,0.2601033157509053,1.2456032037734983,0.1776239722967147,-1.5737734890806443,, -hospital (ME),0.35,2.86s,1.859989047050476,1.0373529195785522,0.5688363313674927,37.09786605834961,1.363814115524292,0.0621370829846621,1.2558231353759766,0.1271890103816986,1.8654247602467016,, -saugeenday (D),0.38,64.48s,0.0180799681693315,0.0105487462133169,0.812287449836731,0.0973807573318481,0.1344617754220962,0.0080291648163463,0.2135202288627624,865.2394409179688,64.71634386854565,, -car_parts_with_missing (ME),0.58,9.97s,3.562715530395508,0.875319242477417,0.7733346819877625,2163.47216796875,1.887515664100647,0.0130523324813129,1.6376537084579468,0.1151701137423515,10.37356189138631,, -electricity (W),0.66,3.86s,230903316480.0,33545.01953125,0.5364989042282104,40271.640625,480524.0,0.0323936301596448,1.3060535192489624,0.7850792407989502,0.7991057298207234,, -hierarchical_sales (D),0.9,16.82s,0.9756141304969788,0.6047087907791138,1.29236900806427,-0.0754419490694999,0.9877318143844604,0.0296272676845832,1.255674123764038,0.1236139833927154,-14.86810620930554,, -kdd_cup_2018_with_missing (D),1.08,1.13s,0.8476442694664001,0.5114850997924805,0.6414192318916321,-0.1579349040985107,0.9206759929656982,0.0264740529299653,1.0037513971328735,0.1125412508845329,-4.88145410557481,, -LOOP_SEATTLE (D),1.13,1.38s,1.1801847219467163,0.7724948525428772,0.9683259129524232,-0.2286671549081802,1.0863630771636963,0.0763553940847966,1.061790943145752,0.134036049246788,-3.5700320073767613,, -SZ_TAXI (H),1.14,2.58s,0.1794196963310241,0.2704257071018219,0.742323100566864,0.9259967803955078,0.4235796332359314,0.0629424358185336,0.4413276612758636,0.0920828655362129,-0.4234020415293309,, -ett1 (H),1.22,71.91s,0.0931190550327301,0.0700619220733642,1.2231614589691162,0.2108829170465469,0.3051541447639465,0.0278852932860295,0.3243643641471863,0.0397731065750122,-18.09391609509558,, -ett2 (H),1.26,54.03s,0.0527420304715633,0.0486048087477684,1.0875681638717651,0.1189476400613784,0.2296563237905502,0.0182699874468558,0.3114534318447113,0.0579129941761493,-0.9500558201964512,, -jena_weather (H),1.65,20.61s,0.269149512052536,0.1946270763874054,1.0476793050765991,-0.025795079767704,0.5187962055206299,0.0046744943853909,0.4797450304031372,0.0455239117145538,4.279154276658326,, -bizitobs_l2c (5T),1.68,93.12s,0.1061218082904815,0.0827776566147804,1.6546341180801392,0.0960157662630081,0.3257634341716766,0.0395778664079169,0.196520447731018,0.0183309577405452,60.77264861275297,, -restaurant (D),1.77,8.25s,32.19078826904297,1.542858362197876,0.5222368836402893,21.732288360595703,5.67369270324707,0.021389853140395,1.0622296333312988,0.163014754652977,0.8957319824794684,, -m4_hourly (h),2.43,22.74s,0.6365521550178528,0.6075160503387451,2.4075798988342285,-0.0194074865430593,0.7978422045707703,0.0582948840714332,0.965739369392395,0.1226370185613632,1.5180986175834796,, -bizitobs_service (10s),3.07,38.71s,0.6393813490867615,0.4555206298828125,0.7736144065856934,-0.4311994910240173,0.7996132373809814,0.0578584589820048,0.981988787651062,0.0674135088920593,66.96168597556668,, -M_DENSE (H),3.7,42.05s,0.5782449841499329,0.444500982761383,1.7446072101593018,0.245728924870491,0.7604241967201233,0.0899721087789721,0.9053943753242492,0.0985398814082145,-111.408677600323,, -ett1 
(15T),4.4,312.68s,0.0770852044224739,0.0642813295125961,1.3816813230514526,-0.0450062677264213,0.2776422202587127,0.0248858562863315,0.2945191562175751,0.0444143638014793,-29.72643510740077,, -ett2 (15T),4.57,170.67s,0.0352975837886333,0.0388589762151241,1.1798412799835205,0.1034263670444488,0.1878765076398849,0.0121796422919794,0.2683697342872619,0.0611041486263275,-0.8056754890308132,, -SZ_TAXI (15T),4.58,17.77s,0.3003464341163635,0.3433499932289123,1.583962321281433,-1.0764042139053345,0.5480387210845947,0.0426858127010438,0.6180946826934814,0.0975749045610427,-0.734930536398494,, -electricity (D),4.63,12.82s,265048064.0,909.3511962890624,1.0505276918411257,9888.9248046875,16280.296875,0.0079866265077432,0.6811089515686035,1.2647544145584106,0.153261294375977,, -solar (H),5.97,60.66s,0.4952342212200165,0.4485998749732971,5.05915117263794,-0.4632841348648071,0.7037287950515747,0.1873204244722185,0.9461215734481812,0.1192889809608459,-3.141420588442726,, -bitbrains_rnd (H),6.1,7.56s,2541.150634765625,4.16558313369751,0.6189665794372559,5294.056640625,50.40982818603516,0.0065061475422719,0.7933628559112549,0.073084145784378,1.014913167418809,, -m4_weekly (W-SUN),7.18,29.96s,2.014073371887207,0.2677460014820099,1.0587129592895508,-0.0184317361563444,1.4191805124282837,0.0067748010896629,0.1633300334215164,0.0710709914565086,0.1223636577958742,, -jena_weather (10T),7.18,129.14s,0.2310131639242172,0.1709593832492828,0.8379159569740295,0.4183250367641449,0.4806382954120636,0.0018715012395996,0.4648206830024719,0.0465143471956253,4.276199613918661,, -kdd_cup_2018_with_missing (H),14.28,128.7s,1.903819441795349,0.616597056388855,4.51470422744751,-0.9062976241111756,1.3797895908355713,0.0370714838689682,1.1574369668960571,0.1389963328838348,10.823472138906274,, -bitbrains_fast_storage (H),15.64,18.96s,10086.7998046875,2.826951742172241,0.5318763256072998,564.5944213867188,100.4330596923828,0.0093783960005972,0.8978911638259888,0.9046183824539183,0.6542457009370054,, -LOOP_SEATTLE (H),27.05,121.61s,0.7220565676689148,0.6239529252052307,4.835772037506104,0.7766930460929871,0.849739134311676,0.0553899985984129,1.426215887069702,0.1485606729984283,6.506469472198424,, -solar (10T),33.4,387.49s,0.3633464574813843,0.3939985632896423,0.9862849712371826,0.1683084964752197,0.6027822494506836,0.1532161214761041,0.8634337186813354,0.106142945587635,-1.932767178107501,, -m4_yearly (YE-DEC),51.4,85.37s,0.2249934822320938,0.3888959884643554,1.2640678882598877,1.3605670928955078,0.4743347764015198,0.0156129467417141,1.4391971826553345,0.2767557203769684,1.1485716617412427,, -bitbrains_rnd (5T),63.69,320.12s,473.8028564453125,2.5002923011779785,14.552663803100586,36466.94140625,21.767013549804688,0.0124533049345982,0.7882186770439148,0.0320991352200508,0.4796458303331513,, -electricity (H),110.58,513.65s,428889.8125,42.35646438598633,38.55291366577149,2073.8056640625,654.8967895507812,0.0074876989582218,0.8307960629463196,1.041404128074646,0.1693215336592214,, -temperature_rain_with_missing (D),113.99,239.13s,10.15415382385254,0.8827998638153076,1.944754958152771,8.244161605834961,3.186558246612549,0.0034670636363697,1.4160765409469604,0.1196500360965728,2.4729034145101805,, -bitbrains_fast_storage (5T),160.06,801.34s,1755.422607421875,2.4329330921173096,13.81318473815918,2202.016357421875,41.897762298583984,0.0038607244908998,0.8529143929481506,0.8787821531295776,0.5712061641206433,, -m4_quarterly 
(QE-DEC),163.93,267.8s,0.0343928597867488,0.0017406134866178,0.0162255093455314,0.0002408254076726,0.1854531168937683,0.0008958026187081,0.0006573513965122,0.0275926832109689,0.0059233256104646,, -m4_daily (D),316.28,26.11m,0.0361730828881263,0.0036603524349629,0.2650297284126282,0.0032658216077834,0.1901922225952148,0.0001379508480314,0.0050389510579407,0.2410273253917694,0.0056191225238017,, -LOOP_SEATTLE (5T),324.08,25.98m,0.6140819191932678,0.4618953168392181,2.7076990604400635,0.4295462965965271,0.7836337685585022,0.0466134961183778,1.013070583343506,0.1044607460498809,1.694795358113825,, -electricity (15T),442.39,34.73m,44896.16796875,12.45554256439209,103.10625457763672,513.2125244140625,211.8871612548828,0.0113598928018075,0.8586267232894897,1.054625153541565,0.2306723787020087,, -m4_monthly (ME),1025.34,73.67m,0.00413757236674428,0.0005038601229898632,0.042170461267232895,0.000296300946502015,0.06432396173477173,0.0029465290793323916,0.0004989044973626733,0.010481921955943108,0.0043333082261389575,, +us_births (M),0.0,0.26s,0.0469331257045269,0.0245261006057262,0.983124852180481,0.1849082857370376,0.2166405469179153,0.0606071286635863,0.3136320412158966,2457.217529296875,1.0001066608438942,, +ett1 (W),0.01,0.27s,0.0670214816927909,0.0680913180112838,0.5938838720321655,0.1770760267972946,0.2588850855827331,0.0604860481173963,0.3784884214401245,0.0113183036446571,13.605348595585252,, +ett2 (W),0.01,0.26s,0.1741187423467636,0.1021774187684059,0.5133547186851501,0.3896964192390442,0.4172753691673279,0.0613075549104121,0.4377534985542297,0.0125496108084917,-0.6573553035192732,, +saugeenday (M),0.02,1.02s,0.0147482426837086,0.0104784658178687,1.0255438089370728,0.1093523502349853,0.1214423403143882,0.0246176540313836,0.2288037091493606,1087.9429931640625,-15.495872648221535,, +us_births (W),0.02,1.47s,0.0196240544319152,0.0145208276808261,0.6982958316802979,0.1551779806613922,0.1400858759880066,0.0345455468618481,0.2659468054771423,2687.662109375,0.714050794254581,, +ett1 (D),0.05,0.61s,0.0849475190043449,0.0774221494793891,0.6829198002815247,0.1491615623235702,0.2914575636386871,0.0409068148418816,0.370927482843399,0.0230760481208562,37.19595345512211,, +ett2 (D),0.05,0.49s,0.1526618748903274,0.0984543189406395,0.5820711851119995,0.173407033085823,0.3907196819782257,0.0520630918848312,0.3652094304561615,0.0328069739043712,-0.7507403332750386,, +solar (W),0.06,0.7s,0.3385293483734131,0.3953697681427002,2.012068033218384,-1.0926722288131714,0.581832766532898,0.1879595049975641,0.8249495625495911,0.1504873037338256,-0.8180154963881773,, +saugeenday (W),0.07,6.82s,0.017284395173192,0.0098348129540681,0.9754614233970642,0.1165607497096061,0.1314701288938522,0.0117444112639239,0.2292452156543731,1064.36083984375,32.020885087732644,, +jena_weather (D),0.08,0.26s,0.4461028575897217,0.2888902127742767,1.1433098316192627,-0.4343129694461822,0.6679093241691589,0.0438991978010039,0.6291244029998779,0.0615952052175998,-6.027652083420433,, +us_births (D),0.13,16.6s,0.016859732568264,0.0133043425157666,0.7950576543807983,0.015121298842132,0.1298450380563736,0.0201145453991546,0.2798978090286255,2399.68212890625,1.772662756038975,, +hierarchical_sales (W),0.15,0.49s,1.1032949686050415,0.6678270101547241,0.7787802815437317,-0.2592039406299591,1.050378441810608,0.0587081912083629,1.2033673524856567,0.1188364624977111,-4.7887332102493305,, +bizitobs_l2c 
(H),0.18,5.18s,0.1140130683779716,0.0944352895021438,1.2964223623275757,0.0448632538318634,0.3376582264900207,0.0410969499459093,0.275805652141571,0.0018112994730472,-43.57523591631536,, +M_DENSE (D),0.21,0.69s,0.6599177718162537,0.4499979019165039,1.6254125833511353,0.1941371411085128,0.812353253364563,0.074929706278487,0.8791477680206299,0.0957659482955932,-12.22682395487469,, +covid_deaths (D),0.27,2.24s,2526.121826171875,15.654495239257812,0.6176470518112183,0.5333730578422546,50.26054000854492,0.0687482792436985,0.8818761706352234,0.081499233841896,0.8923451165013979,, +bizitobs_application (10s),0.33,40.76s,0.0317143015563488,0.023255256935954,0.771091103553772,-8.387896537780762,0.1780851036310196,0.0292549272917092,0.1583029925823211,2176.161865234375,-7.937683135019012,, +solar (D),0.35,1.49s,1.038826584815979,0.6574079990386963,2.536269187927246,1.4319617748260498,1.019228458404541,0.2590752154502924,1.2444651126861572,0.1777056902647018,-1.5752825757948137,, +hospital (ME),0.35,4.4s,1.8646297454833984,1.0390050411224363,0.7106813192367554,35.79023742675781,1.3655145168304443,0.0622145554025414,1.2578766345977783,0.1273369640111923,1.868395695573274,, +saugeenday (D),0.38,120.18s,0.0180988106876611,0.0105823501944541,1.118381142616272,0.102086529135704,0.1345318257808685,0.0080333477588536,0.2212738990783691,864.545654296875,64.92250361062824,, +car_parts_with_missing (ME),0.58,21.4s,3.5556981563568115,0.8814246654510498,0.841040849685669,2245.123046875,1.8856558799743648,0.0130394719147908,1.6444664001464844,0.1163924783468246,10.445918330062115,, +electricity (W),0.66,2.98s,231350681600.0,33611.11328125,0.3989854454994201,36788.98828125,480989.28125,0.0324249962282131,1.2979296445846558,0.7840292453765869,0.8006802077929641,, +hierarchical_sales (D),0.9,6.13s,0.9645678400993348,0.610886812210083,0.830234706401825,-0.0141591047868132,0.9821241497993468,0.0112084922511458,1.302215337753296,0.1268036961555481,-12.15088636629144,, +kdd_cup_2018_with_missing (D),1.08,1.96s,0.8443799614906311,0.5089293122291565,0.8750379681587219,-0.1028765514492988,0.91890150308609,0.026423027445044,1.0007576942443848,0.1119600459933281,-4.857062466993302,, +LOOP_SEATTLE (D),1.13,2.01s,1.1608164310455322,0.7672717571258545,2.2135884761810303,-0.2031948417425155,1.0774118900299072,0.0757262568879474,1.0640236139297483,0.1328028738498687,-3.5458938299442857,, +SZ_TAXI (H),1.14,1.62s,0.1768087744712829,0.2677923440933227,0.5676416158676147,-0.5019075870513916,0.4204863607883453,0.0624827864699234,0.4430662393569946,0.0903791263699531,-0.4250639896581444,, +ett1 (H),1.22,67.85s,0.0929810479283332,0.0700207650661468,0.603032112121582,0.2185447961091995,0.3049279451370239,0.027864622936155,0.350702702999115,0.0398958101868629,-18.083287048485392,, +ett2 (H),1.26,41.84s,0.0528179183602333,0.0486167892813682,0.4001955986022949,0.1135616898536682,0.2298214882612228,0.018283126875181,0.3037349581718445,0.0578386858105659,-0.9502899981712,, +jena_weather (H),1.65,20.71s,0.2675952613353729,0.1935021132230758,0.7373051047325134,-0.0232729203999042,0.5172961354255676,0.0046609783473736,0.466286838054657,0.0451014675199985,4.254420354610698,, +bizitobs_l2c (5T),1.68,79.39s,0.1058427467942237,0.0826850086450576,1.129166603088379,0.0969898700714111,0.3253348171710968,0.0395257925880536,0.1973535120487213,0.0182853229343891,60.70462950302088,, +restaurant 
(D),1.77,6.81s,32.15745162963867,1.5387860536575315,0.540695071220398,21.395565032958984,5.6707539558410645,0.0213787740462814,1.0571155548095703,0.162871703505516,0.8933677362910404,, +m4_hourly (h),2.43,8.33s,1.1153695583343506,0.8282145261764526,1.408022165298462,-0.117620512843132,1.0561106204986572,0.0682783747085667,1.3844348192214966,0.1808195859193802,2.545130212945835,, +bizitobs_service (10s),3.07,20.64s,0.6430262327194214,0.45716592669487,0.5969609022140503,-0.3185150623321533,0.801889181137085,0.0580231418465472,0.9822568297386168,0.0675413757562637,67.20354516094307,, +M_DENSE (H),3.7,42.8s,0.5784375667572021,0.444374531507492,2.4077565670013428,0.2949043810367584,0.760550856590271,0.0899870949612425,0.9071030020713806,0.0984628275036811,-111.3769841563864,, +ett1 (15T),4.4,329.97s,0.0779099911451339,0.0645546168088913,0.560560405254364,-0.0481127053499221,0.2791236042976379,0.0250186369212912,0.2958253622055053,0.0443522036075592,-29.852814837574265,, +ett2 (15T),4.57,226.81s,0.0352018028497695,0.0387738905847072,0.3235756754875183,0.1107677444815635,0.1876214295625686,0.0121631061120319,0.2793645560741424,0.0610741227865219,-0.8039113816463029,, +SZ_TAXI (15T),4.58,17.77s,0.42354616522789,0.4185243248939514,0.7615123987197876,-0.7070361971855164,0.6508042216300964,0.0506900444088563,0.7788030505180359,0.1038615405559539,-1.1161610703586788,, +electricity (D),4.63,12.85s,2061263104.0,2192.175048828125,0.2587335407733917,9705.6865234375,45401.13671875,0.0194227446917634,0.9591110348701476,1.4977014064788818,0.5220507689941757,, +solar (H),5.97,61.06s,0.8266324996948242,0.6340412497520447,4.845605373382568,-0.1567424982786178,0.9091933369636536,0.2420115291642467,1.209081768989563,0.1694316267967224,-34.80226701270591,, +bitbrains_rnd (H),6.1,12.41s,1650.3675537109375,3.28809666633606,0.3081104159355163,2738.19140625,40.62471771240234,0.0052432316635678,0.7822993397712708,0.0683856233954429,0.7029842429913499,, +m4_weekly (W-SUN),7.18,52.58s,14.701823234558104,0.5906059145927429,0.332653135061264,0.2083614021539688,3.834295749664306,0.0125719051061351,0.5218396186828613,0.0845267623662948,0.5617101458367062,, +jena_weather (10T),7.18,167.78s,0.2325582355260849,0.1713819056749344,0.6592965722084045,0.3889910280704498,0.4822429120540619,0.0018777492686544,0.4705949425697326,0.0464869439601898,4.286768148965424,, +kdd_cup_2018_with_missing (H),14.28,156.25s,0.6713148355484009,0.5135934948921204,0.9226472973823548,-0.4631863832473755,0.8193380236625671,0.0151843473907651,1.1105737686157229,0.1338521540164947,-5.905587927977317,, +bitbrains_fast_storage (H),15.64,19.09s,6001.80029296875,2.4014060497283936,0.319601446390152,6690.85986328125,77.4712905883789,0.0072342358585961,0.905264139175415,0.8357183933258057,0.6903022491626275,, +LOOP_SEATTLE (H),27.05,122.39s,0.9911009073257446,0.6836065053939819,2.6852030754089355,1.1819508075714111,0.9955404996871948,0.0352485268949327,1.4283233880996704,0.1249789893627166,-19.62866533710461,, +solar (10T),33.4,391.15s,0.674527108669281,0.5838488340377808,4.178455829620361,-0.1536557227373123,0.821295976638794,0.2055568836303094,1.1840494871139526,0.1582845151424408,-20.79368487543768,, +m4_yearly (YE-DEC),51.4,86.22s,0.2261692285537719,0.3908155858516693,1.6628925800323486,1.3507766723632812,0.4755725264549255,0.0156536878524756,1.4637120962142944,0.2783320844173431,1.1542410315121352,, +bitbrains_rnd 
(5T),63.69,473.85s,601.7960815429688,0.9585692882537842,0.2155532985925674,3885.76904296875,24.531532287597656,0.001846518789332,0.8277668356895447,0.6908813118934631,0.4589610091222341,, +electricity (H),110.58,517.8s,281990.09375,20.352977752685547,0.0845265612006187,16568.697265625,531.0274047851562,0.0046644560554118,0.9146561622619628,3.617120742797852,0.1704790066896602,, +temperature_rain_with_missing (D),113.99,241.59s,7.567882061004639,0.9097580909729004,1.9529906511306765,6.748717784881592,2.750978469848633,0.0029802773484801,1.4647784233093262,0.1361345201730728,3.029471131048231,, +bitbrains_fast_storage (5T),160.06,24.41m,4415.42578125,1.2872124910354614,0.3495244681835174,5681.8251953125,66.44866943359375,0.0060978034532362,0.9151012897491456,0.8526456952095032,0.7709492303464724,, +m4_quarterly (QE-DEC),163.93,269.9s,0.5789180994033813,0.0065141702070832,0.0263908170163631,0.0004282108857296,0.7608667016029358,0.0036089904420436,0.0019763675518333,0.0205469280481338,0.0223107845215627,, +m4_daily (D),316.28,26.3m,11.079607963562012,0.1509556770324707,0.235131025314331,0.7039647102355957,3.328604459762573,0.0022279231173181,0.231623962521553,0.0915686339139938,0.5142236192862293,, +LOOP_SEATTLE (5T),324.08,26.42m,0.9686259627342224,0.6023902297019958,1.9317667484283447,0.1514020264148712,0.9841879606246948,0.0327025805961626,1.2063839435577393,0.1071085184812545,-21.432668446438697,, +electricity (15T),442.39,35.69m,16854.087890625,5.356211185455322,0.0891454666852951,5173.1025390625,129.8232879638672,0.0041978286842909,0.8558568954467773,3.32273530960083,0.1802054728326724,, +m4_monthly (ME),1025.34,69.44m,0.06791886687278748,0.0664273053407669,0.31685030460357666,-1.037688136100769,0.26061248779296875,0.0067522199418217155,0.16453681886196136,0.05816132575273514,0.5769245312275849,, diff --git a/leaderboard/timesfm.csv b/leaderboard/timesfm.csv index 4df5302..a24f71e 100644 --- a/leaderboard/timesfm.csv +++ b/leaderboard/timesfm.csv @@ -1,56 +1,56 @@ dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -us_births (M),0.0,0.77s,18569150000.0,75900.75,0.48510584,94.00559,136268.67,0.3842894984790061,1.6851945,0.18086526,0.7276587664532638,3453739800.0,870729314829769.6 -ett1 (W),0.01,0.28s,755923.3125,342.9079895019531,0.2738148868083954,103.1428680419922,869.4384765625,0.094460304935709,1.4275156259536743,0.0339242033660411,0.412374092665897,174977.84375,788742719.1274706 -ett2 (W),0.01,0.29s,14168267.0,1529.9302978515625,0.2831499278545379,103.1278305053711,3764.075927734375,0.0929492278964032,1.5025070905685425,0.0230261739343404,0.5242624465613168,4060390.75,63681374415.23305 -saugeenday (M),0.02,0.56s,360.3646545410156,11.900530815124512,1.8721747398376465,0.3798299133777618,18.983272552490234,0.1309890539355766,0.3822072446346283,0.0799184143543243,0.3780238860253221,150.04086303710938,9751.949426476773 -us_births (W),0.02,0.75s,8879331.0,2476.028564453125,1.5292190313339231,0.0340057127177715,2979.820556640625,0.13660129069747,0.0346551649272441,0.1548268944025039,0.0342241437212358,123388816.0,625608175326.1519 -ett1 (D),0.05,0.6s,72115.7734375,184.72604370117188,2.584463596343994,1.16010844707489,268.5437927246094,0.1185072873783585,0.5655626058578491,0.1256510615348816,0.5092587809366369,-6898.75146484375,28825573.22282556 -ett2 (D),0.05,0.52s,387765.34375,460.1440734863281,2.8439860343933105,75350.078125,622.7080688476562,0.1021176211031365,0.4315763115882873,0.0793652832508087,0.3425711255986428,81097.703125,667222233.926787 -solar 
(W),0.06,0.87s,243211.953125,140.67047119140625,0.1039494723081588,168.28846740722656,493.1652526855469,0.0271321008013764,1.5959948301315308,0.0375822409987449,0.1559242251909539,68889.2734375,313517743.6982181 -saugeenday (W),0.07,1.9s,1413.5189208984375,18.77586555480957,0.752160370349884,0.5625652074813843,37.59679412841797,0.0965116291642801,0.5715811848640442,0.0891419872641563,0.611226523135817,314.54638671875,31355.627402559603 -jena_weather (D),0.08,0.17s,216406.8125,158.7361602783203,1.4110695123672483,1640.08203125,465.1954650878906,0.1400331827079712,1.2437394857406616,0.1674105823040008,1.3376674560340065,-59087.4296875,87550721.91806899 -us_births (D),0.13,5.83s,256599.03125,366.220703125,0.3182243406772613,0.0378155298531055,506.5560607910156,0.0820200874305075,0.0375937931239604,0.0830649435520172,0.0377344403028494,125517.234375,2468827337.600878 -hierarchical_sales (W),0.15,0.68s,172.6010284423828,2.2923226356506348,0.3291488289833069,1600.3328857421875,13.137770652770996,0.0240618505717078,1.6805850267410278,0.0789058208465576,0.4937229247823241,3.173884153366089,889.1261805051131 -bizitobs_l2c (H),0.18,3.22s,236.19967651367188,10.64823055267334,0.6495946049690247,175502.625,15.368788719177246,0.1293306067065899,0.9493139386177064,0.1546213030815124,0.6156449659573296,69.27212524414062,3192.6249713620327 -M_DENSE (D),0.21,0.39s,11565.7919921875,49.07347106933594,1.0506058931350708,15768.125,107.54437255859376,0.0308658598693937,0.0997544825077056,0.0235972441732883,0.0857000354899125,-4261.94775390625,6237674.302734707 -covid_deaths (D),0.27,1.48s,3808534.25,71.79171752929688,0.3206616640090942,133.26976013183594,1951.546630859375,0.0175656762437778,1.8222180604934688,1.604204177856445,0.4809924540781238,709.318603515625,21421881.951157507 -bizitobs_application (10s),0.33,5.05s,14336030.0,1443.1549072265625,0.4737738966941833,0.0729291960597038,3786.294921875,0.0663502367512032,0.0753102004528045,0.0247951094061136,0.0575195930776688,-7014385.5,367766633594.7068 -solar (D),0.35,0.85s,150108.46875,168.56527709960938,0.4180575609207153,90.0322494506836,387.4383544921875,0.0868861469356943,1.511743187904358,0.1081578359007835,0.6270861685860822,-61677.10546875,73463620.34841867 -hospital (ME),0.35,9.63s,1444.345947265625,2.275298595428467,0.3479009568691253,155.61337280273438,38.00455093383789,0.0168161729051664,1.8580025434494016,1.265772581100464,0.5218510124201764,-22.01920509338379,29251.403845280827 -saugeenday (D),0.38,15.61s,1644.2486572265625,20.858020782470703,0.6737617254257202,0.8013893961906433,40.54933547973633,0.0635868497219086,0.6335486173629761,0.1162731721997261,0.692772052551671,231.62429809570312,43203.344686742144 -car_parts_with_missing (ME),0.58,13.5s,0.0004000368935521,0.0024594825226813,2.811450242996216,212.77281188964844,0.020000921562314,0.0100004107791031,1.8619308471679688,295.2582702636719,4.193208759020757,0.0001002870267257,3.65184084980213e-05 -electricity (W),0.66,4.85s,1141021824.0,2945.174560546875,0.4218693077564239,3608.99609375,33779.015625,0.0258700696664527,1.8567241430282595,0.0892531797289848,0.6328040646388613,4315899.0,1289009397906.8467 -hierarchical_sales (D),0.9,3.98s,37.22464370727539,2.452910661697388,0.8289899826049805,32822.65625,6.101200103759766,0.0167156162646674,0.9927151203155518,0.1096980571746826,0.6821998839505022,3.9786980152130127,122.56138584698132 -kdd_cup_2018_with_missing 
(D),1.08,2.0s,39549.703125,31.431442260742188,2.9347970485687256,107.65431213378906,198.8710784912109,0.1128987098281145,1.7379422187805176,0.5802035331726074,3.913061567638313,-159.70419311523438,288219.43507911486 -LOOP_SEATTLE (D),1.13,2.17s,5496.12255859375,12.844322204589844,1.1645691394805908,113.16234588623048,74.1358413696289,0.9563215616469374,1.754644751548767,0.2735498249530792,1.7468517590991,-775.4537353515625,362258.9999608363 -SZ_TAXI (H),1.14,3.67s,6.786564350128174,1.813169240951538,2.023066282272339,0.9974349737167358,2.6051034927368164,0.04569469967052,0.2922604978084564,0.0554111190140247,0.168388223505618,0.2839378714561462,69.91879421028993 -ett1 (H),1.22,24.1s,116.49008178710938,5.808158874511719,0.8058133125305176,6461.5,10.793057441711426,0.03833238138548,0.4080630242824554,0.0634973719716072,0.3246970863170947,1.4737393856048584,2433.867334308367 -ett2 (H),1.26,20.21s,853.8825073242188,10.279205322265623,0.8325547575950623,29186.673828125,29.221267700195312,0.0532953735879658,0.362695574760437,0.0523780435323715,0.1520864028310236,96.42228698730467,65475.6251824666 -jena_weather (H),1.65,5.12s,11633.09375,20.63885307312012,0.8180392384529114,49308.7734375,107.85681915283205,0.0087660520085325,0.5560029745101929,0.0171003174036741,0.1093307254642427,18.18128967285156,2465080.805412517 -bizitobs_l2c (5T),1.68,33.52s,55.61268615722656,3.860199451446533,0.7001544237136841,30274.798828125,7.457391262054443,0.0616313276507534,0.7837044596672058,0.0747101381421089,0.2468502933257013,7.223553657531738,876.0684101909227 -restaurant (D),1.77,19.27s,23.778711318969727,0.7549307346343994,0.4492984116077423,122.9323272705078,4.876341819763184,0.0186119909681041,1.880303978919983,0.0409628488123416,0.5990635938742331,0.7695077061653137,48.720557867116376 -m4_hourly (h),2.43,7.15s,5090942.0,191.70616149902344,0.8005938529968262,0.3146225810050964,2256.3115234375,0.0032095561059994,0.0889025703072547,0.1215216591954231,0.0266055212389293,663349.375,118992351217.515 -bizitobs_service (10s),3.07,10.27s,62803.484375,68.61558532714844,0.4750987589359283,2.9249050617218018,250.60623168945312,0.0235975735831899,0.1754108965396881,0.0299571566283702,0.053328221931553,-14734.201171875,80983520.59893805 -M_DENSE (H),3.7,10.18s,43144.1015625,81.26242065429688,0.4388032555580139,54280.05078125,207.71157836914065,0.0370648783009443,0.1887461990118026,0.0287262313067913,0.1446289795464684,-35.95600509643555,24230014.607163224 -ett1 (15T),4.4,91.31s,5.761040210723877,1.2852407693862915,0.8999533653259277,1481.733642578125,2.400216817855835,0.0331640709094088,0.3748109936714172,0.0601485595107078,0.2810126563568119,0.7427869439125061,29.50964871780717 -ett2 (15T),4.57,84.77s,13.491145133972168,2.0715839862823486,0.9193405508995056,9298.2470703125,3.6730294227600098,0.026357355471976,0.3589045703411102,0.0463107377290725,0.1216792821410198,5.443060874938965,262.18723574122566 -SZ_TAXI (15T),4.58,8.99s,22.566789627075195,3.1129724979400635,0.8204217553138733,48.178466796875,4.750451564788818,0.056952752199296,0.3988228738307953,0.0825097113847732,0.2548119228106883,6.911273002624512,273.16549159023543 -electricity (D),4.63,24.69s,21634930688.0,13334.37109375,1.434918999671936,1785203.625,147088.171875,0.0140987636829257,0.2670892477035522,0.1853649616241455,0.2322878584310251,347290688.0,1738899002069049.5 -solar 
(H),5.97,26.06s,803.7706298828125,13.406301498413086,0.9649030566215516,89397.3671875,28.350849151611328,0.0556936435998997,1.2737843990325928,0.0841935649514198,0.3545522668925008,-30.8249568939209,32633.153636320803 -bitbrains_rnd (H),6.1,12.47s,2108625.75,201.92837524414065,2.5005455017089844,10012.623046875,1452.11083984375,0.0663705297630374,0.668876051902771,0.1365956366062164,0.8682190726281784,315.523681640625,563879789.6400548 -m4_weekly (W-SUN),7.18,38.16s,561843.9375,169.09071350097656,0.6597051024436951,0.0394658930599689,749.5625,0.0146108341018089,0.0377030260860919,0.026721965521574,0.0343743559651665,258700.15625,2611755055.7616825 -jena_weather (10T),7.18,58.47s,12574.1083984375,10.42119026184082,0.1510143727064132,317530.09375,112.13433074951172,0.0089722254680751,0.3868859410285949,0.0127869937568902,0.055151233084783,-393.72802734375,2353797.8153502587 -kdd_cup_2018_with_missing (H),14.28,150.86s,2952.594970703125,20.5595817565918,0.7884857654571533,198.80604553222656,54.33778762817383,0.0164959889080795,0.5316383242607117,0.0977005138993263,0.4983196497471576,234.4306182861328,121685.40442855174 -bitbrains_fast_storage (H),15.64,30.69s,6645929.0,507.407470703125,3.779883623123169,12325388.0,2577.969970703125,0.0441231050084102,0.5935467481613159,0.2566251456737518,1.298484072868801,-188083.875,4203831024.334692 -LOOP_SEATTLE (H),27.05,133.16s,40.00091171264648,3.2189533710479736,0.456672191619873,0.0995595604181289,6.32462739944458,0.078617404675649,0.0764003098011016,0.0566152445971965,0.0568730516049304,-27.0537052154541,2250.8890889121903 -solar (10T),33.4,257.14s,30.337221145629883,2.5926098823547363,0.2261365950107574,16518.150390625,5.507923603057861,0.0619563879522613,1.3525595664978027,0.0886123850941658,0.4094627211186514,3.389242887496948,187.1054757980695 -m4_yearly (YE-DEC),51.4,290.76s,6459.0498046875,1.0295237302780151,0.1053829044103622,131.66738891601562,80.36821746826172,0.0027782154812112,1.9379576444625848,1206815.625,0.1580744366296219,502.7213439941406,3110155.747033201 -bitbrains_rnd (5T),63.69,387.53s,2270520.5,176.41055297851562,0.8124773502349854,38687.7890625,1506.8245849609375,0.0683913157311615,0.7168135046958923,0.1587728559970855,0.5689000036888757,12547.703125,585054641.1886609 -electricity (H),110.58,240.47s,3291210.25,154.08624267578125,0.6742883920669556,54339.984375,1814.1693115234373,0.0023745671616488,0.4415097832679748,0.1650619953870773,0.0724492695100105,64593.03125,13094113815.789062 -temperature_rain_with_missing (D),113.99,388.37s,223.14825439453125,6.465656280517578,2.1704370975494385,30970.69921875,14.93814754486084,0.0154001519493395,1.5684967041015625,0.0656752437353134,0.7594556113206428,27.58770751953125,1312.226885612527 -bitbrains_fast_storage (5T),160.06,388.75s,3863427.25,308.6574401855469,0.8093902468681335,32153.390625,1965.560302734375,0.0307712322939562,0.7466534972190857,0.054063756018877,0.5174100188246353,-722.779296875,2388020468.450806 -m4_quarterly (QE-DEC),163.93,372.07s,11540.2001953125,4.373758316040039,8.315937042236328,0.000894644006621,107.42532348632812,0.0021157962591371,0.0008130415226332,0.0147697096690535,0.0007122407249761,8877.486328125,76965652.61745518 -m4_daily (D),316.28,734.5s,72798.4453125,27.041128158569336,0.1879831701517105,0.0043948837555944,269.8118591308594,0.006309027242851,0.0045300694182515,0.0084697818383574,0.0044873143451785,40192.64453125,424549963.43306565 -LOOP_SEATTLE 
(5T),324.08,637.41s,121.34342956542967,6.154635906219482,0.5620866417884827,0.2324665635824203,11.015599250793455,0.0699603028839909,0.1380070000886917,0.0848249346017837,0.108806673646852,-55.0250244140625,6821.649119886511 -electricity (15T),442.39,930.99s,165336.84375,40.74889755249024,0.8498629927635193,15328.3115234375,406.6163330078125,0.0021090058764871,0.4655930399894714,0.1650999933481216,0.0769932113720002,2079.595458984375,175638871.9419063 -m4_monthly (ME),1025.34,41.26m,21410.37890625,6.217271327972412,2.6664843559265137,0.0012720649829134345,146.32286071777344,0.0019095096563116639,0.0012352424673736095,0.019323231652379036,0.0012871883735410055,12769.849609375,105929602.0931511 +us_births (M),0.0,0.87s,18569150464.0,75900.75,,94.0055923461914,136268.671875,0.3842894984790061,1.685194492340088,0.1808652579784393,0.7276587664532638,3453739776.0,870729314829769.6 +ett1 (W),0.01,0.27s,755923.3125,342.9079895019531,0.4838866293430328,103.1428680419922,869.4384765625,0.094460304935709,1.4275156259536743,0.0339242033660411,0.412374092665897,174977.84375,788742719.1274706 +ett2 (W),0.01,0.31s,14168267.0,1529.9302978515625,0.2853349149227142,103.1278305053711,3764.075927734375,0.0929492278964032,1.5025070905685425,0.0230261739343404,0.5242624465613168,4060390.75,63681374415.23305 +saugeenday (M),0.02,0.48s,360.3646545410156,11.900530815124512,,0.3798299133777618,18.983272552490234,0.1309890539355766,0.3822072446346283,0.0799184143543243,0.3780238860253221,150.04086303710938,9751.949426476773 +us_births (W),0.02,1.52s,8879331.0,2476.028564453125,,0.0340057127177715,2979.820556640625,0.13660129069747,0.0346551649272441,0.1548268944025039,0.0342241437212358,123388816.0,625608175326.1519 +ett1 (D),0.05,0.39s,72115.7734375,184.72604370117188,0.5543407201766968,1.16010844707489,268.5437927246094,0.1185072873783585,0.5655626058578491,0.1256510615348816,0.5092587809366369,-6898.75146484375,28825573.22282556 +ett2 (D),0.05,0.39s,387765.34375,460.1440734863281,0.2000954896211624,75350.078125,622.7080688476562,0.1021176211031365,0.4315763115882873,0.0793652832508087,0.3425711255986428,81097.703125,667222233.926787 +solar (W),0.06,0.91s,243211.953125,140.67047119140625,0.4379618167877197,168.28846740722656,493.1652526855469,0.0271321008013764,1.5959948301315308,0.0375822409987449,0.1559242251909539,68889.2734375,313517743.6982181 +saugeenday (W),0.07,2.02s,1413.5189208984375,18.77586555480957,,0.5625652074813843,37.59679412841797,0.0965116291642801,0.5715811848640442,0.0891419872641563,0.611226523135817,314.54638671875,31355.627402559603 +jena_weather (D),0.08,0.19s,216406.8125,158.7361602783203,0.892136812210083,1640.08203125,465.1954650878906,0.1400331827079712,1.2437394857406616,0.1674105823040008,1.3376674560340065,-59087.4296875,87550721.91806899 +us_births (D),0.13,4.79s,256599.03125,366.220703125,,0.0378155298531055,506.5560607910156,0.0820200874305075,0.0375937931239604,0.0830649435520172,0.0377344403028494,125517.234375,2468827337.600878 +hierarchical_sales (W),0.15,0.68s,172.6010284423828,2.2923226356506348,0.4788693487644195,1600.3328857421875,13.137770652770996,0.0240618505717078,1.6805850267410278,0.0789058208465576,0.4937229247823241,3.173884153366089,889.1261805051131 +bizitobs_l2c (H),0.18,1.48s,236.19967651367188,10.64823055267334,0.9124109745025636,175502.625,15.368788719177246,0.1293306067065899,0.9493139386177064,0.1546213030815124,0.6156449659573296,69.27212524414062,3192.6249713620327 +M_DENSE 
(D),0.21,0.43s,11565.7919921875,49.07347106933594,0.1443746834993362,15768.125,107.54437255859376,0.0308658598693937,0.0997544825077056,0.0235972441732883,0.0857000354899125,-4261.94775390625,6237674.302734707 +covid_deaths (D),0.27,1.68s,3808534.25,71.79171752929688,0.2467459589242935,133.26976013183594,1951.546630859375,0.0175656762437778,1.8222180604934688,1.604204177856445,0.4809924540781238,709.318603515625,21421881.951157507 +bizitobs_application (10s),0.33,5.09s,14336030.0,1443.1549072265625,0.0288167968392372,0.0729291960597038,3786.294921875,0.0663502367512032,0.0753102004528045,0.0247951094061136,0.0575195930776688,-7014385.5,367766633594.7068 +solar (D),0.35,1.12s,150108.46875,168.56527709960938,1.5647693872451782,90.0322494506836,387.4383544921875,0.0868861469356943,1.511743187904358,0.1081578359007835,0.6270861685860822,-61677.10546875,73463620.34841867 +hospital (ME),0.35,4.36s,1444.345947265625,2.275298595428467,0.4929713904857635,155.61337280273438,38.00455093383789,0.0168161729051664,1.8580025434494016,1.265772581100464,0.5218510124201764,-22.01920509338379,29251.403845280827 +saugeenday (D),0.38,15.85s,1644.2486572265625,20.858020782470703,,0.8013893961906433,40.54933547973633,0.0635868497219086,0.6335486173629761,0.1162731721997261,0.692772052551671,231.62429809570312,43203.344686742144 +car_parts_with_missing (ME),0.58,13.9s,0.0004000368935521,0.0024594825226813,2.601860523223877,212.77281188964844,0.020000921562314,0.0100004107791031,1.8619308471679688,295.2582702636719,4.193208759020757,0.0001002870267257,3.65184084980213e-05 +electricity (W),0.66,2.0s,1141021824.0,2945.174560546875,0.4524020850658417,3608.99609375,33779.015625,0.0258700696664527,1.8567241430282595,0.0892531797289848,0.6328040646388613,4315899.0,1289009397906.8467 +hierarchical_sales (D),0.9,4.46s,37.22464370727539,2.452910661697388,0.5911553502082825,32822.65625,6.101200103759766,0.0167156162646674,0.9927151203155518,0.1096980571746826,0.6821998839505022,3.9786980152130127,122.56138584698132 +kdd_cup_2018_with_missing (D),1.08,2.1s,39549.703125,31.431442260742188,3.546457052230835,107.65431213378906,198.8710784912109,0.1128987098281145,1.7379422187805176,0.5802035331726074,3.913061567638313,-159.70419311523438,288219.43507911486 +LOOP_SEATTLE (D),1.13,1.79s,5496.12255859375,12.844322204589844,29.06014060974121,113.16234588623048,74.1358413696289,0.9563215616469374,1.754644751548767,0.2735498249530792,1.7468517590991,-775.4537353515625,362258.9999608363 +SZ_TAXI (H),1.14,2.09s,6.786564350128174,1.813169240951538,0.2287033498287201,0.9974349737167358,2.6051034927368164,0.04569469967052,0.2922604978084564,0.0554111190140247,0.168388223505618,0.2839378714561462,69.91879421028993 +ett1 (H),1.22,10.37s,116.49008178710938,5.808158874511719,0.2350834161043167,6461.5,10.793057441711426,0.03833238138548,0.4080630242824554,0.0634973719716072,0.3246970863170947,1.4737393856048584,2433.867334308367 +ett2 (H),1.26,11.82s,853.8825073242188,10.279205322265623,0.1053689420223236,29186.673828125,29.221267700195312,0.0532953735879658,0.362695574760437,0.0523780435323715,0.1520864028310236,96.42228698730467,65475.6251824666 +jena_weather (H),1.65,5.19s,11633.09375,20.63885307312012,0.0746945664286613,49308.7734375,107.85681915283205,0.0087660520085325,0.5560029745101929,0.0171003174036741,0.1093307254642427,18.18128967285156,2465080.805412517 +bizitobs_l2c 
(5T),1.68,19.29s,55.61268615722656,3.860199451446533,0.3617765605449676,30274.798828125,7.457391262054443,0.0616313276507534,0.7837044596672058,0.0747101381421089,0.2468502933257013,7.223553657531738,876.0684101909227 +restaurant (D),1.77,6.2s,23.778711318969727,0.7549307346343994,0.6323703527450562,122.9323272705078,4.876341819763184,0.0186119909681041,1.880303978919983,0.0409628488123416,0.5990635938742331,0.7695077061653137,48.720557867116376 +m4_hourly (h),2.43,7.52s,5090942.0,191.70616149902344,0.0182035341858863,0.3146225810050964,2256.3115234375,0.0032095561059994,0.0889025703072547,0.1215216591954231,0.0266055212389293,663349.375,118992351217.515 +bizitobs_service (10s),3.07,10.8s,62803.484375,68.61558532714844,0.0307874605059623,2.9249050617218018,250.60623168945312,0.0235975735831899,0.1754108965396881,0.0299571566283702,0.053328221931553,-14734.201171875,80983520.59893805 +M_DENSE (H),3.7,11.14s,43144.1015625,81.26242065429688,0.2254992425441742,54280.05078125,207.71157836914065,0.0370648783009443,0.1887461990118026,0.0287262313067913,0.1446289795464684,-35.95600509643555,24230014.607163224 +ett1 (15T),4.4,40.75s,5.761040210723877,1.2852407693862915,0.2044875472784042,1481.733642578125,2.400216817855835,0.0331640709094088,0.3748109936714172,0.0601485595107078,0.2810126563568119,0.7427869439125061,29.50964871780717 +ett2 (15T),4.57,41.45s,13.491145133972168,2.0715839862823486,0.0850998982787132,9298.2470703125,3.6730294227600098,0.026357355471976,0.3589045703411102,0.0463107377290725,0.1216792821410198,5.443060874938965,262.18723574122566 +SZ_TAXI (15T),4.58,8.24s,22.566789627075195,3.1129724979400635,0.3422078788280487,48.178466796875,4.750451564788818,0.056952752199296,0.3988228738307953,0.0825097113847732,0.2548119228106883,6.911273002624512,273.16549159023543 +electricity (D),4.63,10.8s,21634930688.0,13334.37109375,0.1572931259870529,1785203.625,147088.171875,0.0140987636829257,0.2670892477035522,0.1853649616241455,0.2322878584310251,347290688.0,1738899002069049.5 +solar (H),5.97,26.31s,803.7706298828125,13.406301498413086,0.8212879300117493,89397.3671875,28.350849151611328,0.0556936435998997,1.2737843990325928,0.0841935649514198,0.3545522668925008,-30.8249568939209,32633.153636320803 +bitbrains_rnd (H),6.1,12.69s,2108625.75,201.92837524414065,0.4400153458118438,10012.623046875,1452.11083984375,0.0663705297630374,0.668876051902771,0.1365956366062164,0.8682190726281784,315.523681640625,563879789.6400548 +m4_weekly (W-SUN),7.18,18.33s,561843.9375,169.09071350097656,0.0449125319719314,0.0394658930599689,749.5625,0.0146108341018089,0.0377030260860919,0.026721965521574,0.0343743559651665,258700.15625,2611755055.7616825 +jena_weather (10T),7.18,32.38s,12574.1083984375,10.42119026184082,0.0376155413687229,317530.09375,112.13433074951172,0.0089722254680751,0.3868859410285949,0.0127869937568902,0.055151233084783,-393.72802734375,2353797.8153502587 +kdd_cup_2018_with_missing (H),14.28,63.58s,2952.594970703125,20.5595817565918,0.4423283338546753,198.80604553222656,54.33778762817383,0.0164959889080795,0.5316383242607117,0.0977005138993263,0.4983196497471576,234.4306182861328,121685.40442855174 +bitbrains_fast_storage (H),15.64,37.42s,6645929.0,507.407470703125,0.656402587890625,12325388.0,2577.969970703125,0.0441231050084102,0.5935467481613159,0.2566251456737518,1.298484072868801,-188083.875,4203831024.334692 +LOOP_SEATTLE 
(H),27.05,110.89s,40.00091171264648,3.2189533710479736,0.895560085773468,0.0995595604181289,6.32462739944458,0.078617404675649,0.0764003098011016,0.0566152445971965,0.0568730516049304,-27.0537052154541,2250.8890889121903 +solar (10T),33.4,292.74s,30.337221145629883,2.5926098823547363,0.904692769050598,16518.150390625,5.507923603057861,0.0619563879522613,1.3525595664978027,0.0886123850941658,0.4094627211186514,3.389242887496948,187.1054757980695 +m4_yearly (YE-DEC),51.4,118.74s,6459.0498046875,1.0295237302780151,0.2522365152835846,131.66738891601562,80.36821746826172,0.0027782154812112,1.9379576444625848,1206815.625,0.1580744366296219,502.7213439941406,3110155.747033201 +bitbrains_rnd (5T),63.69,156.4s,2270520.5,176.41055297851562,0.287757009267807,38687.7890625,1506.8245849609375,0.0683913157311615,0.7168135046958923,0.1587728559970855,0.5689000036888757,12547.703125,585054641.1886609 +electricity (H),110.58,237.51s,3291210.25,154.08624267578125,0.048514399677515,54339.984375,1814.1693115234373,0.0023745671616488,0.4415097832679748,0.1650619953870773,0.0724492695100105,64593.03125,13094113815.789062 +temperature_rain_with_missing (D),113.99,386.88s,223.14825439453125,6.465656280517578,0.7719624638557434,30970.69921875,14.93814754486084,0.0154001519493395,1.5684967041015625,0.0656752437353134,0.7594556113206428,27.58770751953125,1312.226885612527 +bitbrains_fast_storage (5T),160.06,384.07s,3863427.25,308.6574401855469,0.261232852935791,32153.390625,1965.560302734375,0.0307712322939562,0.7466534972190857,0.054063756018877,0.5174100188246353,-722.779296875,2388020468.450806 +m4_quarterly (QE-DEC),163.93,368.5s,11540.2001953125,4.373758316040039,0.0010121726663783,0.000894644006621,107.42532348632812,0.0021157962591371,0.0008130415226332,0.0147697096690535,0.0007122407249761,8877.486328125,76965652.61745518 +m4_daily (D),316.28,748.72s,72798.4453125,27.041128158569336,0.0085102980956435,0.0043948837555944,269.8118591308594,0.006309027242851,0.0045300694182515,0.0084697818383574,0.0044873143451785,40192.64453125,424549963.43306565 +LOOP_SEATTLE (5T),324.08,658.32s,121.34342956542967,6.154635906219482,1.3892171382904053,0.2324665635824203,11.015599250793455,0.0699603028839909,0.1380070000886917,0.0848249346017837,0.108806673646852,-55.0250244140625,6821.649119886511 +electricity (15T),442.39,948.67s,165336.84375,40.74889755249024,0.051505222916603,15328.3115234375,406.6163330078125,0.0021090058764871,0.4655930399894714,0.1650999933481216,0.0769932113720002,2079.595458984375,175638871.9419063 +m4_monthly (ME),1025.34,39.59m,21410.37890625,6.217271327972412,0.0018654258456081152,0.0012720649829134345,146.32286071777344,0.0019095096563116639,0.0012352424673736095,0.019323231652379036,0.0012871883735410055,12769.849609375,105929602.0931511 diff --git a/leaderboard/ttm.csv b/leaderboard/ttm.csv index d11d23e..e20efe5 100644 --- a/leaderboard/ttm.csv +++ b/leaderboard/ttm.csv @@ -1,56 +1,56 @@ dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -us_births (M),0.0,0.4s,3000344600.0,45357.152,,0.1415474,54775.402,0.7083148287470498,0.15697034,0.5907716,0.1449459928586625,, -ett1 (W),0.01,0.01s,3367362.0,1289.9720458984375,,0.5109699964523315,1835.037353515625,0.1993679744503945,0.6322484016418457,0.1768371313810348,0.5170979509975956,, -ett2 (W),0.01,0.01s,13926457.0,2763.200439453125,,0.0872123539447784,3731.81689453125,0.0921526307271427,0.6151796579360962,0.0594872161746025,0.3156227157251128,, -saugeenday 
(M),0.02,0.01s,313.7554931640625,12.181077003479004,0.490847647190094,0.4888153672218323,17.713144302368164,0.1222248697096848,0.3978652358055115,0.0862952545285224,0.4094651288420972,, -us_births (W),0.02,0.01s,20105792.0,3474.269775390625,1.1367878913879397,0.0478292293846607,4483.9482421875,0.2055536921303733,0.0495077259838581,0.2409838438034057,0.0490819298801119,, -ett1 (D),0.05,0.01s,70841.4765625,173.8226318359375,1.0557185411453247,0.7592813372612,266.1606140136719,0.1174556002717394,0.5317819714546204,0.1279015988111496,0.5071168510241886,, -ett2 (D),0.05,0.01s,298211.0625,411.258544921875,1.3115198612213137,250305.484375,546.0870361328125,0.0895525717987575,0.5169385671615601,0.0653064250946044,0.3366817636550053,, -solar (W),0.06,0.03s,833353.3125,589.9544677734375,0.173105239868164,3.119361162185669,912.8818969726562,0.0412475200542005,0.6278225183486938,0.0507600344717502,0.1465783350562043,, -saugeenday (W),0.07,0.03s,1398.6142578125,22.994462966918945,0.7861799001693726,1.2953144311904907,37.39805221557617,0.0960014551923699,0.727879524230957,0.1199762746691703,0.7558754163830121,, -jena_weather (D),0.08,0.01s,14943.7822265625,64.64045715332031,,43650.609375,122.24476623535156,0.0956858458779762,0.649933397769928,0.0411364063620567,0.3804947519350621,, -us_births (D),0.13,0.06s,806776.8125,766.9176635742188,0.6859992742538452,0.0819367095828056,898.2075805664062,0.1454351650116668,0.080326035618782,0.1538719534873962,0.0796836055276249,, -hierarchical_sales (W),0.15,0.01s,933.9584350585938,12.470171928405762,0.4878568947315216,26011.6796875,30.560733795166016,0.0374978324174082,0.6585963368415833,0.1202765107154846,0.5715919034526574,, -bizitobs_l2c (H),0.18,0.03s,226.3994140625,11.30506992340088,1.701867938041687,342492.65625,15.046574592590332,0.1266191276666756,0.9697225093841552,0.1977446973323822,0.7473387601455413,, -M_DENSE (D),0.21,0.01s,32961.9765625,103.63765716552734,0.8126806020736694,7649.943359375,181.5543365478516,0.0521071496094654,0.2080607116222381,0.0397377796471118,0.1913010429129907,, -covid_deaths (D),0.27,0.01s,8224839.0,488.4300231933594,0.1252886950969696,5.621520042419434,2867.89794921875,0.0165605013890594,0.8315650820732117,0.0789231956005096,0.2569935866197406,, -bizitobs_application (10s),0.33,0.08s,32887148.0,2724.28076171875,4.040218353271484,0.1427105665206909,5734.73193359375,0.100494237598957,0.1326426267623901,0.0446400567889213,0.109170628872927,, -solar (D),0.35,0.02s,99387.53125,179.03985595703125,0.3714072406291961,5.904353618621826,315.25787353515625,0.0881408744401335,0.8150160908699036,0.0990645289421081,0.3294589633157893,, -hospital (ME),0.35,0.02s,13203.482421875,33.762908935546875,0.0815439075231552,0.2418745905160904,114.90641021728516,0.0095042522847181,0.2441760152578354,0.0584825575351715,0.1218119291653831,, -saugeenday (D),0.38,0.3s,1582.4224853515625,22.28341293334961,1.3019130229949951,1.121761441230774,39.7796745300293,0.062379917115817,0.6690614819526672,0.1259668171405792,0.7433594678021981,, -car_parts_with_missing (ME),0.58,0.09s,1.2756143808364868,0.4929523468017578,0.7112451791763306,14998.923828125,1.1294310092926023,0.028235768173373,1.827894687652588,0.128747209906578,1.13195623478641,, -electricity (W),0.66,0.07s,446165745664.0,81218.609375,0.1408221572637558,2991302.0,667956.375,0.012225214412783,0.3823184967041015,0.2099758982658386,0.2088171696003437,, -hierarchical_sales 
(D),0.9,0.24s,32.510623931884766,2.5985300540924072,1.3520228862762451,55830.75390625,5.701808929443359,0.0300095191018324,1.0427050590515137,0.1340508759021759,0.7876320192863907,, -kdd_cup_2018_with_missing (D),1.08,0.01s,1714.657958984375,15.489001274108888,0.3731913864612579,3.3303918838500977,41.40842819213867,0.0235074810996672,0.6908389925956726,0.0748580545186996,0.4764814901530926,, -LOOP_SEATTLE (D),1.13,0.02s,50.29787826538086,5.00611686706543,0.3392249345779419,2.9659583568573,7.092099666595459,0.0914851400242258,0.3923839032649994,0.0668393373489379,0.1064061499495556,, -SZ_TAXI (H),1.14,0.05s,5.775453567504883,1.6222788095474243,1.1324000358581543,4.42769193649292,2.403217315673828,0.04215281638966,0.6184875965118408,0.0535639263689518,0.1847219385881447,, -ett1 (H),1.22,0.22s,107.93508911132812,5.934848785400391,0.4735427796840668,5916.12548828125,10.38918113708496,0.0368979833360757,0.4260028898715973,0.0630128383636474,0.3322640667716387,, -ett2 (H),1.26,0.21s,331.7087707519531,10.07536506652832,0.8400070071220398,37354.57421875,18.212873458862305,0.03321765178223,0.3521240055561065,0.0502875447273254,0.1495425734373404,, -jena_weather (H),1.65,0.22s,12420.8720703125,23.077777862548828,0.3023591339588165,163810.03125,111.44896697998048,0.0090580034578934,0.5544849038124084,0.0178078208118677,0.1210779545763768,, -bizitobs_l2c (5T),1.68,0.39s,48.1903190612793,3.692827939987183,3.621275663375856,36569.3515625,6.941924571990967,0.0573712727130433,0.7619636058807373,0.069492720067501,0.2402496421142862,, -restaurant (D),1.77,0.02s,184.53414916992188,8.541068077087402,0.5167205929756165,1.398659586906433,13.58433437347412,0.0154895487098958,0.5329251885414124,0.1196827441453933,0.4292247823289208,, -m4_hourly (h),2.43,0.27s,12600969.0,501.5806579589844,0.857112467288971,1.8221070766448968,3549.784423828125,0.0050494225155014,0.3531174659729004,0.1175054088234901,0.0753622761310135,, -bizitobs_service (10s),3.07,0.55s,141876.734375,129.13916015625,3.906403064727783,1.519543170928955,376.665283203125,0.0354675407578577,0.2370375692844391,0.0369267538189888,0.1009228077270859,, -M_DENSE (H),3.7,0.77s,62066.765625,116.72567749023438,0.2362783104181289,65202.38671875,249.13201904296875,0.0444561061024995,0.2629947066307068,0.0346622243523597,0.2068990456674438,, -ett1 (15T),4.4,0.91s,5.762168884277344,1.2901490926742554,0.8856348991394043,1191.9605712890625,2.400451898574829,0.0331673190466505,0.3788388073444366,0.0573586523532867,0.2822993711938242,, -ett2 (15T),4.57,0.92s,11.665009498596191,1.952955722808838,1.1466844081878662,9516.822265625,3.415407657623291,0.0245086829840971,0.3426353335380554,0.0440990105271339,0.1148741645348626,, -SZ_TAXI (15T),4.58,0.65s,15.45772933959961,2.413118600845337,1.522667646408081,57.01747131347656,3.931631803512573,0.0488206840925649,0.6771668791770935,0.0702245682477951,0.259493088402059,, -electricity (D),4.63,0.54s,1972160128.0,5507.720703125,5.606801509857178,732960.9375,44409.0078125,0.0068595934217533,0.2260050177574157,0.1872722059488296,0.1030819908619662,, -solar (H),5.97,2.51s,532.6610107421875,10.401812553405762,0.3966506123542785,91685.8125,23.079450607299805,0.0453382785725622,1.5323195457458496,0.0735802948474884,0.5002257237532436,, -bitbrains_rnd (H),6.1,0.19s,2025258.625,269.1065673828125,6.230059146881104,13996.7177734375,1423.1158447265625,0.0650452774933061,0.7615897059440613,0.2213128209114074,1.2124374447000463,, -m4_weekly 
(W-SUN),7.18,1.17s,227124.890625,89.59058380126953,1.181859254837036,1.1994612216949463,476.5762023925781,0.0092701070278132,0.1410123705863952,0.0134288519620895,0.0175228749335788,, -jena_weather (10T),7.18,1.68s,15407.9208984375,21.225975036621094,0.8548964262008667,181215.421875,124.12864685058594,0.0099319289565152,0.5240204334259033,0.0173721332103014,0.1121931640293443,, -kdd_cup_2018_with_missing (H),14.28,5.55s,11291.087890625,28.80390739440918,5.818058013916016,23.35333824157715,106.25952911376952,0.0323498433250687,0.725252091884613,0.1343024224042892,0.5877470092960014,, -bitbrains_fast_storage (H),15.64,0.44s,3359623.0,384.8792114257813,6.031384468078613,6836025.5,1832.927490234375,0.0834934509771442,0.6652535796165466,0.2152519524097442,1.1167391518923546,, -LOOP_SEATTLE (H),27.05,5.15s,32.28872299194336,3.2998805046081543,1.5066041946411133,2.9606635570526123,5.682316780090332,0.0704656429845557,0.3691274523735046,0.0481607876718044,0.0675387362283571,, -solar (10T),33.4,17.44s,16.853031158447266,1.948245644569397,1.6943179368972778,25896.412109375,4.105244159698486,0.0461782185315402,1.5377682447433472,0.0858474746346473,0.6783722603527258,, -m4_yearly (YE-DEC),51.4,0.34s,1995536.5,931.7138671875,0.1797195076942443,0.1414711624383926,1412.6346435546875,0.0093663615134665,0.155395969748497,0.0524347834289073,0.1384283334694768,, -bitbrains_rnd (5T),63.69,14.63s,3736168.5,262.73681640625,49.20981216430664,56525.73828125,1932.9171142578125,0.0877790254297552,0.7882185578346252,0.1011142283678054,0.7134745559623733,, -electricity (H),110.58,25.04s,945865.0,185.29136657714844,8.34165096282959,42604.21875,972.555908203125,0.0045617068862922,0.3089126944541931,0.1586364656686782,0.0924270373726737,, -temperature_rain_with_missing (D),113.99,10.29s,171.4033660888672,6.7863616943359375,9.965627670288086,141903.234375,13.092110633850098,0.013800053048795,1.3989208936691284,0.0783360004425048,0.7876673891786393,, -bitbrains_fast_storage (5T),160.06,36.34s,4163852.75,307.83111572265625,38.83745193481445,41233.34375,2040.5521240234373,0.092747321814352,0.7892562747001648,0.1296463012695312,0.6764640485449673,, -m4_quarterly (QE-DEC),163.93,9.62s,1026.746826171875,1.2073380947113037,0.00657942565158,0.0002141703880624,32.04288864135742,0.0006311009519832,0.0001976169733097,0.014599579386413,0.0001966043961196,, -m4_daily (D),316.28,101.55s,692.2584228515625,0.2827971577644348,0.0474786385893821,0.2579828798770904,26.31080436706543,0.0006640788581631,0.0269436743110418,0.0082796076312661,4.447048255093878e-05,, -LOOP_SEATTLE (5T),324.08,118.69s,54.779911041259766,4.063791751861572,4.830263614654541,2.981239318847656,7.401345252990723,0.0849846995330269,0.3835786581039428,0.0561670549213886,0.0799155799383991,, -electricity (15T),442.39,99.23s,243348.875,85.44944763183594,25.467529296875,20813.115234375,493.3040466308594,0.0104292610259316,0.3779970407485962,0.18891641497612,0.1852758766545539,, -m4_monthly (ME),1025.34,275.24s,134.231201171875,0.1008019670844078,0.005941803567111492,2.0788544134120457e-05,11.585819244384766,0.0001511946500697889,1.8685559552977793e-05,0.01899047940969467,2.0859902922530216e-05,, +us_births (M),0.0,0.58s,3000344576.0,45357.15234375,3.502741575241089,0.1415473967790603,54775.40234375,0.7083148287470498,0.1569703370332718,0.5907716155052185,0.1449459928586625,, +ett1 (W),0.01,0.33s,3367362.0,1289.9720458984375,1.8491568565368648,0.5109699964523315,1835.037353515625,0.1993679744503945,0.6322484016418457,0.1768371313810348,0.5170979509975956,, +ett2 
(W),0.01,0.02s,13926457.0,2763.200439453125,2.117555618286133,0.0872123539447784,3731.81689453125,0.0921526307271427,0.6151796579360962,0.0594872161746025,0.3156227157251128,, +saugeenday (M),0.02,0.02s,313.7554931640625,12.181077003479004,0.6699055433273315,0.4888153672218323,17.713144302368164,0.1222248697096848,0.3978652358055115,0.0862952545285224,0.4094651288420972,, +us_births (W),0.02,0.02s,20105792.0,3474.269775390625,3.0583033561706543,0.0478292293846607,4483.9482421875,0.2055536921303733,0.0495077259838581,0.2409838438034057,0.0490819298801119,, +ett1 (D),0.05,0.03s,70841.4765625,173.8226318359375,1.7866737842559814,0.7592813372612,266.1606140136719,0.1174556002717394,0.5317819714546204,0.1279015988111496,0.5071168510241886,, +ett2 (D),0.05,0.02s,298211.0625,411.258544921875,2.987841844558716,250305.484375,546.0870361328125,0.0895525717987575,0.5169385671615601,0.0653064250946044,0.3366817636550053,, +solar (W),0.06,0.02s,833353.3125,589.9544677734375,0.7473648190498352,3.119361162185669,912.8818969726562,0.0412475200542005,0.6278225183486938,0.0507600344717502,0.1465783350562043,, +saugeenday (W),0.07,0.05s,1398.6142578125,22.994462966918945,1.82844352722168,1.2953144311904907,37.39805221557617,0.0960014551923699,0.727879524230957,0.1199762746691703,0.7558754163830121,, +jena_weather (D),0.08,0.02s,14943.7822265625,64.64045715332031,6.062127590179443,43650.609375,122.24476623535156,0.0956858458779762,0.649933397769928,0.0411364063620567,0.3804947519350621,, +us_births (D),0.13,0.08s,806776.8125,766.9176635742188,1.107465386390686,0.0819367095828056,898.2075805664062,0.1454351650116668,0.080326035618782,0.1538719534873962,0.0796836055276249,, +hierarchical_sales (W),0.15,0.03s,933.9584350585938,12.470171928405762,1.088371515274048,26011.6796875,30.560733795166016,0.0374978324174082,0.6585963368415833,0.1202765107154846,0.5715919034526574,, +bizitobs_l2c (H),0.18,0.05s,226.3994140625,11.30506992340088,10.589118003845217,342492.65625,15.046574592590332,0.1266191276666756,0.9697225093841552,0.1977446973323822,0.7473387601455413,, +M_DENSE (D),0.21,0.03s,32961.9765625,103.63765716552734,1.4155995845794678,7649.943359375,181.5543365478516,0.0521071496094654,0.2080607116222381,0.0397377796471118,0.1913010429129907,, +covid_deaths (D),0.27,0.02s,8224839.0,488.4300231933594,29.05698204040528,5.621520042419434,2867.89794921875,0.0165605013890594,0.8315650820732117,0.0789231956005096,0.2569935866197406,, +bizitobs_application (10s),0.33,0.09s,32887148.0,2724.28076171875,20.44576644897461,0.1427105665206909,5734.73193359375,0.100494237598957,0.1326426267623901,0.0446400567889213,0.109170628872927,, +solar (D),0.35,0.02s,99387.53125,179.03985595703125,0.9749029874801636,5.904353618621826,315.25787353515625,0.0881408744401335,0.8150160908699036,0.0990645289421081,0.3294589633157893,, +hospital (ME),0.35,0.02s,13203.482421875,33.762908935546875,1.543909788131714,0.2418745905160904,114.90641021728516,0.0095042522847181,0.2441760152578354,0.0584825575351715,0.1218119291653831,, +saugeenday (D),0.38,0.2s,1582.4224853515625,22.28341293334961,4.931447982788086,1.121761441230774,39.7796745300293,0.062379917115817,0.6690614819526672,0.1259668171405792,0.7433594678021981,, +car_parts_with_missing (ME),0.58,0.05s,1.2756143808364868,0.4929523468017578,0.8502885699272156,14998.923828125,1.1294310092926023,0.028235768173373,1.827894687652588,0.128747209906578,1.13195623478641,, +electricity 
(W),0.66,0.02s,446165745664.0,81218.609375,4.149017333984375,2991302.0,667956.375,0.012225214412783,0.3823184967041015,0.2099758982658386,0.2088171696003437,, +hierarchical_sales (D),0.9,0.28s,38.69567108154297,2.674888134002685,1.0154752731323242,53239.0546875,6.220584392547607,0.0170426964989606,1.0247446298599243,0.1280752718448639,0.7920525756293889,, +kdd_cup_2018_with_missing (D),1.08,0.02s,1714.657958984375,15.489001274108888,1.0540289878845217,3.3303918838500977,41.40842819213867,0.0235074810996672,0.6908389925956726,0.0748580545186996,0.4764814901530926,, +LOOP_SEATTLE (D),1.13,0.02s,50.29787826538086,5.00611686706543,1.5094910860061646,2.9659583568573,7.092099666595459,0.0914851400242258,0.3923839032649994,0.0668393373489379,0.1064061499495556,, +SZ_TAXI (H),1.14,0.05s,5.922147274017334,1.6430833339691162,0.9922404885292052,4.412487030029297,2.433546304702759,0.0426847917118593,0.6217026710510254,0.0555255636572837,0.1865383686512823,, +ett1 (H),1.22,0.22s,107.93508911132812,5.934848785400391,2.128864049911499,5916.12548828125,10.38918113708496,0.0368979833360757,0.4260028898715973,0.0630128383636474,0.3322640667716387,, +ett2 (H),1.26,0.22s,331.7087707519531,10.07536506652832,2.745605945587158,37354.57421875,18.212873458862305,0.03321765178223,0.3521240055561065,0.0502875447273254,0.1495425734373404,, +jena_weather (H),1.65,0.26s,12420.8720703125,23.077777862548828,1.494724154472351,163810.03125,111.44896697998048,0.0090580034578934,0.5544849038124084,0.0178078208118677,0.1210779545763768,, +bizitobs_l2c (5T),1.68,0.4s,48.1903190612793,3.692827939987183,22.724634170532227,36569.3515625,6.941924571990967,0.0573712727130433,0.7619636058807373,0.069492720067501,0.2402496421142862,, +restaurant (D),1.77,0.03s,184.53414916992188,8.541068077087402,0.9636807441711426,1.398659586906433,13.58433437347412,0.0154895487098958,0.5329251885414124,0.1196827441453933,0.4292247823289208,, +m4_hourly (h),2.43,0.27s,16112336.0,581.5810546875,4.242598533630371,2.06164813041687,4014.017333984375,0.0057097747592165,0.3292426466941833,0.1218496486544609,0.0871955975339855,, +bizitobs_service (10s),3.07,0.49s,141876.734375,129.13916015625,19.40226936340332,1.519543170928955,376.665283203125,0.0354675407578577,0.2370375692844391,0.0369267538189888,0.1009228077270859,, +M_DENSE (H),3.7,0.82s,62066.765625,116.72567749023438,0.9779656529426576,65202.38671875,249.13201904296875,0.0444561061024995,0.2629947066307068,0.0346622243523597,0.2068990456674438,, +ett1 (15T),4.4,0.89s,5.762168884277344,1.2901490926742554,2.971309900283813,1191.9605712890625,2.400451898574829,0.0331673190466505,0.3788388073444366,0.0573586523532867,0.2822993711938242,, +ett2 (15T),4.57,0.94s,11.665009498596191,1.952955722808838,2.917888164520264,9516.822265625,3.415407657623291,0.0245086829840971,0.3426353335380554,0.0440990105271339,0.1148741645348626,, +SZ_TAXI (15T),4.58,0.67s,21.411352157592773,2.839176654815674,1.060652256011963,57.892879486083984,4.627240180969238,0.055475581594543,0.6981817483901978,0.0800180286169052,0.2834299772902283,, +electricity (D),4.63,0.5s,8184195072.0,8163.6796875,2.8835554122924805,2622175.5,90466.5390625,0.0086714406685146,0.3303181529045105,0.1930992007255554,0.1453668103264981,, +solar (H),5.97,2.72s,459.587158203125,9.728689193725586,1.1026630401611328,93281.4296875,21.43798446655273,0.0421137109507887,1.4376612901687622,0.0660665854811668,0.3487886209011904,, +bitbrains_rnd 
(H),6.1,0.22s,2293833.75,257.7992858886719,2.942677497863769,45887.7109375,1514.540771484375,0.0692239673398166,0.7229076027870178,0.1583413928747177,0.9653971352083204,, +m4_weekly (W-SUN),7.18,1.22s,708305.0,239.3766632080078,4.900619029998779,1.2359340190887451,841.6085815429688,0.0163705228823042,0.1757995337247848,0.0250235237181186,0.0535825255937335,, +jena_weather (10T),7.18,1.68s,15407.9208984375,21.225975036621094,2.7143449783325195,181215.421875,124.12864685058594,0.0099319289565152,0.5240204334259033,0.0173721332103014,0.1121931640293443,, +kdd_cup_2018_with_missing (H),14.28,6.21s,1665.766357421875,16.97125244140625,3.7404980659484863,204.8477783203125,40.81380081176758,0.012390346292612,0.7473012208938599,0.0868938341736793,0.5036688158396798,, +bitbrains_fast_storage (H),15.64,0.54s,4370666.5,433.1981201171875,3.134866952896118,7533590.0,2090.614013671875,0.0357817905970949,0.6642678380012512,0.1082945317029953,0.8848439000937609,, +LOOP_SEATTLE (H),27.05,5.48s,45.80112457275391,3.704625129699707,1.2094013690948486,2.9955410957336426,6.767652988433838,0.0833599706835951,0.3856550455093384,0.0520103275775909,0.0779686167880097,, +solar (10T),33.4,18.61s,13.429282188415527,1.6477147340774536,4.154961109161377,21407.31640625,3.66459846496582,0.0412215746889869,1.4725667238235474,0.065751813352108,0.3620900494279583,, +m4_yearly (YE-DEC),51.4,0.36s,1995536.5,931.7138671875,1953.4176025390625,0.1414711624383926,1412.6346435546875,0.0093663615134665,0.155395969748497,0.0524347834289073,0.1384283334694768,, +bitbrains_rnd (5T),63.69,15.62s,1760675.25,171.27040100097656,5.844140529632568,28186.66015625,1326.9044189453125,0.0602251515981967,0.7826628088951111,0.1633218228816986,0.5711231344585912,, +electricity (H),110.58,25.54s,3578931.25,182.0205078125,0.9512049555778505,86359.09375,1891.806396484375,0.0024761863828005,0.5135901570320129,0.1669394522905349,0.0887649403943472,, +temperature_rain_with_missing (D),113.99,6.95s,178.54200744628906,6.84397554397583,1.239003300666809,121066.0,13.361961364746094,0.0137752177597875,1.4166418313980105,0.0793913006782531,0.7996685753736822,, +bitbrains_fast_storage (5T),160.06,39.11s,3204899.25,307.4036560058594,6.750433921813965,94736.125,1790.2232666015625,0.0280262965822048,0.7797213196754456,0.0495328195393085,0.5186760305028709,, +m4_quarterly (QE-DEC),163.93,10.32s,98274.4921875,79.8010025024414,476.1973571777344,0.0105551024898886,313.4876403808594,0.006174298156877,0.0107166962698102,0.0172789134085178,0.0129954022474126,, +m4_daily (D),316.28,76.65s,189277.609375,109.53118896484376,8.449933052062988,0.2787124216556549,435.0604553222656,0.0101673394536244,0.0459354817867279,0.0137499244883656,0.0184790231930776,, +LOOP_SEATTLE (5T),324.08,82.35s,69.71082305908203,4.590020656585693,1.916183352470398,3.0414836406707764,8.349300384521484,0.05277952219551,0.4036703705787658,0.0640279129147529,0.0965072733803788,, +electricity (15T),442.39,99.86s,284451.4375,58.41763687133789,2.008990526199341,26955.388671875,533.3399047851562,0.0027662858130575,0.5471258759498596,0.1690192073583603,0.1145084450326763,, +m4_monthly (ME),1025.34,306.38s,24029.677734375,15.839301109313965,62.82125473022461,0.002622629050165415,155.0150909423828,0.002022943042368745,0.002622885163873434,0.019591720774769783,0.0032794628335581825,, diff --git a/src/samay/dataset.py b/src/samay/dataset.py index 4595d08..247a335 100644 --- a/src/samay/dataset.py +++ b/src/samay/dataset.py @@ -347,7 +347,7 @@ def pad_sequence(self): def __getitem__(self, index): chunk_index = 
index // self.one_chunk_num data_chunk = self.data[:, chunk_index * self.max_col_num: (chunk_index + 1) * self.max_col_num] if (chunk_index + 1) * self.max_col_num < self.n_channels else self.data[:, chunk_index * self.max_col_num:] - seq_start = self.stride * index + seq_start = self.stride * (index % self.one_chunk_num) seq_end = seq_start + self.context_len input_mask = np.ones(self.context_len) # if the sequence is padded, mask of padded part is 0 @@ -497,7 +497,7 @@ def pad_sequence(self): def __getitem__(self, index): chunk_index = index // self.one_chunk_num data_chunk = self.data[:, chunk_index * self.max_col_num: (chunk_index + 1) * self.max_col_num] if (chunk_index + 1) * self.max_col_num < self.n_channels else self.data[:, chunk_index * self.max_col_num:] - seq_start = self.stride * index + seq_start = self.stride * (index % self.one_chunk_num) seq_end = seq_start + self.context_len pred_end = seq_end + self.horizon_len @@ -656,7 +656,7 @@ def pad_sequence(self): def __getitem__(self, index): chunk_index = index // self.one_chunk_num data_chunk = self.data[:, chunk_index * self.max_col_num: (chunk_index + 1) * self.max_col_num] if (chunk_index + 1) * self.max_col_num < self.n_channels else self.data[:, chunk_index * self.max_col_num:] - seq_start = self.stride * index + seq_start = self.stride * (index % self.one_chunk_num) seq_end = seq_start + self.seq_len input_mask = np.ones(self.seq_len) # if the sequence is padded, mask of padded part is 0 @@ -812,7 +812,7 @@ def pad_sequence(self): def __getitem__(self, index): chunk_index = index // self.one_chunk_num data_chunk = self.data[:, chunk_index * self.max_col_num: (chunk_index + 1) * self.max_col_num] if (chunk_index + 1) * self.max_col_num < self.n_channels else self.data[:, chunk_index * self.max_col_num:] - seq_start = self.stride * index + seq_start = self.stride * (index % self.one_chunk_num) seq_end = seq_start + self.context_len pred_end = seq_end + self.horizon_len @@ -856,7 +856,7 @@ def __init__( name=None, datetime_col=None, path=None, - batchsize=8, + batchsize=16, mode="train", boundaries=[0, 0, 0], horizon=0, @@ -881,12 +881,11 @@ def __init__( self.forecast_horizon = horizon self.boundaries = boundaries - self._read_data() - self.required_len = self.seq_len + self.forecast_horizon + self.max_col_num = 64 self.pad = False - self.pad_len = 0 - if self.length_timeseries < self.required_len: - self.pad = True + self._read_data() + + self.one_chunk_num = (self.length_timeseries - self.seq_len - self.forecast_horizon) // self.stride + 1 def _read_data(self): self.scaler = StandardScaler() @@ -899,6 +898,12 @@ def _read_data(self): if self.boundaries[2] == 0: self.boundaries[2] = int(len(self.df) - 1) + if self.boundaries == [-1, -1, -1]: + # use all data for training + self.boundaries = [0, 0, len(self.df) - 1] + + self.forecast_horizon = min(self.forecast_horizon, int(0.3 * len(self.df) + 1)) + if self.task_name == "detection": self.n_channels = 1 else: @@ -972,18 +977,33 @@ def _read_data(self): ) self.length_timeseries = self.data.shape[0] + self.required_len = self.seq_len + self.forecast_horizon + self.pad_len = 0 + if self.length_timeseries < self.required_len: + self.pad = True + if self.pad: + self.pad_sequence() + self.num_chunks = ( + self.n_channels + self.max_col_num - 1 + ) // self.max_col_num def pad_sequence(self): self.pad_len = self.required_len - self.length_timeseries # Pad data with zeros from the left self.data = np.pad(self.data, ((self.pad_len, 0), (0, 0))) + # If num of channels isn't multiple 
of max_col_num, pad with zeros + if self.n_channels % self.max_col_num != 0: + self.data = np.pad( + self.data, ((0, 0), (0, self.max_col_num - self.n_channels % self.max_col_num)) + ) self.length_timeseries = self.data.shape[0] def __getitem__(self, index): - if self.pad: - self.pad_sequence() - seq_start = self.stride * index + chunk_index = index // self.one_chunk_num + data_chunk = self.data[:, chunk_index * self.max_col_num: (chunk_index + 1) * self.max_col_num] if (chunk_index + 1) * self.max_col_num < self.n_channels else self.data[:, chunk_index * self.max_col_num:] + + seq_start = self.stride * (index % self.one_chunk_num) seq_end = seq_start + self.seq_len input_mask = np.ones(self.seq_len) # if the sequence is padded, mask of padded part is 0 @@ -996,18 +1016,22 @@ def __getitem__(self, index): seq_end = pred_end - self.forecast_horizon seq_start = seq_end - self.seq_len - input_seq = self.data[seq_start:seq_end, :].T + # input_seq = self.data[seq_start:seq_end, :].T + input_seq = data_chunk[seq_start:seq_end, :].T if self.task_name == "forecasting": - forecast_seq = self.data[seq_end:pred_end, :].T + # forecast_seq = self.data[seq_end:pred_end, :].T + forecast_seq = data_chunk[seq_end:pred_end, :].T return input_seq, input_mask, forecast_seq elif self.task_name == "imputation": return input_seq, input_mask elif self.task_name == "forecasting2": - input_seq = self.data[pred_end - self.seq_len : pred_end, :].T + # input_seq = self.data[pred_end - self.seq_len : pred_end, :].T + input_seq = data_chunk[seq_end - self.seq_len : seq_end, :].T input_mask[seq_end:pred_end] = 0 input_mask[self.pad_len :] = 1 # input_mask[: self.pad_len] = 0 - forecast_seq = self.data[seq_end:pred_end, :].T + # forecast_seq = self.data[seq_end:pred_end, :].T + forecast_seq = data_chunk[seq_end:pred_end, :].T return input_seq, input_mask, forecast_seq elif self.task_name == "detection": labels = ( @@ -1026,10 +1050,12 @@ def __len__(self): if self.task_name == "classification": return self.num_series if self.length_timeseries < self.seq_len + self.forecast_horizon: - return 1 - return ( - self.length_timeseries - self.seq_len - self.forecast_horizon - ) // self.stride + 1 + # return 1 + return 1 * self.num_chunks + # return ( + # self.length_timeseries - self.seq_len - self.forecast_horizon + # ) // self.stride + 1 + return self.num_chunks * self.one_chunk_num def get_data_loader(self): if self.mode == "train": diff --git a/src/samay/metric.py b/src/samay/metric.py index 949cc17..141d07f 100644 --- a/src/samay/metric.py +++ b/src/samay/metric.py @@ -29,7 +29,7 @@ def MASE(y_true:np.array, y_pred:np.array, freq:str='h'): "QE": 4, } # seasonality = DEFAULT_SEASONALITIES[freq] - y_t = y_true[1:] - y_true[:-1] + y_t = y_true[:, :, 1:] - y_true[:, :, :-1] return np.mean(np.abs(y_true - y_pred) / (np.mean(np.abs(y_t)) + 1e-5)) diff --git a/src/samay/model.py b/src/samay/model.py index 7885043..eb8f2ac 100644 --- a/src/samay/model.py +++ b/src/samay/model.py @@ -794,7 +794,29 @@ def evaluate(self, dataset, task_name="forecasting"): preds = np.concatenate(preds, axis=0) histories = np.concatenate(histories, axis=0) - return average_loss, trues, preds, histories + mse = MSE(trues, preds) + mae = MAE(trues, preds) + mase = MASE(trues, preds) + mape = MAPE(trues, preds) + rmse = RMSE(trues, preds) + nrmse = NRMSE(trues, preds) + smape = SMAPE(trues, preds) + msis = MSIS(trues, preds) + nd = ND(trues, preds) + + return { + "mse": mse, + "mae": mae, + "mase": mase, + "mape": mape, + "rmse": rmse, + "nrmse": nrmse, + 
"smape": smape, + "msis": msis, + "nd": nd, + } + + # return average_loss, trues, preds, histories elif task_name == "imputation": trues, preds, masks = [], [], [] @@ -1257,7 +1279,7 @@ def __init__(self, config=None, repo=None): self.model = TinyTimeMixerForPrediction.from_pretrained( repo, revision=revision, prediction_filter_length=horizon_len ) - self.model = self.model.to(self.device) + # self.model = self.model.to(self.device) else: raise ValueError("TinyTimeMixer model requires a repository") @@ -1267,6 +1289,7 @@ def finetune(self, dataset, **kwargs): dataset: dataset for finetuning, call get_data_loader() to get the dataloader """ dataloader = dataset.get_data_loader() + self.model.to(self.device) self.model.train() optimizer = torch.optim.Adam(self.model.parameters(), lr=1e-4) for epoch in range(5): @@ -1293,6 +1316,7 @@ def plot(self, dataset, **kwargs): """ dataloader = dataset.get_data_loader() trues, preds, histories = [], [], [] + self.model.to(self.device) self.model.eval() with torch.no_grad(): for i, data in enumerate(dataloader): @@ -1327,6 +1351,7 @@ def evaluate(self, dataset, **kwargs): """ dataloader = dataset.get_data_loader() trues, preds, histories = [], [], [] + self.model.to(self.device) self.model.eval() with torch.no_grad(): for i, data in enumerate(dataloader): @@ -1343,6 +1368,7 @@ def evaluate(self, dataset, **kwargs): preds = np.concatenate(preds, axis=0) histories = np.concatenate(histories, axis=0) + print(trues.shape, preds.shape, histories.shape) mse = MSE(trues, preds) mae = MAE(trues, preds) mase = MASE(trues, preds) From a8e2f02722e7d0042f2d93cfd035f90a40d2e953 Mon Sep 17 00:00:00 2001 From: Risto0211 <2533895673@qq.com> Date: Wed, 9 Apr 2025 10:34:20 -0400 Subject: [PATCH 2/3] monash exps and modification to gpu using & utils --- leaderboard.py | 137 ++++++++++++++++++++--------- leaderboard/monash_chronosbolt.csv | 28 ------ leaderboard/monash_lptm.csv | 28 ++++++ leaderboard/monash_moment.csv | 55 ++++++------ leaderboard/monash_timesfm.csv | 56 ++++++------ leaderboard/monash_ttm.csv | 56 ++++++------ src/samay/utils.py | 38 ++++++-- 7 files changed, 239 insertions(+), 159 deletions(-) delete mode 100644 leaderboard/monash_chronosbolt.csv create mode 100644 leaderboard/monash_lptm.csv diff --git a/leaderboard.py b/leaderboard.py index ce70fe9..febf2a6 100644 --- a/leaderboard.py +++ b/leaderboard.py @@ -4,6 +4,7 @@ import pandas as pd import time import torch +import gc src_path = os.path.abspath(os.path.join("src")) if src_path not in sys.path: @@ -11,7 +12,7 @@ from samay.model import TimesfmModel, MomentModel, ChronosModel, ChronosBoltModel, TinyTimeMixerModel, MoiraiTSModel, LPTMModel from samay.dataset import TimesfmDataset, MomentDataset, ChronosDataset, ChronosBoltDataset, TinyTimeMixerDataset, MoiraiDataset, LPTMDataset -from samay.utils import load_args, get_gifteval_datasets +from samay.utils import load_args, get_gifteval_datasets, get_monash_datasets from samay.metric import * @@ -30,46 +31,42 @@ # "restaurant": ['D'], # } -start = time.time() -NAMES, filesizes = get_gifteval_datasets("data/gifteval") -end = time.time() - -print(f"Time taken to load datasets: {end-start:.2f} seconds") +SERIES = "monash" MODEL_NAMES = ["moirai", "chronos", "chronosbolt", "timesfm", "moment", "ttm", "lptm"] MONASH_NAMES = { # "weather": "1D", - "tourism_yearly": ["1YE"], - "tourism_quarterly": ["1Q"], - "tourism_monthly": ["1M"], - "cif_2016": ["1M"], + "tourism_yearly": "1YE", + "tourism_quarterly": "1Q", + "tourism_monthly": "1M", + "cif_2016": "1M", 
# "london_smart_meters": ["30min"], - "australian_electricity_demand": ["30min"], + "australian_electricity_demand": "30min", # "wind_farms_minutely": ["1min"], - "bitcoin": ["1D"], - "pedestrian_counts": ["1h"], - "vehicle_trips": ["1D"], - "kdd_cup_2018": ["1H"], - "nn5_daily": ["1D"], - "nn5_weekly": ["1W"], + "bitcoin": "1D", + "pedestrian_counts": "1h", + "vehicle_trips": "1D", + "kdd_cup_2018": "1H", + "nn5_daily": "1D", + "nn5_weekly": "1W", # "kaggle_web_traffic": ["1D"], # "kaggle_web_traffic_weekly": ["1W"], - "solar_10_minutes": ["10min"], - "solar_weekly": ["1W"], - "car_parts": ["1M"], - "fred_md": ["1M"], - "traffic_hourly": ["1h"], - "traffic_weekly": ["1W"], - "hospital": ["1M"], - "covid_deaths": ["1D"], - "sunspot": ["1D"], - "saugeenday": ["1D"], - "us_births": ["1D"], - "solar_4_seconds": ["4s"], - "wind_4_seconds": ["4s"], - "rideshare": ["1h"], - "oikolab_weather": ["1h"], - "temperature_rain": ["1D"] + "solar_10_minutes": "10min", + "solar_weekly": "1W", + "car_parts": "1M", + "fred_md": "1M", + "traffic_hourly": "1h", + "traffic_weekly": "1W", + "hospital": "1M", + "covid_deaths": "1D", + "sunspot": "1D", + "saugeenday": "1D", + "us_births": "1D", + "solar_4_seconds": "4s", + "wind_4_seconds": "4s", + "rideshare": "1h", + "oikolab_weather": "1h", + "temperature_rain": "1D" } MONASH_SETTINGS = { @@ -113,6 +110,19 @@ "chronos": 512 } +start = time.time() +if SERIES == "gifteval": + # Load the datasets from the Gifteval dataset + NAMES = get_gifteval_datasets("data/gifteval") +elif SERIES == "monash": + # Load the datasets from the Monash dataset + NAMES = get_monash_datasets("data/monash", MONASH_NAMES, MONASH_SETTINGS) + +end = time.time() +print(NAMES) + +print(f"Time taken to load datasets: {end-start:.2f} seconds") + def calc_pred_and_context_len(freq): # split feq into base and multiplier @@ -147,10 +157,12 @@ def calc_pred_and_context_len(freq): if __name__ == "__main__": - for model_name in ["ttm"]: + for model_name in ["moment"]: print(f"Evaluating model: {model_name}") # create csv file for leaderboard if not already created csv_path = f"leaderboard/{model_name}.csv" + if SERIES == "monash": + csv_path = f"leaderboard/monash_{model_name}.csv" if not os.path.exists(csv_path): print(f"Creating leaderboard csv file: {csv_path}") df = pd.DataFrame(columns=["dataset", "size_in_MB", "eval_time", "mse", "mae", "mase", "mape", "rmse", "nrmse", "smape", "msis", "nd", "mwsq", "crps"]) @@ -176,12 +188,23 @@ def calc_pred_and_context_len(freq): arg_path = "config/lptm.json" args = load_args(arg_path) - for fname, freq, fs in filesizes: + for fpath, attrs in NAMES.items(): + if SERIES == "monash": + freq = attrs[0] + horizon = attrs[1] + fs = attrs[2] + elif SERIES == "gifteval": + freq = attrs[0] + fs = attrs[1] + fname = fpath.split("/")[2] print(f"Evaluating {fname} ({freq})") # Adjust the context and prediction length based on the frequency # pred_len, context_len = calc_pred_and_context_len(freq) pred_len, context_len = 96, 512 + if SERIES == "monash": + pred_len = horizon + if model_name == "timesfm": args["config"]["horizon_len"] = pred_len args["config"]["context_len"] = context_len @@ -194,12 +217,8 @@ def calc_pred_and_context_len(freq): args["config"]["horizon_len"] = pred_len args["config"]["context_len"] = context_len - # Set the dataset path - if len(NAMES.get(fname)) == 1: - dataset_path = f"data/gifteval/{fname}/data.csv" - else: - dataset_path = f"data/gifteval/{fname}/{freq}/data.csv" - + dataset_path = fpath + if model_name == "timesfm": dataset = 
TimesfmDataset(datetime_col='timestamp', path=dataset_path, mode='test', context_len=args["config"]["context_len"], horizon_len=args["config"]["horizon_len"], boundaries=(-1, -1, -1), batchsize=64) @@ -212,6 +231,10 @@ def calc_pred_and_context_len(freq): print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + del model + torch.cuda.empty_cache() + gc.collect() + elif model_name == "moment": args["config"]["task_name"] = "forecasting" @@ -227,6 +250,13 @@ def calc_pred_and_context_len(freq): print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") print(metrics) + del model + del finetuned_model + del dataset + del train_dataset + torch.cuda.empty_cache() + gc.collect() + elif model_name == "chronos": dataset_config = load_args("config/chronos_dataset.json") @@ -241,6 +271,11 @@ def calc_pred_and_context_len(freq): print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + del model + del dataset + torch.cuda.empty_cache() + gc.collect() + elif model_name == "chronosbolt": repo = "amazon/chronos-bolt-small" model = ChronosBoltModel(repo=repo) @@ -251,6 +286,11 @@ def calc_pred_and_context_len(freq): print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + del model + del dataset + torch.cuda.empty_cache() + gc.collect() + elif model_name == "ttm": dataset = TinyTimeMixerDataset(datetime_col='timestamp', path=dataset_path, mode='test', context_len=context_len, horizon_len=pred_len, boundaries=[-1, -1, -1]) @@ -262,6 +302,11 @@ def calc_pred_and_context_len(freq): print("Metrics: ", metrics) print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + + del model + del dataset + torch.cuda.empty_cache() + gc.collect() elif model_name == "moirai": model = MoiraiTSModel(**args) @@ -274,6 +319,11 @@ def calc_pred_and_context_len(freq): print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + del model + del dataset + torch.cuda.empty_cache() + gc.collect() + elif model_name == "lptm": args["config"]["task_name"] = "forecasting2" dataset = LPTMDataset(name=fname, datetime_col='timestamp', task_name="forecasting2", @@ -285,6 +335,11 @@ def calc_pred_and_context_len(freq): end = time.time() print(f"Size of dataset: {fs:.2f} MB") print(f"Time taken for evaluation of {fname}: {end-start:.2f} seconds") + + del model + del dataset + torch.cuda.empty_cache() + gc.collect() print("Evaluation done!") diff --git a/leaderboard/monash_chronosbolt.csv b/leaderboard/monash_chronosbolt.csv deleted file mode 100644 index dad5d3d..0000000 --- a/leaderboard/monash_chronosbolt.csv +++ /dev/null @@ -1,28 +0,0 @@ -dataset,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -tourism_yearly,444141692.32991457,1091.6817688190906,0.041673069482334,258942.9412618748,21074.669447702247,0.0056080123981317,0.069903645118077,2315085360.881909,0.0937644062905105,239.86975188013847,105468655.5279068 -tourism_quarterly,6928432844.955573,16058.179070454917,0.219953558391302,17033866.447995476,83237.20829626359,0.0118343129663926,0.7357593638009957,0.129624555852842,0.3376971205486341,216650372.01311347,57348306032441.02 -tourism_monthly,190129684.77504933,3006.765480087757,0.5812615013938789,0.5539094888596078,13788.752110870995,0.0548260521285198,1.1077688946932025,0.0580983604960161,0.9999999966719292,0.0003618518892388,32.20209788417588 
-cif_2016,1.203214870564485e+16,10224691.385480355,0.4199903410326607,1021039116.791298,109691151.44643553,0.0739733070305259,0.8060840675419945,2.0952376901855407,0.8399768110936794,11067973489.49984,2.174084312096058e+18 -australian_electricity_demand,3341478.908718931,712.8013315638235,,0.2392915110667949,1827.9712548940508,0.2657256314340223,0.4784888274983774,0.0547413870139505,0.9999999860293306,0.0001166485832802,0.6991631823939618 -bitcoin,1027181666879.6758,163108.16262782528,,0.1986968013352183,1013499.7123234302,0.1522775090971237,0.3972998371276115,0.0010149408681403,0.9999999999389824,0.0209032186294409,186509.0281783463 -pedestrian_counts,228594.34047321105,128.47884119836914,0.503464522496511,0.1560662013498633,478.115404973748,0.0734432264576522,0.312031576953403,0.0253257396070908,0.9999999225750844,1.8699487607833263e-05,0.0459507402418486 -vehicle_trips,12944.928988510794,32.11006947344806,0.5454305395333896,0.3289663930775494,113.77578384045874,0.1142327135523409,0.6578524140559953,0.0401714920146534,0.999999689474565,3.976037404105965e-06,0.0023768851609593 -kdd_cup_2018,5859.462274087244,20.07007816671078,0.6065589496924628,0.220538504946128,76.54712453180227,0.1040749469626822,0.4415058613042274,0.0260086316122621,1.0000364849647414,0.0003742988248793,0.0023274558891799 -nn5_daily,84.31101334863708,3.8252922590923766,1.546564363951125,0.2199299254310309,9.182102882708136,0.1372937272737188,0.4397646271359306,0.0864441601113176,0.9999973973145824,5.936201735498581e-07,1.6354225978457626e-05 -nn5_weekly,1434.0553996533272,25.91377713704548,0.3999215499897716,0.2169826998834618,37.86892393049118,0.0969497218088367,0.2384182640276755,0.1038714168011784,0.2471726169922481,1659.5134230779124,147781.99617711874 -solar_10_minutes,0.1193782517528535,0.0210894843515833,0.5453868444274433,0.0253123334121342,0.3455115797666607,0.0193563801794318,0.0505052039691868,2108.9487197905933,0.9995295901274224,8.345428103678382e-08,4.64501025476016e-08 -solar_weekly,3115173.9774587457,1268.2185252844597,0.3964157272080421,0.2898165957313559,1764.9855459631235,0.0987733284632026,0.2322388023965503,0.14352721870811,0.3609709695986244,-5750399.354954169,14351019438.737406 -car_parts,1.4482230689164717,0.4312706358651241,0.6238069437176991,3591.236062680824,1.2034214012208988,0.0300855275091405,0.9228552893912162,0.101706622670986,1.0377270347805376,0.1260305670088572,0.4300970526348686 -fred_md,596678701.7317729,4747.458188851477,0.418512827719631,0.6901471294216209,24427.00762950249,0.0792932358470027,1.2341052375211934,0.0547806777312514,0.835420873265261,9464211.357160904,271734124990.43045 -traffic_hourly,0.0012408267518655,0.0164771175073004,1.920375330283732,0.2803240677344528,0.0352253708549039,0.0923556562620379,0.5605629458709293,0.1283124850031318,0.9993953964097638,5.6946830577454864e-08,2.426979569144636e-10 -traffic_weekly,4.330103429838062,1.5275869707844083,0.3660947153271351,0.1544419116201267,2.0808900571241296,0.0588545095973592,0.1725321287321687,0.0851924259707548,0.1630502056889859,11.097874348338795,35.70920819671376 -hospital,538650.1377824282,219.3589574775862,0.5111717857336138,0.8134917999289581,733.9278832299725,0.0632641912419042,1.4303838519944216,0.0738407343066312,0.8331902825793209,19364.327364448287,31426275.1677896 -covid_deaths,10885.932603545672,4.382604233492268,0.4113709230009012,0.0620728957504436,104.33567272771892,0.035260450278849,0.1240379439938584,1.4129466990756654,0.9999977335047028,6.094549571428515e-07,0.0019350583597334 
-sunspot,1908.999997991608,15.400000054947062,,0.1731458620114036,43.69210452692349,0.2275630325588185,0.3461901848530289,0.0662351601844637,0.9999993542177612,2.699789169486293e-06,0.0004650855344745 -saugeenday,269.8163317717818,11.023333342256688,,0.4708444976961905,16.426086928169525,0.4874208324617566,0.9416232138456884,0.327101782529343,0.9999990936437032,1.432314843488929e-06,4.855334875272051e-05 -us_births,43719532.26602842,4511.600000008923,,0.4708447023482694,6612.074732338438,0.6386009972911365,0.9416236259946013,0.3768951160167986,0.9999999977854692,0.0005369385937169,7.587445059005216 -solar_4_seconds,6.135621587873876e-15,7.747566570515118e-08,,0.0077475665705151,7.833020865460449e-08,0.0078330208654604,0.0153734036898451,0.0077475665705151,0.0077475665705151,7.747565615178496e-08,6.135620772494481e-15 -wind_4_seconds,69.02933271778653,4.013333375021395,,0.2392913829873439,8.308389297438254,0.4830456085454749,0.4784885690779666,0.2333332000982206,0.9999975186992214,7.204877227383475e-07,1.4554829158972376e-05 -rideshare,41.59257181027639,1.9854899661244407,0.9003329006690424,0.2536560772838738,6.449230326967427,0.07010032202135,0.5072268674056887,0.0379857954952194,0.999994981093036,3.290119220325788e-07,7.992024858382082e-06 -oikolab_weather,376969134.2603373,3722.07442711643,,0.2737455091364411,19415.692989443804,0.1906107690066239,0.5474063116953172,-0.0154761834873608,0.9999999973222178,0.0005518700247492,74.02871341393308 -temperature_rain,97.59024449532855,2.484052272779186,0.6232494721809301,0.2042947760327026,9.878777479796199,0.056311145724182256,0.40849688722974814,0.025763328950111283,0.9999959928198491,3.7952170635389444e-07,1.8271202610847874e-05 diff --git a/leaderboard/monash_lptm.csv b/leaderboard/monash_lptm.csv new file mode 100644 index 0000000..04e0c29 --- /dev/null +++ b/leaderboard/monash_lptm.csv @@ -0,0 +1,28 @@ +dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps +solar_weekly (1W),0.06,2.75s,0.55713016,0.5827845,0.9193981,2312.94,0.7464115,0.241527814113271,1.279668,0.24223992,-0.7101555789460262,, +cif_2016 (1M),0.08,1.09s,4.79622220993042,1.1191126108169556,2.232949495315552,7669.09130859375,2.190027952194214,0.0709856265447052,1.8812631368637085,0.090973824262619,-89.32242458981342,, +nn5_weekly (1W),0.12,1.08s,2.3922371864318848,1.0009725093841553,1.1899375915527344,1442.516357421875,1.546685814857483,0.0906475411054615,1.7222161293029783,0.1542649567127227,-3.767426830290444,, +us_births (1D),0.13,0.71s,0.0336517021059989,0.151817038655281,27.325857162475582,14743.533203125,0.183444008231163,0.0554849329327153,1.9972188472747805,15231.9658203125,59.965669084134255,, +tourism_yearly (1YE),0.16,1.42s,1.0759775638580322,0.7509145736694336,709.8499145507812,3395.483154296875,1.0372934341430664,0.0531310641371135,1.0066295862197876,0.2932159602642059,-0.6346815359582841,, +covid_deaths (1D),0.27,0.85s,8761.462890625,3.141115188598633,4.273681640625,14859.7431640625,93.6026840209961,0.0162782355205312,1.9895230531692505,1.476721167564392,1.0483660517334383,, +hospital (1M),0.35,1.34s,2.1187047958374023,1.0012705326080322,1.0728511810302734,1518.9754638671875,1.4555771350860596,0.0446312104551212,1.7474713325500488,0.135364681482315,3.2008420339431023,, +tourism_quarterly (1Q),0.36,1.2s,29.753582000732425,3.124391555786133,1.7342491149902344,456.3225402832031,5.454684257507324,0.0747293067958182,1.7171670198440552,0.1756040006875991,1.0298862920506142,, +vehicle_trips 
(1D),0.36,1.35s,18.384183883666992,0.709293007850647,2.246061325073242,20468.33984375,4.287678241729736,0.02245668686585,1.879390835762024,0.1616765558719635,5.000655293509141,, +saugeenday (1D),0.38,0.76s,0.3124355673789978,0.2036107331514358,28.51386260986328,16874.26953125,0.5589593648910522,0.0582417827571362,1.9996287822723389,20206.2734375,5.970269635177577,, +fred_md (1M),0.53,0.45s,6.5625529289245605,1.7052522897720337,4.985159873962402,8391.3681640625,2.5617480278015137,0.1829576307677099,1.8083301782608032,0.1668658256530761,1.2715349480369065,, +car_parts (1M),0.56,4.48s,3.691395998001098,0.7357101440429688,1.0231982469558716,3912.867431640625,1.9213006496429443,0.0120119613194862,1.7561115026474,0.0970971286296844,15.200462193629782,, +traffic_weekly (1W),0.66,1.17s,7.349606513977051,1.2065640687942505,1.4027974605560305,826.66796875,2.7110157012939453,0.0360857042045679,1.5174293518066406,0.1373468190431594,2.7181206916176945,, +bitcoin (1D),0.72,0.25s,0.1237392500042915,0.2868958115577698,25.73795509338379,26060.74609375,0.351765900850296,0.1303279725756736,1.977872967720032,1.0599027872085571,22.854588971117472,, +tourism_monthly (1M),0.78,0.64s,1.5662906169891355,0.5129481554031372,1.1300078630447388,6100.40478515625,1.25151526927948,0.0303415244706523,1.89304268360138,0.076696291565895,2.500350122152087,, +nn5_daily (1D),0.85,0.35s,0.3261348903179168,0.2673821151256561,1.2734382152557373,11132.4736328125,0.5710822343826294,0.0495283246707147,1.9464800357818604,0.0659900605678558,129.1232018653477,, +sunspot (1D),1.19,0.41s,0.0698287338018417,0.2156486213207245,672.8484497070312,21189.58203125,0.2642512619495392,0.4585308196784577,1.998434901237488,21414.615234375,-72.08688126226609,, +rideshare (1h),4.01,3.8s,25.14054298400879,1.1094075441360474,2.419894933700561,12233.5947265625,5.014034748077393,0.0192455103716894,1.9561517238616943,0.0673308223485946,-1.2555885759067225,, +oikolab_weather (1h),6.78,0.3s,0.1821108907461166,0.2105907201766967,21.31402015686035,16262.7001953125,0.4267445206642151,0.0508019842672087,1.99454402923584,21048.494140625,-26.40310529966788,, +kdd_cup_2018 (1H),17.6,0.48s,70.37008666992188,0.5598122477531433,2.7799301147460938,15287.068359375,8.388688087463379,0.0127827673606529,1.948464035987854,0.1162315011024475,1.7011658157014553,, +australian_electricity_demand (30min),22.54,0.36s,0.0759173110127449,0.2061913460493087,35.979957580566406,19320.037109375,0.2755309641361236,0.0687695600881552,1.995548248291016,20611.5234375,26.680269034354897,, +pedestrian_counts (1h),33.02,0.44s,1.2072677612304688,0.2849870026111603,3.512889623641968,16806.091796875,1.0987573862075806,0.0157932567166886,1.966840744018555,0.150884136557579,10.085724999814484,, +solar_10_minutes (10min),33.4,0.81s,2.580330371856689,0.4769264161586761,4.405221939086914,17537.79296875,1.6063406467437744,0.0505933902617282,1.9817545413970947,0.0485753044486045,1.5941990511354822,, +traffic_hourly (1h),104.18,1.45s,0.6358243227005005,0.3618080615997314,2.248907804489136,10494.185546875,0.7973859310150146,0.0356483568368512,1.927661657333374,0.052518717944622,2.510382010989376,, +temperature_rain (1D),106.72,72.69s,3.798187017440796,0.4533283710479736,1.1610000133514404,12320.748046875,1.9488937854766848,0.0047840804208083,1.8536553382873533,0.0890497490763664,3.049539429811166,, +solar_4_seconds (4s),181.73,0.4s,0.0229806378483772,0.1275054365396499,12750.54296875,12750.54296875,0.1515936553478241,15159.365534782408,1.999740481376648,12750.54296875,12750.543653964996,, 
+wind_4_seconds (4s),184.15,0.48s,0.05065712705254555,0.1873578429222107,362.3088073730469,18218.080078125,0.22507138550281525,0.1175367378412336,1.9996145963668823,18697.71484375,-37.64700963603436,, diff --git a/leaderboard/monash_moment.csv b/leaderboard/monash_moment.csv index 71410f8..c5a026a 100644 --- a/leaderboard/monash_moment.csv +++ b/leaderboard/monash_moment.csv @@ -1,28 +1,27 @@ -dataset,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -tourism_yearly,0.1193213462829589,0.0552518963813781,0.0502919629216194,-0.0192584041506052,0.3454292118549347,0.0170845361689556,0.0596492551267147,0.0432840399444103,-0.0332806138839186,, -tourism_quarterly,15.174468994140623,2.131948232650757,0.4453669488430023,0.6742493510246277,3.895442008972168,0.0364306505544808,0.6830410361289978,0.100502148270607,0.5155130011882703,, -tourism_monthly,,,,,,,,,,, -cif_2016,3.051345109939575,0.770009458065033,0.1008551716804504,2.7060635089874268,1.746809959411621,0.0052530776336364,0.5942800641059875,0.4276498556137085,-0.3124985771275554,, -australian_electricity_demand,,,,,,,,,,, -bitcoin,,,,,,,,,,, -pedestrian_counts,,,,,,,,,,, -vehicle_trips,,,,,,,,,,, -kdd_cup_2018,,,,,,,,,,, -nn5_daily,,,,,,,,,,, -nn5_weekly,7.142962455749512,1.8851556777954104,0.9576101303100586,-0.4964344501495361,2.6726322174072266,0.0984423137013404,1.3164684772491455,0.1382441818714141,-15.28806140511234,, -solar_10_minutes,,,,,,,,,,, -solar_weekly,1.0168001651763916,0.7820338010787964,1.376811146736145,-0.7340742945671082,1.0083651542663574,0.4434716977863531,1.3281521797180176,0.4218072891235351,-1.0606733235349932,, -car_parts,3.754984617233277,0.7155598402023315,0.6451520919799805,4147.560546875,1.9377782344818115,0.0139375998713791,0.7457103133201599,0.1027182564139366,16.873369196029433,, -fred_md,2.542620658874512,1.1022943258285522,0.3314592540264129,0.6176068782806396,1.594559669494629,0.0840454737283687,0.7133917212486267,0.1085511520504951,0.6246864106058683,, -traffic_hourly,,,,,,,,,,, -traffic_weekly,5.216869831085205,0.8759592771530151,0.4404235184192657,-1.4892243146896362,2.2840468883514404,0.0377834815133501,0.7395275831222534,0.1350718736648559,0.947835748847314,, -hospital,3.000790596008301,1.285870909690857,0.8030640482902527,120.14913177490234,1.7322790622711182,0.0711396958176925,1.35271418094635,0.2003196626901626,4.766237518108584,, -covid_deaths,,,,,,,,,,, -sunspot,,,,,,,,,,, -saugeenday,,,,,,,,,,, -us_births,,,,,,,,,,, -solar_4_seconds,,,,,,,,,,, -wind_4_seconds,,,,,,,,,,, -rideshare,,,,,,,,,,, -oikolab_weather,,,,,,,,,,, -temperature_rain,,,,,,,,,,, +dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps +solar_weekly (1W),0.06,0.7s,1.3988211154937744,0.91600239276886,1.5458827018737793,-0.7656228542327881,1.1827176809310913,0.4064341473373669,1.4792702198028564,0.4341770708560943,-1.2266530751609337,, +cif_2016 (1M),0.08,0.55s,6.545669078826904,1.343870997428894,3.9534099102020255,-0.00933179538697,2.55845046043396,0.069877693018643,0.7147252559661865,0.0875509828329086,-1466.3242474669391,, +nn5_weekly (1W),0.12,0.49s,1.7520450353622437,0.9541510343551636,1.0868446826934814,0.7561516761779785,1.3236483335494995,0.1407169144325989,1.2309998273849487,0.1161348819732666,1.6034220729905373,, +us_births (1D),0.13,54.06s,0.0159488450735807,0.0130975423380732,1.1550896167755127,-0.1392997205257415,0.1262887418270111,0.0195636326879018,0.1026146560907363,2354.62744140625,1.71883753109232,, +tourism_yearly 
(1YE),0.16,2.1s,3.56056547164917,1.5736219882965088,715.8680419921875,-0.8474381566047668,1.886946082115173,0.0933262083297813,1.4369211196899414,0.358586698770523,-0.9478607834606614,, +covid_deaths (1D),0.27,1.16s,3557.487060546875,20.674468994140625,49.37272262573242,0.8411641120910645,59.64467239379883,0.0815842526250348,0.8856794238090515,0.0969743132591247,0.8685581696281686,, +hospital (1M),0.35,2.82s,2.024125099182129,1.078792929649353,1.1011804342269895,16.25428009033203,1.4227174520492554,0.1072754030081362,1.3363966941833496,0.1306593120098114,2.022076813814543,, +tourism_quarterly (1Q),0.36,1.65s,14.379294395446776,3.116183042526245,33.51498031616211,-1.73103928565979,3.792004108428955,0.1048896272565427,1.6957045793533323,0.382441759109497,-1.850251070704766,, +vehicle_trips (1D),0.36,1.41s,3.5303359031677246,1.154481053352356,162.26605224609375,-0.5684612393379211,1.8789187669754028,0.0890248611167094,0.905729353427887,0.104294940829277,-0.677820671785667,, +saugeenday (1D),0.38,185.53s,0.016956677660346,0.0095159644261002,5.099930763244629,0.0381901450455188,0.1302178055047989,0.0077757430997683,0.1075708419084549,956.0369873046876,57.65071425206276,, +fred_md (1M),0.53,7.94s,205.455078125,2.7924537658691406,20.967544555664062,0.1768457442522049,14.333703994750977,0.0512047346165328,0.5596998333930969,0.1076751500368118,0.632347032854926,, +car_parts (1M),0.56,9.91s,4.2449727058410645,0.679968535900116,0.8603194355964661,361.6725769042969,2.060333251953125,0.0142473809029873,0.9980092644691468,0.0781594142317771,9.21876799818671,, +traffic_weekly (1W),0.66,3.26s,6.723269939422607,1.2145947217941284,1.2642812728881836,-0.8018678426742554,2.5929269790649414,0.0239835499373194,1.16968035697937,0.1275652796030044,1.7758931926046486,, +bitcoin (1D),0.72,32.19s,1490.8427734375,4.883283615112305,7.665090084075928,0.0953329205513,38.61143493652344,0.0177670061581149,0.2273775041103363,0.4201638996601105,0.4099574670801922,, +tourism_monthly (1M),0.78,1.39s,115.5609130859375,4.321318626403809,2.2380928993225098,-0.5813080072402954,10.749926567077637,0.0336979224313633,1.104116916656494,0.104200042784214,1.0472959116316596,, +nn5_daily (1D),0.85,1.81s,1.2503522634506226,0.8067260980606079,0.9635328650474548,0.0295346174389123,1.118191480636597,0.090564745666134,1.399633288383484,0.1262142956256866,4.606585833155049,, +sunspot (1D),1.19,600.91s,0.0088533908128738,0.0085975946858525,2.2324674129486084,0.0516191236674785,0.094092458486557,0.0127232618618424,0.0866450816392898,2515.24755859375,3.356061209124168,, +rideshare (1h),4.01,5.64s,0.849719226360321,0.8600947856903076,3.899453401565552,3.924179553985596,0.9218021631240844,0.1206409716181236,1.9330228567123413,0.5819978713989258,1.4664858441522657,, +oikolab_weather (1h),6.78,503.68s,0.1066711917519569,0.0919519737362861,4.596961498260498,0.0383840799331665,0.3266055583953857,0.0336271204119031,0.2367980480194091,0.0218137428164482,93.41455609047507,, +kdd_cup_2018 (1H),17.6,264.04s,0.634676456451416,0.4862099289894104,3.049732685089112,3.153933048248291,0.7966658473014832,0.014831770539352,1.1020643711090088,0.1236260831356048,-15.695044709143325,, +australian_electricity_demand (30min),22.54,962.11s,0.0715997517108917,0.0609600991010665,5.858462333679199,0.1000976637005806,0.2675813138484955,0.0216442121563734,0.1817545592784881,0.0290705896914005,-32.94834536636065,, +pedestrian_counts 
(1h),33.02,17.14m,0.2851085662841797,0.2073741853237152,2.399303436279297,-0.0790703669190406,0.5339555740356445,0.0120536388288673,0.4386217296123504,0.050216905772686,-1.6025779789169343,, +solar_10_minutes (10min),33.4,646.16s,0.7062894701957703,0.5957131385803223,14.92678165435791,-0.1445342004299163,0.8404102921485901,0.2103408826278452,1.2018007040023804,0.1611537635326385,-21.298476220611,, +traffic_hourly (1h),104.18,20.1m,1.11533522605896,0.7301473021507263,2.485908031463623,-0.3488401472568512,1.0560942888259888,0.0201896183480336,1.5637468099594116,0.1347162723541259,16.947088589413003,, +temperature_rain (1D),106.72,20.63m,7.422463893890381,0.9501174688339232,1.2810786962509155,7.790842533111572,2.7244198322296143,0.0030026609462776,1.664116382598877,0.1432362198829651,3.2250580528488424,, +solar_4_seconds (4s),181.73,503.3m,0.000896792218554765,0.0017341896891593933,56.58272933959961,0.05136888101696968,0.02994648925960064,0.011800645700475143,0.07888095080852509,2783.650146484375,-1.174730302450018,, diff --git a/leaderboard/monash_timesfm.csv b/leaderboard/monash_timesfm.csv index 2685543..ab3729c 100644 --- a/leaderboard/monash_timesfm.csv +++ b/leaderboard/monash_timesfm.csv @@ -1,28 +1,28 @@ -dataset,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -tourism_yearly,2361702.5,36.06318664550781,3606318.0,3606318.0,1536.783203125,153678320.3125,1.4851635694503784,3606318.0,3606318.664550781,-7780.55712890625,316451173.62888974 -tourism_quarterly,133073.890625,11.017659187316896,1101766.25,1101766.25,364.7929382324219,36479293.82324219,1.7359308004379272,1101766.25,1101765.9187316897,-3167.559814453125,9380487.746344984 -tourism_monthly,8602214.0,83.11030578613281,0.4208029210567474,246588.640625,2932.953125,0.0077529820907281,1.8755922317504885,30134452.0,0.4558699002701215,35345.91796875,3721804582.7465873 -cif_2016,4054.138916015625,12.505019187927246,0.1814535409212112,20152.20703125,63.672119140625,0.0231935676381596,1.714262843132019,0.0330332480370998,0.204135231840822,404.7507019042969,274699.42295985995 -australian_electricity_demand,150264.421875,232.8675231933593,0.3018090426921844,57704.55859375,387.6395568847656,0.0295914764548414,0.0868552923202514,0.0322753563523292,0.0669677612136246,-9590.701171875,524073785.36750144 -bitcoin,,,,,,,,,,, -pedestrian_counts,27354.072265625,48.37232971191406,0.4173867702484131,23420.462890625,165.3906707763672,0.0103505019508643,1.1788418292999268,0.0315233282744884,0.1650090210907146,754.9451904296875,7847077.815458994 -vehicle_trips,181.2378387451172,0.5425539612770081,54255.40234375,54255.40234375,13.4624605178833,1346246.05178833,1.9117441177368164,54255.40234375,54255.3961277008,-2.324742794036865,1099.301869087636 -kdd_cup_2018,2242.1318359375,19.861251831054688,0.802638590335846,367465.21875,47.35115432739258,0.0143749709118527,0.8389987945556641,0.1068498790264129,0.6370928594170072,103.09420013427734,69039.25441101419 -nn5_daily,35.77730941772461,3.885481595993042,1.6794143915176392,16414.0,5.981413841247559,0.0623306355287951,0.2313048988580703,0.0763084515929222,0.2016468535974389,4.786635875701904,657.0211086576586 -nn5_weekly,89.06739044189453,1.0670193433761597,0.1561520546674728,143.2692108154297,9.437552452087402,0.0293910798458386,1.6629701852798462,0.0270845741033554,0.1691647435775213,-1.9155505895614624,1144.141507496138 
-solar_10_minutes,26.70118904113769,2.4146080017089844,0.1946574002504348,17161.712890625,5.167319297790527,0.058125068929674,1.3448920249938965,0.0854992121458053,0.3819715876199319,3.173281192779541,169.20667857149823 -solar_weekly,78236.7578125,42.5200424194336,0.1622558981180191,142.00363159179688,279.7083435058594,0.0181469127403297,1.5168342590332031,0.0203413348644971,0.1802843047310721,15530.861328125,32472244.27291968 -car_parts,0.0023693332914263,0.0032368141692131,2.325936794281006,224.52639770507807,0.0486757978796958,0.0048675749203946,1.820556044578552,332.6125183105469,3.0938389132825828,0.0002686674706637,0.0001817149828545 -fred_md,337362592.0,1292.2945556640625,1.1678502559661863,26.98746681213379,18367.43359375,0.005784504681377,0.098615176975727,0.0782435983419418,0.0311425562484908,-106201.6875,363984351646.9304 -traffic_hourly,0.000455149973277,0.0080386660993099,0.387826532125473,21.04779624938965,0.0213342439383268,0.0294667816919213,0.1653950363397598,0.0385803952813148,0.1418321666411633,0.0036569747608155,0.0001586150792923 -traffic_weekly,0.0091974884271621,0.0065262168645858,0.0998282879590988,148.9840545654297,0.0959035381674766,0.0049818700666002,1.7232398986816406,12732.7841796875,0.1089020227146023,0.0025558837223798,0.0113577489246558 -hospital,306.1357116699219,0.6331541538238525,0.1295970529317855,191.8525848388672,17.496734619140625,0.0023567799832398,1.7972227334976196,862228.25,0.1555164023384381,3.4607455730438232,4272.47144303022 -covid_deaths,119150.6484375,13.035115242004396,0.1232983171939849,151.50901794433594,345.1820373535156,0.0064081616843543,1.8622676134109497,17.030107498168945,0.1438479938902019,-31.99930191040039,156593.61040537566 -sunspot,2275.333251953125,32.238399505615234,0.787270724773407,306006.84375,47.70045471191406,0.0903417685766976,0.8078023791313171,0.1073573678731918,0.4086495293865697,433.9431762695313,164093.2649063609 -saugeenday,1305.9161376953125,15.696843147277832,0.6664905548095703,0.4421868026256561,36.1374626159668,0.0566684355614381,0.4329459965229034,0.0825363099575042,0.5160752917687316,206.78466796875,33607.82410239941 -us_births,151821.234375,265.7854919433594,0.2479991912841797,0.0276550203561782,389.6424560546875,0.0630897758134374,0.0274470150470733,0.0591710805892944,0.0274304356335499,140754.0,1462221219.9125562 -solar_4_seconds,21.87527084350586,1.3645970821380615,0.5447545051574707,2320.71484375,4.677100658416748,0.0401467833214499,1.0651005506515503,0.0118864914402365,0.0418262921876244,-1.593536138534546,725.113524009159 -wind_4_seconds,23.43195533752441,1.7629423141479492,0.6767939925193787,1002.0794067382812,4.840656280517578,0.0370080718463087,0.1096571534872055,0.016058275476098,0.0435277317578082,2.227166652679444,927.9487571490704 -rideshare,46.68171691894531,0.3123773336410522,0.7474644184112549,7209.89306640625,6.832401752471924,0.0767685503908586,1.8551300764083865,0.2038945853710174,0.7291043059541679,0.1454752832651138,50.14438478675226 -oikolab_weather,81064.8515625,61.23777770996094,0.551532506942749,45665.16015625,284.7189025878906,0.002753571675954,0.3464515209197998,0.0006154436268843,0.0047957091499386,-133933.125,805123431.8426093 -temperature_rain,172.225341796875,5.509518623352051,2.190863847732544,51034.7421875,13.123465538024902,0.022516410521192846,1.5997639894485474,0.06347574293613434,0.8267697936668237,16.08089828491211,999.5042235450454 +dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps +solar_weekly 
(1W),0.06,4.58s,78236.7578125,42.5200424194336,0.691641628742218,142.00363159179688,279.7083435058594,0.0181469127403297,1.5168342590332031,0.0203413348644971,0.1802843047310721,15530.861328125,32472244.27291968 +cif_2016 (1M),0.08,1.94s,4054.138916015625,12.505019187927246,1.7135133743286133,20152.20703125,63.672119140625,0.0231935676381596,1.714262843132019,0.0330332480370998,0.204135231840822,404.7507019042969,274699.42295985995 +nn5_weekly (1W),0.12,3.82s,89.06739044189453,1.0670193433761597,1.2955018281936646,143.2692108154297,9.437552452087402,0.0293910798458386,1.6629701852798462,0.0270845741033554,0.1691647435775213,-1.9155505895614624,1144.141507496138 +us_births (1D),0.13,17.63s,151821.234375,265.7854919433594,0.3732689917087555,0.0276550203561782,389.6424560546875,0.0630897758134374,0.0274470150470733,0.0591710805892944,0.0274304356335499,140754.0,1462221219.9125562 +tourism_yearly (1YE),0.16,22.4s,2361702.5,36.06318664550781,3606318.0,3606318.0,1536.783203125,153678320.3125,1.4851635694503784,3606318.0,3606318.664550781,-7780.55712890625,316451173.62888974 +covid_deaths (1D),0.27,3.94s,119150.6484375,13.035115242004396,16.555212020874023,151.50901794433594,345.1820373535156,0.0064081616843543,1.8622676134109497,17.030107498168945,0.1438479938902019,-31.99930191040039,156593.61040537566 +hospital (1M),0.35,10.45s,306.1357116699219,0.6331541538238525,1.6906147003173828,191.8525848388672,17.496734619140625,0.0023567799832398,1.7972227334976196,862228.25,0.1555164023384381,3.4607455730438232,4272.47144303022 +tourism_quarterly (1Q),0.36,15.65s,133073.890625,11.017659187316896,1101766.25,1101766.25,364.7929382324219,36479293.82324219,1.7359308004379272,1101766.25,1101765.9187316897,-3167.559814453125,9380487.746344984 +vehicle_trips (1D),0.36,6.7s,181.2378387451172,0.5425539612770081,54255.40234375,54255.40234375,13.4624605178833,1346246.05178833,1.9117441177368164,54255.40234375,54255.3961277008,-2.324742794036865,1099.301869087636 +saugeenday (1D),0.38,52.85s,1305.9161376953125,15.696843147277832,3.3370907306671143,0.4421868026256561,36.1374626159668,0.0566684355614381,0.4329459965229034,0.0825363099575042,0.5160752917687316,206.78466796875,33607.82410239941 +fred_md (1M),0.53,15.78s,337362592.0,1292.2945556640625,2.1173739433288574,26.98746681213379,18367.43359375,0.005784504681377,0.098615176975727,0.0782435983419418,0.0311425562484908,-106201.6875,363984351646.9304 +car_parts (1M),0.56,25.97s,0.0023693332914263,0.0032368141692131,1.9314188957214355,224.52639770507807,0.0486757978796958,0.0048675749203946,1.820556044578552,332.6125183105469,3.0938389132825828,0.0002686674706637,0.0001817149828545 +traffic_weekly (1W),0.66,24.78s,0.0091974884271621,0.0065262168645858,1.090032696723938,148.9840545654297,0.0959035381674766,0.0049818700666002,1.7232398986816406,12732.7841796875,0.1089020227146023,0.0025558837223798,0.0113577489246558 +bitcoin (1D),0.72,12.04s,,,,,,,,,,, +tourism_monthly (1M),0.78,11.98s,8602214.0,83.11030578613281,2.404216051101685,246588.640625,2932.953125,0.0077529820907281,1.8755922317504885,30134452.0,0.4558699002701215,35345.91796875,3721804582.7465873 +nn5_daily (1D),0.85,4.39s,35.77730941772461,3.885481595993042,0.5496071577072144,16414.0,5.981413841247559,0.0623306355287951,0.2313048988580703,0.0763084515929222,0.2016468535974389,4.786635875701904,657.0211086576586 +sunspot 
(1D),1.19,179.14s,2275.333251953125,32.238399505615234,1.849418044090271,306006.84375,47.70045471191406,0.0903417685766976,0.8078023791313171,0.1073573678731918,0.4086495293865697,433.9431762695313,164093.2649063609 +rideshare (1h),4.01,38.13s,46.68171691894531,0.3123773336410522,4.130497455596924,7209.89306640625,6.832401752471924,0.0767685503908586,1.8551300764083865,0.2038945853710174,0.7291043059541679,0.1454752832651138,50.14438478675226 +oikolab_weather (1h),6.78,153.28s,81064.8515625,61.23777770996094,4.258865833282471,45665.16015625,284.7189025878906,0.002753571675954,0.3464515209197998,0.0006154436268843,0.0047957091499386,-133933.125,805123431.8426093 +kdd_cup_2018 (1H),17.6,150.17s,2242.1318359375,19.861251831054688,2.7438406944274902,367465.21875,47.35115432739258,0.0143749709118527,0.8389987945556641,0.1068498790264129,0.6370928594170072,103.09420013427734,69039.25441101419 +australian_electricity_demand (30min),22.54,402.67s,150264.421875,232.8675231933593,2.5489635467529297,57704.55859375,387.6395568847656,0.0295914764548414,0.0868552923202514,0.0322753563523292,0.0669677612136246,-9590.701171875,524073785.36750144 +pedestrian_counts (1h),33.02,456.37s,27354.072265625,48.37232971191406,0.5331121683120728,23420.462890625,165.3906707763672,0.0103505019508643,1.1788418292999268,0.0315233282744884,0.1650090210907146,754.9451904296875,7847077.815458994 +solar_10_minutes (10min),33.4,308.57s,26.70118904113769,2.4146080017089844,4.389446258544922,17161.712890625,5.167319297790527,0.058125068929674,1.3448920249938965,0.0854992121458053,0.3819715876199319,3.173281192779541,169.20667857149823 +traffic_hourly (1h),104.18,782.24s,0.000455149973277,0.0080386660993099,0.5998861789703369,21.04779624938965,0.0213342439383268,0.0294667816919213,0.1653950363397598,0.0385803952813148,0.1418321666411633,0.0036569747608155,0.0001586150792923 +temperature_rain (1D),106.72,30.02m,172.225341796875,5.509518623352051,1.072189450263977,51034.7421875,13.123465538024902,0.0225164105211928,1.5997639894485474,0.0634757429361343,0.8267697936668237,16.08089828491211,999.5042235450454 +solar_4_seconds (4s),181.73,247.63m,21.87527084350586,1.3645970821380615,22.490386962890625,2320.71484375,4.677100658416748,0.0401467833214499,1.0651005506515503,0.0118864914402365,0.0418262921876244,-1.593536138534546,725.113524009159 +wind_4_seconds (4s),184.15,134.12m,23.431955337524414,1.7629423141479492,10.539875030517578,1002.0794067382812,4.840656280517578,0.03700807184630877,0.1096571534872055,0.01605827547609806,0.04352773175780827,2.2271666526794434,927.9487571490705 diff --git a/leaderboard/monash_ttm.csv b/leaderboard/monash_ttm.csv index 1541dd2..09789b1 100644 --- a/leaderboard/monash_ttm.csv +++ b/leaderboard/monash_ttm.csv @@ -1,28 +1,28 @@ -dataset,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps -tourism_yearly,2359421952.0,2854.82470703125,0.1089780032634735,32821664.0,48573.8828125,0.0129256089977534,1.7662028074264526,2159464448.0,0.2452005257977564,, -tourism_quarterly,22722631680.0,22425.71484375,0.3071715533733368,12967759.0,150740.28125,0.0214316133549931,0.8326326012611389,0.1169410794973373,0.4716038363900894,, -tourism_monthly,190129680.0,3006.765380859375,0.5812615156173706,4.872797012329102,13788.751953125,0.0548260515012991,1.912000179290772,0.0580983571708202,0.9999999966741668,, -cif_2016,7879127705059328.0,8228172.0,0.3379811346530914,3967203072.0,88764448.0,0.0598607971446591,1.382277488708496,2.4027557373046875,0.6759591304653735,, 
-australian_electricity_demand,3341478.75,712.8013305664062,,12.315439224243164,1827.97119140625,0.2657256222050282,1.899953961372376,0.0547413900494575,0.9999999859708456,, -bitcoin,1027181641728.0,163108.15625,,10.36756992340088,1013499.6875,0.1522775053674234,1.872477412223816,0.0010149367153644,0.999999999938691,, -pedestrian_counts,228594.296875,128.47894287109375,0.5034648776054382,12.996304512023926,478.1153564453125,0.0734432190032074,1.8848451375961304,0.0253258012235164,1.0000007535211155,, -vehicle_trips,12944.91796875,32.1100959777832,0.5454309582710266,8.314323425292969,113.7757339477539,0.1142326634592643,1.8874207735061648,0.0401715897023677,1.0000005201756343,, -kdd_cup_2018,5859.45361328125,20.07014846801757,0.6065610647201538,11.69101619720459,76.54706573486328,0.1040748670212299,1.8907800912857056,0.0260089412331581,1.000039987876041,, -nn5_daily,84.30940246582031,3.8253681659698486,1.5465952157974243,12.33367919921875,9.182015419006348,0.1372924203656559,1.8956758975982664,0.0864472389221191,1.0000172057270649,, -nn5_weekly,2644.98974609375,39.26283264160156,0.605934739112854,1.469842791557312,51.42946243286133,0.1316665901147501,0.6432918906211853,0.159388393163681,0.374499525577815,, -solar_10_minutes,0.1193703636527061,0.0212509110569953,0.5495615005493164,16.501588821411133,0.3455001711845398,0.0193557406298882,1.879668116569519,2125.758544921875,1.007180415032677,, -solar_weekly,1373680.0,743.2652587890625,0.2323275208473205,2.50764799118042,1172.041015625,0.0655905581844849,0.6550678610801697,0.0760571658611297,0.2115543900339158,, -car_parts,1.4168554544448853,0.4631965458393097,0.6699858903884888,9190.7080078125,1.1903173923492432,0.0297579273692492,1.8140949010849,0.1154233813285827,1.1145474636590038,, -fred_md,382965536.0,3788.1640625,0.3339461088180542,2.1272084712982178,19569.505859375,0.0635251556777571,1.144764065742493,0.0587963350117206,0.6666116741719332,, -traffic_hourly,0.0012339112581685,0.0165214743465185,1.925545334815979,10.48202896118164,0.0351270735263824,0.0920979379871027,1.889513611793518,0.1289066225290298,1.0020856827429263,, -traffic_weekly,18.118263244628903,3.596972942352295,0.8620346188545227,0.6948413252830505,4.256555557250977,0.1203895891183525,0.5381367802619934,0.1892789900302887,0.3839304865008895,, -hospital,340835.0,170.67337036132812,0.3977198898792267,0.6387853026390076,583.8107299804688,0.0503241728710651,0.958367884159088,0.0690037384629249,0.6482679880152843,, -covid_deaths,10885.9296875,4.382719039916992,0.4113816916942596,12.534967422485352,104.33565521240234,0.0352604443595126,1.8569068908691408,1.4129878282546997,1.0000239395375004,, -sunspot,1908.99462890625,15.400076866149902,,10.670031547546388,43.69204330444336,0.2275627136917511,1.868899941444397,0.0662359222769737,1.00000436672582,, -saugeenday,269.8126220703125,11.023311614990234,,6.07255744934082,16.425973892211914,0.4874174672453182,1.903668761253357,0.3271011114120483,0.9999971030106124,, -us_births,43719532.0,4511.60009765625,,6.072560787200928,6612.07470703125,0.6386009948469422,1.9036824703216555,0.3768950998783111,0.9999999977834916,, -solar_4_seconds,3.021440520001306e-08,0.0001681742432992,,16.817424774169922,0.0001738229184411,17.38229184411466,1.8779784440994265,16.817424774169922,16.817424329929054,, -wind_4_seconds,69.0277099609375,4.013407230377197,,12.315433502197266,8.3082914352417,0.4830398974600308,1.899942874908448,0.2333374768495559,1.0000159243190567,, 
-rideshare,41.59174728393555,1.985545635223389,0.9003582000732422,11.035061836242676,6.449166297912598,0.0700996260534384,1.8937571048736568,0.0379886627197265,1.0000231222176446,, -oikolab_weather,376969088.0,3722.074462890625,,10.634934425354004,19415.69140625,0.1906107499551713,1.895629286766052,-0.015476182103157,0.999999997313326,, -temperature_rain,97.58940887451172,2.4841203689575195,0.6232664585113525,10.281512260437012,9.878735542297363,0.05631090447723616,1.871820330619812,0.02576564997434616,1.0000233284375566,, +dataset,size_in_MB,eval_time,mse,mae,mase,mape,rmse,nrmse,smape,msis,nd,mwsq,crps +solar_weekly (1W),0.06,0.87s,752489.75,557.5025,0.5804403,2.455477,867.4617,0.0471882537915461,0.61856735,0.053072058,0.1536462213728214,, +cif_2016 (1M),0.08,0.03s,4496819748864.0,262117.4375,7035.666015625,26207698944.0,2120570.5,772.451050586365,1.2114204168319702,139.9845428466797,814.7173457460926,, +nn5_weekly (1W),0.12,0.04s,412.9100341796875,13.777584075927734,0.860400378704071,1.2613427639007568,20.320188522338867,0.0575473764005769,0.3451373875141144,0.051920112222433,0.1171800602487265,, +us_births (1D),0.13,0.14s,774920.5625,749.2998657226562,1.079309582710266,0.0801891908049583,880.2957153320312,0.1425349277698643,0.0784591287374496,0.1512692719697952,0.0777864587040352,, +tourism_yearly (1YE),0.16,0.19s,4072574976.0,9371.76171875,9.174805641174316,753902912.0,63816.73046875,0.016981761757407,1.969757318496704,2944608768.0,0.8049394050111222,, +covid_deaths (1D),0.27,0.03s,3977120.5,333.5022277832031,17.693937301635742,25.42571258544922,1994.27197265625,0.0115158015934049,0.6798346042633057,0.0594763606786727,0.1522535528720493,, +hospital (1M),0.35,0.55s,5419.22412109375,23.3170166015625,1.0005640983581543,0.2082435339689254,73.6153793334961,0.0060889478306539,0.1936554908752441,0.0582410655915737,0.0844709199142533,, +tourism_quarterly (1Q),0.36,0.04s,687540352.0,4809.1240234375,10.751434326171877,439333312.0,26220.990234375,0.080523135657534,1.9392236471176147,3.4929778575897217,2.2411868137948163,, +vehicle_trips (1D),0.36,0.05s,4858.8662109375,16.659088134765625,214.93820190429688,1656793.75,69.70556640625,1.124283147797073,1.969457983970642,1681903.125,101.85826401285188,, +saugeenday (1D),0.38,0.41s,1242.5223388671875,17.380760192871094,3.849287033081055,0.6814940571784973,35.24943161010742,0.0552758826761708,0.5076281428337097,0.0998007580637931,0.579967369109267,, +fred_md (1M),0.53,0.16s,814031744.0,2271.661865234375,4.098703861236572,16.986513137817383,28531.2421875,0.0089854199366891,0.4137711822986603,0.0847974345088005,0.0783722482977593,, +car_parts (1M),0.56,1.17s,1.2862780094146729,0.4820746183395386,0.833551824092865,15072.5380859375,1.1341419219970703,0.0283535409615415,1.861904978752136,0.1240926533937454,1.204913658689701,, +traffic_weekly (1W),0.66,0.04s,2.178606510162353,0.9004173874855042,0.939781665802002,327.9014587402344,1.4760103225708008,0.0435291994058681,0.1624960601329803,0.0483781658113002,0.0944642034940045,, +bitcoin (1D),0.72,0.33s,inf,inf,inf,inf,inf,inf,,,inf,, +tourism_monthly (1M),0.78,0.03s,112704736.0,2367.056396484375,1.2977303266525269,89226096.0,10616.2490234375,0.0164984879174605,0.8436533808708191,0.101791076362133,0.3132102373206439,, +nn5_daily (1D),0.85,0.07s,75.6475830078125,6.000096797943115,0.9565598368644714,31188.701171875,8.697562217712402,0.0906348557330684,0.5871419310569763,0.1126096695661544,0.3657762648871892,, +sunspot 
(1D),1.19,1.9s,2263.482421875,33.57451629638672,1.913502216339112,464901.59375,47.57606887817383,0.0901061893505908,0.7567583918571472,0.1112903878092765,0.4225726197802073,, +rideshare (1h),4.01,0.05s,5.570432186126709,1.023034930229187,0.9345336556434632,0.2661929726600647,2.3601763248443604,0.0256540876989509,0.142071932554245,0.0360090173780918,0.1433627069740914,, +oikolab_weather (1h),6.78,9.2s,89783.921875,72.87317657470703,5.005366802215576,55771.1953125,299.6396484375,0.0028978731002085,0.3530401289463043,0.0006117020966485,0.0057070918994542,, +kdd_cup_2018 (1H),17.6,43.33s,1443.7083740234375,16.11660385131836,2.632657766342163,268924.46875,37.99616241455078,0.0115349612323015,0.967480719089508,0.0953071042895317,0.6115428402805015,, +australian_electricity_demand (30min),22.54,7.41s,107249.4765625,196.10948181152344,2.1563868522644043,71741.046875,327.4896545410156,0.024999776801508,0.0715896040201187,0.0272345002740621,0.0563015166401996,, +pedestrian_counts (1h),33.02,139.93s,24884.259765625,40.46586990356445,0.8490180373191833,16420.40234375,157.74745178222656,0.0098721729572254,1.5205281972885132,0.037688247859478,0.2664243914990514,, +solar_10_minutes (10min),33.4,94.63s,12.607479095458984,1.590712070465088,4.013481616973877,21271.443359375,3.550701141357422,0.0399403901125908,1.466823935508728,0.0642259195446968,0.3497193315636127,, +traffic_hourly (1h),104.18,181.67s,0.0008253686246462,0.0134190144017338,1.0168439149856567,25.714778900146484,0.0287292301654815,0.0396807103129804,0.3227794170379638,0.0520986802875995,0.2445855730424515,, +temperature_rain (1D),106.72,51.92s,158.96197509765625,6.575003147125244,1.1789751052856443,133810.75,12.608012199401855,0.0216320283476482,1.4561697244644165,0.0790323466062545,0.8580568815952595,, +solar_4_seconds (4s),181.73,156.48s,23.079532623291016,1.5590450763702393,25.690189361572266,2572.01171875,4.804116249084473,0.0412370458087039,1.0432041883468628,0.0134458094835281,0.0477826620524761,, +wind_4_seconds (4s),184.15,152.44s,22.64220428466797,1.8029050827026367,10.780738830566406,724.3897094726562,4.758382320404053,0.03637906609780805,0.11517007648944855,0.016024718061089516,0.04451212641740882,, diff --git a/src/samay/utils.py b/src/samay/utils.py index a32c7f7..2867fd3 100644 --- a/src/samay/utils.py +++ b/src/samay/utils.py @@ -304,24 +304,50 @@ def get_gifteval_datasets(path:str): size = os.path.getsize(d_path) df = pd.read_csv(d_path) freq = pd.infer_freq(df["timestamp"]) - fil1.append((d, freq, size/1e6)) + fil1.append((d_path, freq, size/1e6)) fil2 = [] for data,freq in hier: for f in freq: d_path = os.path.join(path, data, f, "data.csv") size = os.path.getsize(d_path) - fil2.append((data, f, size/1e6)) + fil2.append((d_path, f, size/1e6)) fil = fil1 + fil2 fil.sort(key=lambda x: x[2]) # Create a dictionary to hold the dataset names and their frequencies - dataset_dict = defaultdict(list) - for name, freq, size in fil: - dataset_dict[name].append(freq) + dataset_dict = defaultdict() + for p, freq, size in fil: + dataset_dict[p] = (freq, size) # Convert the defaultdict to a regular dict dataset_dict = dict(dataset_dict) - return dataset_dict, fil + return dataset_dict + +def get_monash_datasets(path:str, config:dict, setting:dict): + dataset_names = config.keys() + dataset_paths = [path + "/" + name + "/test/data.csv" for name in dataset_names] + # Get the frequencies for each dataset + dataset_freqs = [config[name] for name in dataset_names] + dataset_horizons = [setting[name] for name in dataset_names] + + # sort the 
datasets by size, ascending
+    dataset_sizes = []
+    for p in dataset_paths:
+        size = os.path.getsize(p)
+        dataset_sizes.append(size/1e6)
+    dataset_paths, dataset_freqs, dataset_horizons, dataset_sizes = zip(*sorted(zip(dataset_paths, dataset_freqs, dataset_horizons, dataset_sizes), key=lambda x: x[3]))
+    # Create a dictionary to hold the dataset names and their frequencies
+    dataset_dict = defaultdict()
+    fil = zip(dataset_paths, dataset_freqs, dataset_horizons, dataset_sizes)
+    # turn fil into a list
+    fil = list(fil)
+    for p, freq, horizon, size in fil:
+        dataset_dict[p] = (freq, horizon, size)
+    # Convert the defaultdict to a regular dict
+    dataset_dict = dict(dataset_dict)
+
+    return dataset_dict
+
 if __name__ == "__main__":
     # ts_path = "/nethome/sli999/TSFMProject/src/tsfmproject/models/moment/data/ECG5000_TRAIN.ts"

From 823ada2651ab5c70e2e9cbd967b094ba1f649ccb Mon Sep 17 00:00:00 2001
From: Risto0211 <2533895673@qq.com>
Date: Thu, 17 Apr 2025 13:11:42 -0400
Subject: [PATCH 3/3] time_moe

---
 config/timemoe.json | 25 +
 example/timemoe.ipynb | 247 ++++
 leaderboard/monash_moment.csv | 3 +-
 src/samay/dataset.py | 138 ++
 src/samay/model.py | 132 ++
 .../models/Time_MoE/time_moe/__init__.py | 2 +
 .../Time_MoE/time_moe/datasets/__init__.py | 4 +
 .../time_moe/datasets/benchmark_dataset.py | 131 ++
 .../time_moe/datasets/binary_dataset.py | 112 ++
 .../time_moe/datasets/general_dataset.py | 101 ++
 .../time_moe/datasets/time_moe_dataset.py | 139 ++
 .../datasets/time_moe_window_dataset.py | 139 ++
 .../Time_MoE/time_moe/datasets/ts_dataset.py | 30 +
 .../Time_MoE/time_moe/models/__init__.py | 0
 .../time_moe/models/configuration_time_moe.py | 65 +
 .../time_moe/models/modeling_time_moe.py | 1177 +++++++++++++++++
 .../time_moe/models/ts_generation_mixin.py | 241 ++++
 src/samay/models/Time_MoE/time_moe/runner.py | 248 ++++
 .../Time_MoE/time_moe/trainer/__init__.py | 0
 .../Time_MoE/time_moe/trainer/hf_trainer.py | 94 ++
 .../Time_MoE/time_moe/utils/__init__.py | 0
 .../Time_MoE/time_moe/utils/dist_util.py | 45 +
 .../Time_MoE/time_moe/utils/log_util.py | 56 +
 src/samay/utils.py | 2 +-
 24 files changed, 3129 insertions(+), 2 deletions(-)
 create mode 100644 config/timemoe.json
 create mode 100644 example/timemoe.ipynb
 create mode 100644 src/samay/models/Time_MoE/time_moe/__init__.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/__init__.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/benchmark_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/binary_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/general_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/time_moe_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/time_moe_window_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/datasets/ts_dataset.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/models/__init__.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/models/configuration_time_moe.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/models/modeling_time_moe.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/models/ts_generation_mixin.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/runner.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/trainer/__init__.py
 create mode 100755 src/samay/models/Time_MoE/time_moe/trainer/hf_trainer.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/utils/__init__.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/utils/dist_util.py
 create mode 100644 src/samay/models/Time_MoE/time_moe/utils/log_util.py

diff --git a/config/timemoe.json b/config/timemoe.json
new file mode 100644
index 0000000..91c4ce8
--- /dev/null
+++ b/config/timemoe.json
@@ -0,0 +1,25 @@
+{
+    "repo": "Maple728/TimeMoE-50M",
+    "config": {
+        "input_size": 1,
+        "hidden_size": 4096,
+        "intermediate_size": 22016,
+        "horizon_lengths": 1,
+        "num_hidden_layers": 32,
+        "num_attention_heads": 32,
+        "num_key_value_heads": null,
+        "hidden_act": "silu",
+        "num_experts_per_tok": 2,
+        "num_experts": 1,
+        "max_position_embeddings": 32768,
+        "initializer_range": 0.02,
+        "rms_norm_eps": 1e-6,
+        "use_cache": true,
+        "use_dense": false,
+        "rope_theta": 10000,
+        "attention_dropout": 0.0,
+        "apply_aux_loss": true,
+        "router_aux_loss_factor": 0.02,
+        "tie_word_embeddings": false
+    }
+}
diff --git a/example/timemoe.ipynb b/example/timemoe.ipynb
new file mode 100644
index 0000000..00229ff
--- /dev/null
+++ b/example/timemoe.ipynb
@@ -0,0 +1,247 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "143213fb",
+   "metadata": {},
+   "source": [
+    "# TimeMoE Usage Example\n",
+    "\n",
+    "## Loading TimeMoE Model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "706e7811",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "src_path: /nethome/sli999/Samay/src\n"
+     ]
+    },
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/gluonts/json.py:102: UserWarning: Using `json`-module for json-handling. Consider installing one of `orjson`, `ujson` to speed up serialization and deserialization.\n",
+      "  warnings.warn(\n",
+      "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/huggingface_hub/file_download.py:797: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`.\n",
+      "  warnings.warn(\n"
+     ]
+    }
+   ],
+   "source": [
+    "import numpy as np\n",
+    "import os\n",
+    "import sys\n",
+    "\n",
+    "src_path = os.path.abspath(os.path.join(\"..\", \"src\"))\n",
+    "if src_path not in sys.path:\n",
+    "    sys.path.insert(0, src_path)\n",
+    "print(f\"src_path: {src_path}\")\n",
+    "\n",
+    "from samay.model import TimeMoEModel\n",
+    "from samay.dataset import TimeMoEDataset\n",
+    "from samay.utils import load_args\n",
+    "\n",
+    "arg_path = \"../config/timemoe.json\"\n",
+    "args = load_args(arg_path)\n",
+    "tme = TimeMoEModel(**args)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "36516d99",
+   "metadata": {},
+   "source": [
+    "## Define the datasets"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "e82a2d23",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "train_dataset = TimeMoEDataset(name=\"ett\", datetime_col='date', path='../data/data/ETTh1.csv', \n",
+    "                              mode='train', batch_size=32, context_len=512, horizon_len=96, task_name=\"finetune\")\n",
+    "val_dataset = TimeMoEDataset(name=\"ett\", datetime_col='date', path='../data/data/ETTh1.csv',\n",
+    "                            mode='test', batch_size=128, context_len=512, horizon_len=96)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "42bf78f8",
+   "metadata": {},
+   "source": [
+    "## Evaluate the zero-shot forecasting"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "4b9a086d",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "{'mse': 0.47828728, 'mae': 0.45220655, 'mase': 1.5621544, 'mape': -0.19009934, 'rmse': 0.69158316, 'nrmse': 0.07268840430471468, 'smape': 0.7727101, 'msis': 0.069919765, 'nd': 20.229612437546262}\n"
+     ]
+    }
+   ],
+   "source": [
+    "metrics = tme.evaluate(val_dataset)\n",
+    "print(metrics)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "58c1873f",
+   "metadata": {},
+   "source": [
+    "## Visualization of the evaluation"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "3f2c8b3e",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "(462, 7, 512)\n"
+     ]
+    },
+    {
+     "data": {
+      "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA3EAAAGTCAYAAACGZ5vsAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjkuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/TGe4hAAAACXBIWXMAAA9hAAAPYQGoP6dpAAEAAElEQVR4nOydd1gU19fHv0tdehUEFFBU7GLFDlZsqIkFNcWaRI0xURNjr7HEFk1MYhK7v9h774gNUOwNK6ASioh0WHbhvn/sO8PM7uyywC67wP08zz7K3Dszd/r93nPuOSJCCAGFQqFQKBQKhUKhUCoERvpuAIVCoVAoFAqFQqFQNIeKOAqFQqFQKBQKhUKpQFARR6FQKBQKhUKhUCgVCCriKBQKhUKhUCgUCqUCQUUchUKhUCgUCoVCoVQgqIijUCgUCoVCoVAolAoEFXEUCoVCoVAoFAqFUoGgIo5CoVAoFAqFQqFQKhBUxFEoFAqFQqFQKBRKBYKKOAqlihMdHQ1TU1M0btwYhYWF+m6OwREREQGRSITOnTvruykl5v3797C3t0e1atWQnZ2t8/15e3tDJBJh69atOt+XECKRCCKRCJcuXdLL/ikUVSxYsAAikQiBgYH6bopO0OezFxsby+4/Nja23PdfESgsLESjRo1gamqKp0+f6rs5FC1BRRyl0sF8LEUiEaysrPDff/+prMt9+VfVjt8PP/wAmUyG+fPnw8iI/0rYunUre36MjIxw584dtdti6uqrE68L2rZti6CgIFy5cgWHDh3Sd3NKxIIFC5Ceno7p06fDyspK382h6JDnz59j+fLlGDx4MBo1agQXFxeYmprC3t4erVq1wsyZM/H27VuV69++fRsLFy5E//79Ub9+fTg5OcHU1BROTk7o0KEDlixZgtTUVMF1ue9RTX6jR4/W1Wmocty9excLFizA2rVr9d0USjmRmZmJBQsWoEmTJrC2toadnR1at26N1atXIz8/X3AdIyMjzJ07FzKZDNOnTy/nFlN0BqFQKhnz588nANjfl19+qbJuTEwMWy80NLT8GmkgXLx4kQAgjRs3JoWFhUrlW7Zs4Z3Lnj17qt0eU2/Lli06arF+CA8PJwBIvXr1iFQq1XdzNOLp06fExMSEVKtWjWRnZ5fLPrt27Up8fX3JwYMHy2V/ilTlZ3nlypW8Z9XMzIzY29vzlllZWZFDhw4Jrv/111/z6orFYmJjY8Nb5uzsTK5fv6607uvXr4mrq6van52dHbud33//Xcdnw/D47bffiK+vL/nss8+0ul3mHe3l5aXV7ZYUX19f4uvrSyIjI8t939zveExMTLnvvzyJjY0l3t7e7PFaWloSc3Nz9u/mzZuT1NRUwXULCgpIw4YNCQASFhZWzi2n6AJqiaNUejZv3oxnz57puxkGyc8//wwAGD9+PEQiUbH1z549i4sXL+q6WQZH27Zt0axZMzx79gyHDx/Wd3M0Ys2aNZDJZBg5ciQsLS3LZZ8XLlxAdHQ0Pvroo3LZH6WIBg0a4KeffkJoaCjev38PiUSCDx8+IDc3FwcPHoSPjw+ys7MxYsQIxMTEKK3fpk0brFy5EuHh4ex6GRkZyMzMxNatW1GtWjWkpKRg4MCBSE9P561bs2ZNJCYmqv199tlnAAALCwuMGDGiXM6JITFp0iRER0dj+/bt+m6KToiOjkZ0dDTatGmj76ZUWgoKChAcHIzY2Fi4ubnh3LlzyM7ORk5ODnbv3g0bGxvcuXMHn3zyieD6RkZG+OKLLwAAK1asKM+mU3QEFXGUSkvNmjXRtGlTyGQyzJo1S9/NMThiYmJw9uxZmJqaIiQkpNj6/fr1AwD8+OOPIITounkGx6effgoA+Ouvv/TckuLJysrCzp07ARS1m1K56du3L2bPno3AwEA4Ojqyy8ViMT766COcPHkSAJCbm8veG1w+//xzfP/992jbti3s7e3Z5dbW1hg5ciT+97//AQCSk5Nx/PjxErUtLy8P//77LwBg0KBBvO1TKBTN2Lp1Kx48eAAAOHDgALp37w5ALs5CQkLYb9OpU6dw4cIFwW0MHz4cxsbGOHXqFF6/fl0+DafoDCriKJUWIyMjLFu2DID8hXfjxo1SbaegoACbN29G165d4ezsDHNzc3h4eGDIkCFq59EFBgZCJBJhwYIFIITgn3/+gb+/P2xtbWFjY4N27dqxHSN1vHz5Et988w0aNGgAa2trWFpaokGDBvjuu+/K9BLeuHEjCCHo0aMHnJ2di62/bNkyGBkZISoqCvv37y/1fg8ePIh+/frB1dUVZmZmcHV1Rb9+/dTONxs1ahREIhFGjRoFANi/fz/bWbW0tISfnx/WrVtXbGCWxMREzJgxA82aNYOdnR3EYjFq166NcePG4fHjx2rXZawHFy5cwKtXr0p20OXMrl27kJmZiQYNGqBZs2ZK5ZMmTYJIJMLgwYOVyqRSKaytrSESiVCtWjVBwR4UFASRSIR58+bxlqsLbMKde5qZmYk5c+agfv36sLCwgJOTE/r164fIyEi1x/Xhwwf88MMP8PHxgVgshpubG4YMGYJbt24Vc0bkpKenY9GiRWjRogVsbW1hYWGBunXrYsKECYLX9N27dzAyMoJIJMLDhw+VypctW8Ye1/r165XKw8PDIRKJYG5ujtzcXI3aqCvq1asHBwcHAFA7N04Vbdu2Zf9f0vUPHjyIDx8+AADGjRtX4n0Xt22RSAQzMzOkpKSordupUyeIRCKlNty+fRuLFi1C586d4eXlBbFYDHt7e7Rt2xY///wzsrKyVG6Te18nJydj6tSpqFevHiwtLXneDeoCm+Tm5uLo0aP44osv4Ofnh2rVqsHc3Bzu7u4YOHAgTp06pXLfzPzCuLg4pbmHCxYsUFonPT0dS5Ysgb+/PxwcHGBubo6aNWti+PDhiIiIUHv+1KFqbrli0JGkpCR8++23qFWrFsRiMVxdXTFs2DBER0er3X58fDy++uor1KxZE+bm5qhRowZGjx6NFy9eaNS+goICbN26FUFBQey3p1q1aggKCsLu3buV3nMvXryAra0tRCIRvvvuO8FtZmZmok6dOhCJRAgKCtL54Oa2bdsAAF26dEG7du2UyocNG4ZatWoBgEqLr6urK7p27YrCwkJs2rRJd42llA/69OWkUHQBMyeOmSMQEBBAAJAuXboo1S1uTlxaWhoJDAxk6xgbGxN7e3siEonYZd9//71gO5j9zpkzhwwYMIAAICYmJsTW1pY3z2TevHkqj+Xvv/8mpqambF1zc3NiYWHB/m1ra0vOnj1bqvPUokULAoAsW7ZMZR3unDhCCBk5ciQBQOrWrSs4N4ypKzQnTiKRkJCQELaOkZERcXBwIEZGRuyy4cOHk/z8fKV1mf2OHDmSnbtjZGSkNOfn888/V3ksx44dI9bW1mxdU1NTYmVlxZtDtG3bNrXnzMfHhwAgf/zxh9p6+ubjjz8mAMhXX30lWH7gwAECgDg5OSnNhbx69SrvnN67d49Xnp+fz563ixcv8sq8vLxUXn9mezt37iR16tRh511ZWlryrsnp06cF2x
wTE8Nun7lezLNkZmZGjhw5ovZZfvjwIalRo4bKOV/m5uZk//79Sus1adKEACDr1q1TKuvevTu7/kcffaRU/tNPPxEApHPnzoLHVJ48fPiQbevKlStLvP7x48fZ9fft21eidbt27cq+N7SNRCIhjo6OBABZv369ynoxMTHse/vSpUu8Mu79LvReadiwIUlKShLcLlPnn3/+Ia6urkr3FgPzXQoICFDahuLcYwsLC95zAYBMmzZNaT1XV1f2GTAyMlKah6h4nSMiItg2Mt8z7jMgEonI0qVL1Z1ulah69rjf2OPHjxMXFxfBuVy2trbk7t27gtu+desWcXBw4J0f5l1ua2tL9uzZo3ZOXGJiIvH39+edT+4cTQCkf//+RCKR8NbbsWMHr+2KfPrppwQAcXFxIQkJCaU6b5qSnZ3NfitXrFihst6ECRMIAFK9enWVdRYvXkwAkDZt2uiiqZRyhIo4SqVDUcRFRESwL+JTp07x6hYn4gYNGsR2En/99Vc2QERCQgIZM2YMu+6ff/6ptC4j4hwcHIidnR3ZunUrycnJIYQQ8ubNGxIcHMx+fJ89e6a0/qFDh9iO7YwZM0hsbCwpLCwkhYWFJDo6mgwZMoT9iMXFxZXoHKWnpxNjY2MCgFy4cEFlPUURFxcXx354hY5ZnYibNm0a21GYO3cu+fDhAyGEkNTUVDJr1ix23R9//FFpXUbEOTg4EDMzM7JmzRqSnp5OCCEkJSWFjBs3jl1f6HgiIyOJmZkZK2yePHlCZDIZe0wTJ04kgFxk37x5U+X5GDFiBAFAQkJCVNYxBJiO0qZNmwTL379/z3Zo79y5wytjPvBM5/CXX37hlV+5coUVPbm5ubwyTUScg4MDadiwIbl48SIpKCgghYWF5MaNG8TX15d9bgsKCnjrymQy0qpVK3b9vXv3soMIjx49Ip06deJ1vBWf5YyMDFKrVi0CgHh4eJATJ06w+7h79y5p27Yte0yKHcnJkycTAGTAgAG85RKJhFhaWhILCwtiampKHBwclNrdrVs3AoDMnz9f6XyUB1KplLx584Zs3bqVeHp6sufv3bt3Gq2fl5dHYmJiyG+//cZ2ouvUqUPy8vI0bsPLly/Ze2358uWlPRS1MB1Xf39/lXWY+9rLy0tp4KJ79+5k8+bNJC4ujr2vcnJyyMGDB9n7UkikE1J0X1tbWxNfX19y4cIF9j54+vQpW0+diDt06BD58ssvSWhoKElJSWGX//fff2ThwoXsQN6RI0eU1tU0sElMTAz7jAwePJjcunWLPdakpCQyd+5cYmJiQgCoDH6jDk1EnIODA+nQoQP7jpVKpeTcuXPEzc2NACCdOnVS2m5GRgZ773p6epKzZ8+y1y88PJw0atSI9+wrijiJREJat25NAJAWLVqQEydOsN/xrKwssm3bNvZ9+d133ynt/7PPPiOAPKjPf//9xy7ftm0b+z07efJkic9XSYmKimKPUd3+fv/9d7be+/fvBeucPXuW/d5lZmbqqsmUcoCKOEqlQ1HEEULIRx99RAAQPz8/3gdcnYiLjIxky/766y/BfTEiz9nZWalDy4g4QNliQYi8g+Tu7k4AkJ9++olXJpFIiIeHh9qOOCGE9O/fnwAg3377rco6Qly4cIFtG7fToIiiiCOEkClTphAAxM3NTSnqoSoR9/btW7aDMHPmTMF9TZ06lRWt3I8lIUUiTpVAIISQli1bEgBk3LhxSmXMR3zu3Lkqj1VVZ50LEwHQ09NTZR198/LlS/ZcRUVFqazn5+dHAJDVq1fzlnfp0oUAcgsxABIcHMwrX7hwIQFAAgMDlbapiYirVq2aoFXj/v37bJ2rV6/yyrgj7efPn1daNzs7m7WSCj3Ly5cvZ++tBw8eKK2fkZHBRnzr27cvr+zw4cMEALG3t+eJtLCwMAKAdOvWjbRv354AILdu3WLL8/LyWKu5ouWH+94pza84UcgID8Vfw4YNlSyrQnAtJNxfhw4dSjxgNHv2bLbDqCtrBRM9FgBPOHFhzsmcOXNKtO23b98Sc3NzIhKJBI+d2a+trS158+aNyu2oE3HFwbx3unXrplSmqYgbPHgwAaA2OuaaNWsIANKsWbMSt1ETEVe/fn12IJPL0aNH2TqK5/Dnn38mgHwg9fHjx0rrJiQk8Kx0iiJu/fr1BABp1KgRycjIEGx7VFQUEYlExMzMTOndlJmZyXoOdO3alRQUFJDnz5+zlsApU6YIbjM0NLRMz7jiO5R7jtQ9w8z7CoDgu44QQt69e6e2b0KpONA5cZQqwdKlS2FsbIy7d+9i165dGq2ze/duAECNGjVUzuNYvHgxACAlJQXnzp0TrNOhQwd06dJFabm5uTmCgoIAAPfv3+eVnTp1CvHx8XB1dVWbU+nzzz8HAJw5c6aYo+HD5M4zNjbmBUHQhNmzZ8PW1hYJCQka5yY6cOAAZDIZxGIxZsyYIVhnzpw5MDc3h1QqVTnnrmbNmuwxK9K/f38Ayufy3r17uHnzJkxNTTFt2jSVbWS2e/78eRQUFAjWYeYOJiQkqNyOvuHmRaxWrZrKesw9yY02KpFIEB4eDisrK0ydOhVmZma4fPky73yEhoby1i8pX375JVxcXJSWN2nShJ3PoXgNmWexQ4cO6Natm9K6lpaWanMf7dmzBwAwePBgNG7cWKncxsaGXf/UqVO86IsBAQEwMjJCWloabt++zS5nzkPXrl3RtWtXAPxzGRERgdzcXIjFYt58MkD+3Lm6upb6Z21trfJYAfl1d3V1ha2tLbusadOm+O2339C0aVO16wJA9erV4erqysst2KVLF6xduxaenp7Frs/AzEMC5IFXqlevrvG6JaFt27aoW7cuAGDHjh1K5Tdu3GATHDNRMjXFw8MDzZo1AyEE169fV1nvs88+Q40aNUq0bU3p27cvAPkcS1XvJnWkpqbi4MGDAKDy/QsUvQPv3buHpKSkUrRUPdOmTYOFhYXS8t69e8PMzAwA2MAdDMyzP2TIEDRo0EBp3erVq2P8+PEq97lx40YAwMSJE2FjYyNYp2XLlmjUqBHy8/PZ55rB2toau3fvhpmZGS5evIjFixdj+PDhyMrKQvPmzbF8+XLBbTLzvUv7UzxPmZmZ7P/VRRvmlnHX4eLo6MjmhFWXR5di+FARR6kS1K9fnxVDc+fOhVQqLXadqKgoAPLOi2ISbIYGDRrAw8ODV18Rf39/lftwd3cHAKUkulevXgUgD+Tg5uaG6tWrC/6YcMFxcXHFHg+Xd+/eAQDs7e01Si3AxcnJie3wrlixAu/fvy92HebctG7dmtex5OLg4IBWrVrx6ivSunVrldeiuHNZWFgIX19fleeyV69eAIDs7GyVx8QIXqlUirS0NFWHq1eYawtArUBnhMfly5chk8kAANevX0deXh46duwIOzs7+Pv7Iz09nQ0ckpeXh/DwcAClF3GleR6Y+4Fps7rjUSQ/P58VhUw0NyF69OgBQH6fcMWavb09mjdvDoAv0pj/d+3aVVAQM/9v3749zM3NefvSJCS/ut/333+v8jgA4MqVK0hMTER6ejrev3+PT
Zs24f379+jWrRvGjh3LXm9VxMbGIjExEVlZWUhKSsKqVatw9+5dtGnTRimYjTpOnz6N+Ph4ANoPaKIII87+97//KQWYYISdv78/6tWrp7RuYWEhdu7cif79+8PT0xMWFha8ACFMUCx1AV06dOhQpvYnJSVh/vz5aNeuHZycnGBiYsLuv2HDhgCAnJwcNkBMSQgPD2eDPnXt2lXlO7BRo0bsOiX9pmiCqmffxMSEHXDiPvv5+fmsqCvNs5+Zmck++3PnzlV53NWrV2dFvtBxt2zZEkuXLgUgD1ATFRUFKysrVtwJ0b59+zI945pEjC4tRkZGsLOzA8D/XlAqHlTEUaoMCxYsgIWFBV69eoUNGzYUWz85ORkAWJGmCmb0lamviKrRP0D+8QKgJCqZ0bH8/HwkJSWp/DEf9JJGvsvLywMApc6lpkyZMgXVq1dHeno6+3FThyGcy4KCArXnkhvZLicnR3Af3NFR5hxqwqpVq1R2Ht68eQNAbi1SVUedBUARbrvUXd/OnTvD2NgYmZmZrEjiWpe4/zKC5Pr165BIJLC0tFQrxtRRmmuoyf2jygqSmprKWi80XV/x/lMUabm5uYiMjISNjQ1atWqF9u3bQywW48qVK6xAKqvFUls4OjpizJgxuHbtGqytrbF582aN3n8MLi4umDZtGk6fPg2RSITFixdrnGKAsYJ4eHigd+/epWq/pnz22WdsBERm4AaQ30uMNUfIip+Tk4Pu3bvjk08+wbFjx/DmzRsUFhbC0dGRtYqYmpoCkA/wqELIuqwp4eHhqF+/PhYtWoSIiAikpqbCwsICLi4ucHV15UUPVtcGVXCtLeregVzrm6p3YFko6bOfmprKPk+lefYTExNZ8Zqamqr2uJn9qjruqVOnsoOMgPydLjQgoCu4507dteGWqTvfzLesJN8xiuFBRRylyuDh4YFvvvkGAPDTTz+pDRvNRVNLVUktWupgOp29evUCkc9dLfZXEpycnACgVKO6gNxlgxmR//333zVOdaDPc1m/fn2Nz6W3t7fgtrijxMw51ATGoiH0Y9qXm5ursk5+fr7G++K2S931tbW1RYsWLQAUiROudYn7r2J5hw4dVI5A6xJ194Um94ym6yvWY4TY1atXIZVKce3aNUgkEnTu3BkmJiYQi8Vo164dsrKycOPGDeTk5LDpEtRZEMoTLy8vfPzxxwCAv//+u8Trt2nTBh07dtR4/eTkZJw4cQKAPEWIsbFxifdZEry9vdn2ccOrnz59GikpKTAzM8OwYcOU1luyZAlCQ0NhYWGBX375BXFxccjLy8P79+9ZqwgzYKHuPVva45PJZBg+fDjS0tLg5+eHkydPsknWk5KSkJiYyAv9X9J3PVD0DrSwsND4HSiUCkGflOabwHU9jYiI0Oi4hdIyAMDNmzdx79499u/Lly+XuD1lgfFSAMBat4XglnHXUYT5lpXkO0YxPKiIo1QpZs6cCQcHByQnJ2P16tVq6zIjq4ylRBWMi426+UclhZk7ojg/QFswbc3NzS31SNwXX3yBunXrQiKRYP78+WrrGsK5fPXqValGsbkwHz47Ozt2dF4TmFyB6gTjqFGjtNKh4p47RbdERbgiLTs7Gzdu3IC9vT0r7tq2bQsLCwtcu3YN+fn5rIgrb+sSc/+oc2dTVebo6Mh2sNXdf9wyxfuPEWvZ2dmIjIxUErsA31p39epV5Ofnw8rKCq1btxbclzrXruJ+q1atUnkc6mCsGS9fvizT+prk5tq2bRukUilEIhHGjBlTqv2VFMbStm/fPva9xrhS9unTR9C9mLHSzZs3D9999x08PT2VBENiYqLO2hweHo64uDgYGxvj+PHj6N27t5IFpaz7Z96Bubm5GudVMwS4z666Z1+VqHF1dWX/X5ZvaUZGBoYPHw6pVIomTZpAJBJh165dgvkwGa5fv16mZ5yZx8vQoEEDdiqBUM5KBqasevXqKt3p8/Ly2OdDm99aSvlDRRylSmFvb89O7F69erVKtz0ArOtEaGioyiTS0dHR7AdEqLNWWpj5FfHx8TzXIG3BzLEAUOrE1SYmJvjpp58AyEe+1X1YuHPduEEjuKSlpfHmzmkL5lzm5+erTSiuCTExMQAgOMHeUKhXrx7rmlTctWWEx/Xr13HhwgVIpVI2kAcgn5zfoUMH5OTk4Pz587h58yZvvfKC+yyqgjsfjYuZmRkbzOPChQsq1z9//jwA+XwRRsQyWFtbs224ePGiktsp9//c8o4dOwqK/eJce4v7aepFoAhzPxQXGKW49dW5aTEwiYS7dOmC2rVrl2p/JWXo0KEQi8VIT0/HsWPH2H8BYVdKoEi8M/MeFYmNjdWp8GH2X61aNZUug8y9KQTzrKqz0LVv354VpoxorQhwn93SPPsODg7st64sxz1hwgS8evUKrq6uOH/+PL799lsAwDfffIPnz58LrlPcVIjifopTJCwtLdlv2enTpwX3SQhhg5z17NlT5fFwvwuG/C2jFA8VcZQqx+TJk1GjRg1kZmayIkQIxvUmPj6enduhCONS6OzsrDZoQkkJDg6Gm5sbAODbb78tdn5CcRYXRXx9fdlRSmbSfmkYMmQIWrVqhcLCQsyaNUtlvUGDBsHExAR5eXn4+eefBessXboUEokEpqamGDRoUKnbpEirVq3YDtrs2bOLncit7lwyLnIBAQFaa5+2sbKyYkVIcde2U6dOMDU1RW5uLju3UdH9jxFsixYtgkwm4wma8oKZ5H/16lVcunRJqTw3NxcrV65UuT7zLO/fv19wsCErKwsrVqwAILfYMJP+uTDn5ejRo4iKioKTkxOaNWvGlrdp0wZWVlYIDw/HqVOnAKgWu97e3hq7tWnq8lVcsJLHjx/jyJEjAKBk2S0oKCjWTe/ChQvs/VScZfjq1atsoAhdBzThYmtriwEDBgCQDywxFjlHR0c2wqMizLXmuspxURfNURsw+1eck8bw9u1b/PrrryrXZwJFqQu05OLiwp6XlStX4tmzZ2rbVNLviS5hnv19+/ax9xSX5ORktXM8v/zySwDy+7c4ISd03Nu2bcPOnTshEomwbds2uLi44Oeff0bz5s2RlZWF4cOHC7q7BwYGlukZHzVqlNI2R44cCUAuaJlvEZd9+/axAk3VoAVQ9B1zdXWFr6+v2nNCMXA0z0ZAoVQMhPLEKbJx40alvCzFJfv+7bffeMm+uQmm1SX7VpfTqbgEsEySXD8/P3L69GkikUjY8levXpENGzaQ1q1bk8WLF6vchyqGDh1KAJDx48errCOUJ06R8+fPF5vjhhB+su958+axyb4/fPhA5syZw66rLtn3yJEji22r0HWPjIxkc1/VqlWL7Nu3j5fj7u3bt2THjh2ke/fugnnmCJEnnLaysiKA+mSrhsD06dMJANKrV69i63bo0IF37RRzC3FzcAEgvXv3VrktTfLECT1nDKqeGalUSlq0aEEAEEdHR7J//342Wfvjx49JQEAAsbOzU7kPbrLvGjVqkJMnT7I53+7fv8/meTMzM1NK9s1w7tw53nkYNGiQUp2goCBencjISJXHqm1q
165NVq9eTZ48ecLLZ5eUlET++OMP4uzsTAB5QnPFaxwTE0OaNWtGNmzYQF6+fMnLpfn69WuybNky9t53dHQsNt8b87w6OjpqnBic+65Rd48Ux/Hjx9m8dM2aNSMAyIQJE1TW//TTTwkAYmNjQw4cOMAmwH716hUZPnw4EYlEbB4yoXe5pm1W9a5PS0tjz23nzp3ZPHcymYycPn2a+Pj4ECcnJ5V50J4/f86W7dmzR+X+X758yW6nWrVqZNOmTSQtLY0tf/fuHTlw4AD56KOPSM+ePdUeixCqzgM3T5xi27moenekp6eTGjVqEADE29ubnD9/nr0/IyMjSZMmTdQm+87LyyP+/v7sPTF79mzy+vVrtjw7O5uEhoaSr7/+mtjb2/PW5eaDmzp1Kq8sOjqavW7Tpk3T7CSVEalUSpo0aUIAEA8PDzZnZkFBAdm7dy+xtbUt9h1NCCFfffUVAUCGDh1aHs2m6BAq4iiVDk1EnEwmI/Xr1y9WxKWlpfGSdpuYmBAHBwdWXAEg33//veA+yiriCCHkf//7H7G0tOTt38nJSSkZr2KycE04dOgQ26nldtq4aCLiCCGkR48exYo4iUTCCkcAxMjIiDg4OBAjIyN22fDhw0l+fr7SumUVcYQQcvbsWV5nyNjYmDg5OfHOLyCcLJwQQs6cOUMAEBcXF56YNkTu3LlDABALCwuSnp6utu7cuXPZY3dxcVEql0qlxMbGhq2zYsUKldvSlYgjRN4JrVmzJrsdc3NzVriZmZmRI0eOqN3HgwcPiIeHB1tHLBaznR5me/v27VPZtpycHGJmZsbW//3335XqMEnFAXnyZ0Zolgfce5h5T3CvG9N5P3PmjNK6isnHzczMiLOzM9tJZX61atUit2/fVtuOjIwM9pmaPHmyxu3XloiTSqXE1dWV1+7w8HCV9WNjY3n1TUxMeAMCS5cuVXtfllXEEULIn3/+yWuvtbU1EYvFBABxdnbmJXoWEkLdunVjy21sbIiXlxfx8vIiv/zyC6/e7du32aT2AFiByggV5te9e3e1xyKErkQcIYTcvHmTJ9QsLS3ZNtvY2JA9e/ao3ce7d+9I165decdoa2tL7O3ted9yExMTdp38/HzSqlUrAoA0b95c8J2/adMm9jwKPVe6ICYmhncNLS0t2XuFaWtqaqrK9QsKClhRfPjw4XJpM0V3UHdKSpXE2NhYo9D4dnZ2uHDhAjZt2oTAwEDY2NggKysL1atXx6BBgxAaGqrWjausfPLJJ3jx4gXmzJmDVq1awdraGmlpaRCLxfDz88OkSZNw/vx5/PjjjyXednBwMDw8PPD27VuEhYWVqZ3Lly8vNnqYmZkZ9uzZgwMHDqB3795wcnJCZmYmnJyc0Lt3bxw8eBA7d+4sUcCQktCjRw+8ePECy5YtY/OgpaWlwcjICA0bNsTYsWNx9OhR/Pbbb4Lr//vvvwCA0aNH6yUyY0nw8/NDmzZtkJubyyb5VQXX5U/I/c/ExASdOnVSW6c8qF27Nu7evYupU6eiVq1aIIRALBZj8ODBuH79OpvsXRWNGzfGo0ePsGDBAvj5+cHExAQSiQQ+Pj4YP348Hj16hMGDB6tc38LCgpe0WyjqJHdZp06ddB6RkcuxY8cwdepUtG3bFm5ubsjOzkZ+fj7c3d3Rs2dPrF27Fs+ePROcK+Pu7o69e/di4sSJaNmyJZydnZGRkYHCwkJ4enoiODgYGzduxKNHj1TOHWPYtWsX6/5dEldKZm6xtbU1L19ZSTExMcHw4cPZv+vWrauUbJ2Ll5cXoqKiMHbsWDaan1gsRr9+/XDmzBnMnDmz1G3RlPHjx+PEiRMIDAyEtbU1ZDIZG0353r17aNKkidr19+/fjylTpqBevXqQSqWIi4tDXFyckotl8+bN8fjxY6xfvx7du3eHs7MzMjMzUVhYiLp162LEiBHYvXt3se+M8qZVq1a4f/8+xo0bBw8PD8hkMtjZ2WHkyJG4ffs22rRpo3Z9Z2dnnD9/HkeOHMHgwYNRs2ZNSCQS5Obmsukv1q9fj9jYWHadWbNmISoqCpaWlti1a5fgO3/MmDEICQkBIQSff/652jn22sLb2xv379/HvHnz0LhxY4hEIpiamqJly5ZYtWoVIiIi4ODgoHL9sLAwvH37Fh4eHujXr5/O20vRLSJCShGvlkKhVAoWLVqE+fPnY/To0di8ebO+m2OwZGdnw83NDTk5OXj27Fm5BWooC9u3b8fIkSPRpUsXlRP/KRRDoXv37rhw4QLmzJmDxYsX67s5FEqlZMyYMdiyZQsWLlzIzumnVFyoiKNQqjAZGRmoU6cO0tPT8fLlS5VJU6s6P//8M2bMmIGvvvqqRImS9UlBQQGaNm2Kx48fIzIystjRagpFX0gkEjg4OMDCwgKvXr0SDCxDoVDKxps3b1CnTh3Y2dnhxYsXbFAcSsWFulNSKFUYW1tbzJ8/H/n5+Rq5l1ZFsrKysGrVKlhbW2PhwoX6bo7GGBsbsxEXVSWwpVAMgYiICOTm5mL69OlUwFEoOmLp0qXIz8/HggULqICrJJjouwEUCkW/fPXVV+zcsMLCQjbvEEVObGwsvv76azRv3pyXPLYi0LdvX6xduxZpaWnIysoqdX4wCkWXBAQEFJvigEKhlB5mfutPP/3Epl2gVHyoOyWFQqFQKBQKhUKhVCAq9JD7smXL0Lp1a9jY2MDFxQUDBw4UTAapSFhYGFq2bAmxWIzatWtXmDkuFAqFQqFQKBQKhVKhRVxYWBi+/vprRERE4Ny5c5DJZOjZsyeys7NVrhMTE4M+ffqgU6dOuHPnDmbNmoXJkyfjwIED5dhyCoVCoVAoFAqFQikdlcqd8t27d3BxcUFYWBg6d+4sWOfHH3/E0aNH8eTJE3bZ+PHjce/ePYSHh2u0n8LCQvz333+wsbEpNjcWhUKhUCgUCoVCoWgCIQSZmZlwd3dXG6egUgU2SU9PBwA4OjqqrBMeHq6U7DQoKAibNm2CVCoVTDQskUggkUjYv+Pj49GwYUMttZpCoVAoFAqFQqFQinjz5o3a1E+VRsQRQjB16lR07NgRjRs3VlkvMTFRKcKcq6srZDIZUlJS4ObmprTOsmXLBEOLv3nzhoZppVAoFAqFQqFQKFohIyMDNWvWhI2Njdp6lUbETZo0Cffv38fVq1eLravoAsl4lKpyjZw5cyamTp3K/s2cXFtbWyriKBQKhUKhUCgUilYpbspWpRBx33zzDY4ePYrLly+rNTsCQPXq1ZGYmMhblpycDBMTEzg5OQmuY25uDnNzc621l0KhUCgUCoVCoVBKS4WOTkkIwaRJk3Dw4EFcvHgRtWrVKnaddu3a4dy5c7xlZ8+eRatWrQTnw1EoFAqFQqFQKBSKIVGhRdzXX3+N//3vf9i5cydsbGyQmJiIxMRE5ObmsnVmzpyJzz//nP17/PjxiIuLw9SpU/HkyRNs3rwZmzZtwvfff6+PQ6BQKBQKhUKhUCiUElGhRdyff/6J9PR0BAYGws3Njf3t2bOHrZOQkIDXr1+zf9eqVQsnT57EpUuX4Ofnh8WLF+P
XX3/FoEGD9HEIFAqFQqFQKBQKhVIiKlWeuPIiIyMDdnZ2SE9Pp4FNKBQKhUKhUCgUilbQVGdUisAmFQWpVIqCggJ9N4NCoVAqNMbGxnQOM4VCoVCqNFTElQMZGRlISUnhJQynUCgUSukxNzeHs7Mz9YagUCgUSpWEijgdk5GRgfj4eFhbW8PZ2RmmpqbF5n2gUCgUijCEEEilUqSnpyM+Ph4AqJCjUCgUSpWDijgdk5KSAmtra9SoUYOKNwqFQtECFhYWsLGxwdu3b5GSkkJFHIVCoVCqHBU6OqWhI5VKIZFIYGdnRwUchUKhaBGRSAQ7OztIJBJIpVJ9N4dCoVAolHKFijgdwgQxoRPwKRQKRfsw71YaMIpCoVAoVQ3qTlkOUCschUKhaB/6bqVQKBWNFy8+4Nq1eHh62qJLF099N4dSgaEijkKhUCgUCoVCKQfS0iSIiUmHWEy74JSyQd0pKRQKhUKhUCiUciAhIRsA8OTJez23hFLRoSKOQqFQKBQKhUIpBxISsvTdBEolgYo4ikEyatQoiEQixMbG6rsp5c7+/fshEokQGRmp76bohEuXLkEkEmHBggX6bkqlZeTIkfDy8kJeXp6+m0KhUCgUANHR77F9+yOkptL3MkU7UBFH0RmxsbEQiUTo1auXyjoREREQiUQYNWqU1verzW2WF1KpFDNnzkSfPn3g7+/PK/P29oZIJBL8jR8/Xmlbd+/exaxZsxAUFIRq1apBJBIhMDBQ5b7v3r2LuXPnom3btnBxcYG5uTlq166NiRMnskmVNaW4fVVUvL294e3tre9mFMvcuXMRHx+PX375Rd9NoVAoFAqA3buj8epVGvLyZPpuCqWSQGdVUgySZcuWYcaMGfDw8NB3U8qVrVu34sWLF/jnn38Ey+3s7PDdd98pLW/VqpXSssOHD2PZsmUwMzNDvXr1kJKSonbf48ePx40bN9C6dWsMGzYM5ubmiIyMxJ9//ol9+/bhypUrqF+/fqmOi0ubNm3w5MkTODs7l3lbFGHq1KmDgQMH4ueff8bkyZNhZWWl7yZRKBQKhULRIlTEUQwSNzc3uLm56bsZ5c6GDRvg6emJgIAAwXJ7e3uN3RCHDBmC/v37o0mTJnj//n2x5/PTTz/Fv//+Cx8fH97yn3/+GTNmzMC0adNw4sQJjfatDktLS62IQYp6Pv30Uxw4cAC7du3CuHHj9N0cCoVCoVAoWoS6U1IMElVz4g4cOICAgAC4uLhALBajZs2a6NWrFw4fPgxAbsmqVasWAGDbtm08l8NLly6x28nJycGCBQtQv359iMViODo6om/fvrh+/bpSWxYsWMCuv23bNrRs2RKWlpYIDAzEli1bIBKJsHLlSsHjOHnyJEQiEb799ttij/nBgwe4ffs2Bg0apJX8V40aNUKLFi00TjY/adIkJQEHAN9//z0sLS0RFhZW7DaY+W4AEBYWxjv/W7du5dVRFKOMq2J6ejomTJgANzc3WFlZoXPnzrh9+zYAIDExESNHjoSLiwssLS0RFBSEFy9eCLYlJiYG48aNg6enJ8zNzeHm5oZRo0YhLi5Oqe7t27cxePBgtq6rqyvatWuH5cuXAyhy0Y2Li0NcXBzvuBSP4/LlywgODoazszPMzc1Rt25dzJkzBzk5OYLnasGCBbh8+TICAgJgbW0NR0dHjBgxAm/fvlVq5/PnzzF69GjUqlULYrEYzs7OaNGiBaZNm6ZUt0+fPrCyssKWLVsEzw+FQqFQyh9bW3MAgKsr9ZCglA1qidMThBDk5Ej13Qy1WFqaGlQy3T///BMTJ06Em5sbPvroIzg5OSEhIQE3btzA4cOHMXDgQPj5+eHbb7/FunXr0KxZMwwcOJBdn5nLJJFI0K1bN0RERKBFixb47rvvkJycjD179uDs2bPYs2cPPv74Y6X9r1y5EqGhoejfvz969OgBExMThISEYMqUKdi4cSN++OEHpXU2btwIABpZQi5cuAAAaNu2rco6EokE27ZtQ3x8PBwcHNC+fXs0a9as2G2XBZFIBGNjYxgZFT/m4+3tjfnz52PhwoXw8vLizUv08/Mrdv38/Hz06NEDeXl5CAkJQVJSEvbu3Yvu3bvj+vXr6NWrF6pXr45PP/0UL168wLFjx9CvXz88evQIxsbG7HYiIyMRFBSE7OxsBAcHo06dOoiNjcW///6LU6dOITw8HLVr1wYgnwvYvn17GBsbY8CAAfDy8kJaWhoePXqEf/75BzNmzIC9vT3mz5+PtWvXAgDPpZU792/Dhg2YOHEiHBwcEBwcjGrVquHmzZtYsmQJQkNDERoaCjMzM94xR0REYNmyZejbty8mT56M27dvY9euXbh69Spu3rwJV1dXAMB///2HNm3aIDs7G3379kVISAiysrLw/Plz/Pbbb1i9ejVvu2ZmZmjZsiWuX7+O7Oxs6lJJoVAoemTkyEbIzZVBJBJhz55oEEL03SRKBYeKOD2RkyOFtfWv+m6GWrKyJsPKyqz4isXw4sULlS6AQtYGVWzcuBFmZma4d+8eqlWrxit7/16eb8XPzw/fffcd1q1bBz8/P8H9rlixAhEREfjkk0+wY8cOVqh+9913aNOmDcaNG4cePXrAxsaGt15YWBgiIyPRpEkT3vJPP/0Uv//+Oy5fvozOnTuzy5OTk3H8+HH4+/srrSMEYwVs0aKFyjqJiYlKAVt69eqFHTt26GyO2f79+5GZmYkhQ4YUW9fb2xsLFizAwoUL2f+XhISEBHTs2BE7d+6EiYn89eTn54cZM2agXbt2GD16NFavXs1es4kTJ+LPP//E0aNH8dFHHwGQB4cZNmwYCgsLERUVxRO5V69eRWBgIL799lscO3YMALBjxw5IJBIcOXIE/fv357WHua8YN1bGmih0XI8fP8Y333wDPz8/nD9/Ho6OjmzZ8uXLMXPmTPz2229KVrMzZ85g48aNGDt2LLts0aJFmD9/PmbNmoVNmzYBkFuh09LSsG7dOkyePJm3DVXzHVu2bInLly/jxo0b6NKli2AdCoVCoeieWrXsAQB5eTJ88UVTmJpSZzhK2aB3EEXnvHz5EgsXLhT8MR1UTTE1NRV0D3RyctJ4G1u3boWpqSmWL1/OszQ2bdoUo0aNwocPH3DkyBGl9b788ktBMfbVV18BKLK6MWzbtg1SqRRffPGFRu1iBC1jeVFkzJgxuHTpEt69e4eMjAxERESgd+/eOH36NPr376+TUb03b95g8uTJsLCwwOLFi7W+fSFWrlzJCjgAGDFiBABAJpNh8eLFvGs2fPhwAMC9e/fYZcePH0dsbCymT5+uZKXs2LEjBgwYgJMnTyIjI4NXZmFhodSWktxXf/31F2QyGX799VeegAOA6dOno1q1ati1a5fSer6+vhgzZgxv2Q8//MDWz8/PL7adqgQ8cy+VZLCEQqFQKLrj9OkYXLkSDyMjw/F0olRMqCVOT1hamiIra3LxFfWIpaVmc6mKIygoCKdPnxYsi4iIQLt27TTaztChQzFjxgw0btwYw4YNQ2BgIDp27Ah7e3uN25KRkYFXr16hQYMGqFGjhl
J5YGAg/vrrL9y9exeffvopr6xNmzaC22zSpAnatWuH/fv347fffoOdnR0AYPPmzbC2tkZISIhGbXv//j2MjY2VLIAM8+bN4/3t7++P48ePIyAgAFevXsXJkyfRt29fjfalCampqejTpw+Sk5Oxfft2+Pr6am3bqrC3t4eXlxdvGROQpW7dukougUwZNwVCREQEACA6OlrQYpaYmIjCwkI8e/YMrVq1wuDBg7F27VoMHDgQQ4cORY8ePdCxY0d4enqWqO3Mfk+fPo3z588rlZuamiI6OlppeYcOHZTcli0sLNCyZUucPn0az549Q+PGjdGvXz/MmDEDX3/9Nc6dO4devXqhY8eOqFevnso2MWKyuMikFAqFQtEdEokMJ068wv3779hlAQHKfRAKpSRQEacnRCKRVlwVqxLTp0+Hk5MTNmzYgDVr1mD16tUwMTFBnz59sHbtWjagiToY64sqa1f16tUBAOnp6UplqtYB5Fa60aNH499//8XEiRNx9epVREdH44svvoC1tbUmhwcLCwsUFBRAKpVqHIzEyMgIo0ePxtWrV3Ht2jWtibgPHz6ge/fuePToEf78808lQasrGAHMhbHK2draqiyTSovml6ampgIA/v33X7X7ys7OBgC0a9cOFy9exLJly7Br1y7WZbJly5ZYuXKlxm6IzH6XLFmiUX0GFxcXweXM/cbci7Vq1UJ4eDgWLlyIU6dOYd++fQDklrzFixcLurvm5uYCkEcEpVAoFIp+SE+X8AQcADx4kAI3N836BxSKENSdklJhEIlEGDduHKKiovDu3TscOnQIH3/8MY4ePYq+ffuioKCg2G0wQiApKUmwnFkuJBjUBXkJCQmBvb0961LJ/KupKyUAdp4fIwY0hXGlU4x+WFpSU1PRrVs33LlzB+vXr2fdRSsKzLU7duwYCCEqf9w0DgEBATh9+jQ+fPiA0NBQTJ06FY8ePULfvn3x8uXLEu03IyND7X4VSU5OFtwecy9yhW3Tpk1x4MABpKamIjw8HPPmzUNSUhJCQkJw7do1pW0w95LiHFIKhUKhlB8SiXL/5ObNRD20hFKZoCKOUiFxcnLCwIEDsWfPHnTt2hVPnjxhQ80zUQqFRJ2trS1q166NFy9e8FzwGJgw+ppEUuRiYWGBTz/9FHfu3EFYWBj27duHpk2bonXr1hpvg5lv9/z58xLtOzIyEkBR9M2ykJqaiu7du+POnTv47bffMHHixFJtx8jISCNRrQv8/f0BAOHh4SVe18LCAoGBgVi9ejVmzZqF3NxcnmuksbGxyuNi9su4VWrKtWvXlMRdbm4ubt26BQsLC0F3SVNTU7Rt2xYLFy7Er7/+CkIIjh8/rlTv6dOnAKBRYB0KhUKh6AYhEUcDm1DKCr2DKBWGM2fOQCaT8ZZJpVLW2sAEfHBwcIBIJFIZzGHkyJGQSqWYOXMmr/P88OFDbNmyBXZ2drzUBJrCWKxGjBiBnJycElnhALCWoRs3biiVPX78GGlpaUrLr169ijVr1sDc3FwwLUJJ4Frg1q1bh0mTJpV6W46OjnoLpjFgwAB4enpizZo1uHz5slK5VCrF1atX2b+vXLmiFOQEKLKEcQOJODo6IiUlBXl5eUr1J06cCBMTE3zzzTd48+aNUnlaWhru3LmjtPzp06fYvHkzb9nKlSvx7t07DB8+nE1JcPPmTUGrnVA7GSIjI+Hm5oa6desqlVEoFAqlfBAScSYmtAtOKRt0ThylwhASEgJLS0t07NgRXl5ekEqlOHfuHB4/foyQkBA2EIW1tTVat26Ny5cvY/To0ahbty6MjIwwYsQIeHp6Yvr06Thx4gR27NiBJ0+eoFu3bnj37h327NkDqVSK7du3qwwuoo7GjRujffv2uH79OsRicYnnkXXr1g02NjY4f/48pk6dyivbu3cvVqxYgW7dusHb2xvm5uZ4+PAhzp49CyMjI2zYsEEpEEd0dDSbrJqZGxUdHc2mKHB2dsaqVavY+h9//DHu3r2L+vXrIzU1VTAoyHfffadRIJmuXbti7969GDx4MJo3bw5jY2P07du3XCxC5ubm2L9/P3r37o2AgAB069YNjRs3BgC8fv0aV65cgZOTExtkZPXq1Th37hy6dOmC2rVrQywW4/bt27hw4QLq1KnDpi5gjisqKgrBwcHo1KkTzMzM0LFjR3Ts2BGNGzfGH3/8gQkTJsDX1xd9+vSBj48PG0wnLCwMo0aNwoYNG3jt7dmzJyZOnIgTJ06gfv36uH37Ns6cOYOaNWti6dKlbL1///0Xf/zxBwIDA1GnTh3Y2tri8ePHOHnyJJydnZUiXL58+RIxMTGYMGGCrk41hUKhUDRASMQZUBpeSgWFijhKhWHZsmU4ffo0bty4gWPHjsHKygp16tTBX3/9pdSB3bFjB6ZMmYLDhw8jPT0dhBC0bdsWnp6eEIvFuHjxIn7++Wfs2bMHv/zyCywtLdG5c2fMmjULHTt2LHUbR44cievXr2Pw4MElipoJyMXniBEjsGnTJiQnJ/MCXnTp0gVPnjzB7du3ERYWhry8PLi6urLJxoUiZyYmJmLbtm28ZUlJSewyLy8vnoiLjY0FIBd6CxcuFGzjqFGjNDqudevWAQAuXryIQ4cOobCwENWrVy83t77WrVvj3r17WLlyJU6ePImrV6/C3NwcHh4eGDhwIJuaAAAmTJgAOzs7REZG4vLlyyCEwNPTE3PmzMF3333HE/Rz587Fhw8fcPz4cVy8eBGFhYWYP38+e8988cUX8PPzY62AR48ehZ2dHTw9PTFlyhSMHDlSqa3t2rXD7NmzMWfOHKxbtw5mZmYYNmwYVqxYwQumM3z4cOTl5eHatWu4efMmJBIJatSoga+//hrff/+9UrTV//3vfwBQ4eY0UigUSmVDSMTRXN+UsiIiNGV8icnIyICdnR3S09MFA2Aw5OXlISYmBrVq1YJYLC7HFlL0BZN8OiwsjJf4W1OePHmCJk2aYMmSJfjxxx910EKKoXDp0iV06dIF8+fPL3FS9OKQyWSoV68evL29cfHiRa1u25Cg71gKhVIRCA19jbAwuZu9SCQCIQS2tuaYOrWVnltGMUQ01RnUEkehaIl3795h+/btaNCgQakEHAA0aNAAY8aMwerVqzFp0iSlvGgUiibs2LEDsbGxbBoCCoVCoeiPJk2c4epqCTs7cxgbG2HDhruC0YoplJJARRyFUkZOnDiB27dvY//+/cjOzsb8+fPLtL3FixfD3d0dsbGxaNSokZZaSalKiEQi/PPPP2jZsqW+m0KhUChVlvz8AmRnS+HsbAlnZ0t22ciRjWhgE0qZoSKOQikj+/btw7Zt2+Du7o6lS5ciJCSkTNtzdXXVunsdpWrBBK+hUCgUin5ITc3Fr7/ehpmZMWbO9IdIJEJcXDoiIhLg7m6NTp1qFL8RCkUNVMRRKGVk69at2Lp1q76bQalgBAYGUncaCoVCqaRkZUkByC1vjx6lQCQSYd8+ee7OJ0/eUxFHKTNUxFEoFAqFQqFQKFpCKi3A1q0P2b+PHn2J/Hx+hMrbt5PQooWr4qoUisZQh
RYROl6pTiz3QkjrZTKlU1BfgJuf/+d6futihG2L2bf16Vl2coTjDQFSHJ75EmIuT3SqUJK4nT6d13+ShVWRlw3XWq39vfH8CHH4rRIMm5npHBtwn49FPggw/4KJucffuAn/5UFFf/+19I7zRAep6/8spWnHRSJcrKVHLn+/uBv/0NmDGDbzhO8/HHwN69/Lr9/e/ArFmq2xaJG2+chv7+AH++XnopcPAgX8RlIM9NkbY2vnk5IIvEiQGKk06qRGFhWkgEVsBmQ23xKJTUbYezvcXw+scSJuKSFDJQfPPN7Xjzze34/PNzccIJw4S/iy0GrI3EkQe8tMWAPCfOaCSO/04iFDMzXejsFKtTWp0TFy4/jVgB9RYWUYMWbV1dXlUR193tFR6kRiJxYtEW5eqUZJ/q7T+nhfR0Fzo7vejq8qGpqUfysGlu7gXHcbDZbKbZKQFpHlc4Gw1dYt/hsMNodcq8vBRTrzl6Pcj+mj//DaxdW48775yDP/xhnvD32touVcslEeVZWW5JVMDotTNqVK7i62Rf3HbbEtx22xKhBLeRgXBeXsqAiNMSiYv+URepOqXcXm1WThzBZrMhJcWJvj4/env9lgobQPn66u8PhEQqSNTWjMkJ0U4pHeCLdkorC5tEslOK+ZXSSNzg2Cl9vqBwzLWKOLngiM5OSU9cSSNx5DlH59yqReJ+9aslePbZH3SvRzTQxbOKitJQX9+D448fKhTqIuI4kpUSCL1mlSNxlIgjFSUvvxzwqEec5CXwv/++CS6XQ7yXnn46L+I++gj41a9Cv+Chh8QIXGMjX9GSlPynoCee9u/vEO6RgYDYekboL3jttcBzzwGPPBK6vPff5/+/4grgZz9T3S4tOBx2pKUN7PulS3lBWl8fXsR5PMDbb/NCrrBQeDk314O5c0uRleXBnDmRJ7L3DJ8Ov9ON4HCdzcnjBCbikhT5zPOKFYckIs7KnDiAtlOSnDhzWwzQ3yP2znFJIknkgWW2iNNSnVIUcdors4XDbrchNdWJ3l5/2OWuWVOPQIBDRUWm+uxaGCLlxBHMtlPSy+7u9glFTTIz+ehcf38APT0+eDxO4bgOpp1SHlkyWtjE7Aimw2GHw2FDICDmKa1dWw8AeOmlLRIRR/ZpYWEazj+/GmefPUr4289/PgU+XxB5eSn4059WCq/rvXaWLr0Ab765HbfdNkPx7/JjRvJG9VQwJGjpFRdbO6WWSJy+czglxYG+Pr/kfLTOTqkciZND2ymjRd1OyV/zVuVw88sML3BEx4jbAjtl5EgcHQHV2ifu6qsnobPTi6++OoBNm5oNNY8m91v1nDin4mSlWiSutpa/5qdNK5KIK6twOm248UaxOuMXX5yHxx5bj9/9bg5OPvnNgXUVI3GRsNlseOaZk3DFFXwvM6XzJhjk4OnvRtaf7+RzywBexIVdT+myt25tht8fFEXcwoXAb37D59bJ6e4GFi2SvnbNNYrLkV/DRKSvWlWHTz/dg0mThuDss0fzfySis1mh6AepXDlunPpGUdTWdqG+vgdFRWnh7/c5ObyIa28P/4WpqXxvubPOkrxcWJiOk04SrRlLlhyA1xvArFklIZMaF1wwBq/ZbNg4aQGmTClEIsJEXJIif6DKIydWthgAxAcbEXFqdkq6bLPeQYB8RiwlxSkRWGTbYmGnNDsSR5YbScSRoiZGonBkGYB6ThzBChFHHzty3lRX52LDhkb4fEE0N/dJRJAZkTjtIk46q2y0xYAV+83tdqC31x8y8JWvFzknR4/OxSOPSMvsjx6dh8cem4+XX5ZWBdN77Rx1VDmOOqpc9e/y7yMD07IyIyKO35daInFm2Cn1FzaJHInTew7zwqpfsizrInFKIs4fEs0V7ZTR72O1Zt9K0SCz0NprUWqnNCsSpz0njlwrNpv2yZWqqhw8+uh8nH32f7FpU3NUkThpTpyynZJGLRJHLJ233DIdF12kTQCYybhxBXjsMb5SNNkmpVZI4bj88on4+usaPPvsD4rPSI7jMHPV28j44l/8C+edpyy+KOj9e+qpVfjww93S41VVBdx7r/oXPPMM0NICHHUU8PnnqtGxlBQn5s4tFcYKZBmKRYUKCvj/m5pCv4hU2xwSuQJ2b68Pn366F3v3tuPooytCRNxrr23Fli3NKC3NwKWedKQAvF1SiUce4QuaPPEEkKvs+AB4V1Jzcx++/JJvVzBhQkGIiBszJh8LFlTi00/3xE01Vb0wNXTifAAAVZZJREFUEZekyAWR/IFsZYsBQHw4Ecuhmp2SHozoHQTIH2QpKQ5JJIn8PRbVKUlOnJkiLiPDhaamXsHqqEQ0+XBA5Jw4glV2SoDfryR3q6IiCwcPdqGurhvNzb2S89rsSFy4B7g8DygaO6XZeDzKIk5+bWuZWJDfN8y+dpRE4ZAhqYaOJd1mQA0j7UvUUM6JE88LfSJOf2ETQBR9UhFnbWETgD+XSFVK+T3BXDulciTOWjul/py4WNgp6aImei3ZRHRZU53SKbGlE9RFXGihlFhB1oHc3/Wsk1olVYA/Xw+VjUHfTy5FSm6mpubVNpsNDoddUl1Rc+Q0PZ0XioTx6tbA7GwPTjppOJqaerFjR6twfLdv5wuqSKLC+Xz/UsVI3CuvAIcOAaNHR1y9rVtbsHcvH1lTypfetasNAO/M6HUPiDilSFxfH3DjjfzPr73G20UnTZK8pb/fj76+ALZubcHHH4s5hmrH1s4FkdbTBldjAEDkbYk3mIhLUuQPVPkgdLDslAQlO6XXGxREnrw8tRZCRZxTMUoWiz5xxLqm9HCLdrlq4pHjOCoSZ0zE0ZU3vd4AFTFwSh7KZtsCAXH79u/vRH09L+LKyzOQn58yIOL6hFxAl8tuyuCVPj7hKjGG2imV80OUoC1zVkXigNBIgrqIU496hYo4cwdaSvZBoxMd5BzctKlJyJeUY5Wd0u12wOsNSAZw8hzZrVtbQtaro6Nf0sNSv53SGbIseS6NWZC8q7Q0vsXJ1q0titEqM0WcUn6a1xsQBv7W2Cn1tRjIyJCKuGjaO2i1U3IcJ+xnrflwNOTc1WOn7O/3w+12UHZK9Zy4goI04ZogqNkpyTpojXpZiVJhE62EE/9XXjkRgcAEOPNTAR3f6XTyIo7cM0KifHv28FGxiRP5HnAG6OnxwW63CceQHN/t2/nCHqTvLgAxEqck4oYN4/9pgO75SsaigUBQuJ7p87LDkYZcQDkS98030t9ffTVExH3/faOkQAxB7dhOqVuL2feficC06cA1ayJvTJzBRFySIq8UJhdx1rcYkIat6Sp5dJlxeR8nPShF4gIBMQJIlmlVi4FwhU0OHOgAYL6dElAXj3v3tqOhoQculx1Tpxrzd5NltLX1o6TkCSHKMXx4FrZsEas3WWOn5PfrzTd/KbxWXp4pLOvLL/fjL3/5FoB5Ftni4nQhykCfo3LkQk3aYkCPndJ88as2mAgVcWRiQT0PRX7fGIxInNFrhEQ133hjO5zOD/Hyy6eFvIdErc2onEgPOjMyXGhpCSjmxJEcxc8/34fTTnsbH354DgDgiy
/2Y8GCNyQlsPWex2QAPziROH5ZM2cWC/e6/v5ASAlvK3LiyKCupaUXY8Y8Iwws4yUSR84nj8ehW4jTkOdkW1t4EXf11Z/h6ac3AjAm4si5qzUSt39/B8aPfxYXXzxOiAzRkwVyO6XdbkNZWYZQvRgIHXMQ4ikSJ/aJ02enBNStvwBQUJAW8poWnE4b+vuBDz7YBYC3nq5eXYuPPtqDyy4bj8pZs3gRt3Gj2DsOADZtArZtA8aMCZujtnZtHf73v/244IIxwhjQ7w9KzgvJ+UUicUp2Sh2Q8RDAi0ifL4B//nMdCgpSceGFYyRtk+p6nBgGKIu4zwbaAIwYwTcUVyjwQtt/6e1SO988RbxQdbQrLC8BiP1VxLAE+QNVHkmwusWA3HsstVOKs63RzJQr5cRlZoqVI82chachNzk1u0hLSy/27xfzj8wikohbsYK3Uk6bVmTYakjvK9qmJi+SYoUt8MwzR4a8Vl6eiaqqbADAP/+5Tnj9ggvGmLJMp9OOSy4ZhxkzioTlKPHMMydh2LAsvPjiKQDkkbhY58Qp23rkIo5UcS0oUF8H+XVrdj6psogzVvxnwYJK4eclSw4qvscqOyUZxCuJuFNOqRIiLB9/vEcoH//WW9tDBJDe/Uu2gy5Jb1VO3PHHD0VFRSauumqSZOJNrU+cmUKZ7NennvpeEhmwJidOf5+4kSNzMXNmMX7yE4VS7zqorOQnVDZvblbt/xkIBAUBB2jvEUejNxK3YUMjurp8+OabGsV8RLmdEgidjEkkO6WRSJya9RcAsHYtsGYN4NfXK08oKDKAz8e3HeA4Dh9/vEfd3vj668A55yhXkaQ4cKATPT0+PPPMRmzf3ipsLx0JltxP1CJxzc3A734HPPaYpu1qbxfdQ729fuzb14GOjn7s3t0m2X9Opx19KRkI2B0I9ClMbBAR98c/qubEkbEtPQHpdNrViwHl5PD/q+XgxTksEpekRLJTDlaLAUKknDgjAwB5hC011Sk8UDo6+oWbUTTVw5SQNyyWs2oVX7Vp1KhcUwftkZqMR9Pkm6A22JXv63BRK6Nceul4zJpVjLFjnxVeq6jIxJw5pXjuuU3o7OQfBA8/fDx++ctppi33uecWRnzP1KlF2LtXTBQ3Wp3Sqpw4IHIkjjyos7PV7V/y9RuMSFy4yGA4Jk4cgp07r8LIkf9WtaNZZack12IgwCEQCMLhsAvLmjJlCN577yxUVS3Cnj3tWL26DvPnDxPyVWn0ijh56xbAOkv8rFkl2L+fr3D3zDO8iOjvD0jyAAEximRudUp+oL95s3TwaE2zb/0tBtxuB1atujjqZU+dWgi324Gmpl7s2dOOqqqckPds2SLdB9HYKbVG4sikb0+PT0N1Sv6YyVMH1ERcfNopA5LftRDuvKn/9R9Q9MV78P/5L3D+9g7N3zliRI7gDKHXCwDOP78aeElFxJEiI4XhHTjkWJ588nBJygXJVwNkQn/UKODFF4HiYukX7dsH/OUvQEkJ8ItfhF0mX7xOPBd6evyS61ism+DA7343B/9IsePL467CTy+fiEr6i7xefj18PuAEaWEu6fK4ge8Tj2VJiXq/3ka/B0MAcG1tsHGcddX+LCL2VxHDEiLZKeMhJ66/PxDVTLlSThxZLl/NsFdxXaIlkoj79lt+sDZ7tnExpUSk/nTRFjUB+H2q9HB1uRyDMnM6enSeJBpUXp4Rsh/N3q9GMFrYRB7pMnNdIuXEiY211dchFiIumjYcdAEluiAAwarqlPT9Sux36ZUsi0ymrFhxCN3dXmzY0BDynfpFHH/sSH6Uw2GzbCKOhqwn395AeXBuhZ1SLmCsiDqKkyBaWwyY5+zweJyYMoWv7qck8gHg22/rJL9rbS9Ao9dOScYLdD9CaXVKpUic9DpWy4mLr0gcsVNq7xNHIBOZ8ugmx3Ho3lMDAPAX6XtWBYOcMDY77bQRmDevDAAffc3NTVGPxGmsFKnWW5IurDNuXL74h9xcvt/c/PnKy4sgGoHQsSd9TgGiq4Ds+2FVuagYmh0amXa7gU8+4a2jYZZLngP0sytc9eNGH3+/tgUCfKuGBCP2VxHDEkLtlGqROGuWH5oTp9xiwFw7pUMyWCM3isEWcSQiNnt2seLfjRLOTtnb68O6dfwgMZpIHL0cGpcrjB3BROx2m8TWWFqagQkTCoRlu90OYdATS4y2GDDb2guo5/SoPajDXQ8ul0Py98GxUxrPG6XXlURqaciEh1V2SkAc/MvvZWSme+XKQ1i7lu/fWFqaITkuRiNxra38wMeqfDg5JO+Lt1Mqiziz7ZQ+XwCbNlkv4ozkxJkJmZQik39yyPOEMBiFTcRInF8YFNP7nv45ke2U5PoxUp1SLRLHcUBaDx/Zsmkov0+zbZuYdz5xYoGwb8vKMvjJGpNEnHw7iZNh8uRCnHJK+FYIAICGBk3LA+hG6g6cddYonHdeNYqL04W/d3Z64XDYhevwrLNG4corJ2L48JzI66GAPBLncNjDTmzb0tIQsA9cU62thpYZS5idMkmJt+qUynbKYFQPRnmUwONxwm63CQ2iCdFUD1MinIjjOE6wUxqtEKkGLeIefngtKioyBQ/9d981wO8PoqQkHUOHRtdANT3dFZJo73LZkZrq1FQKO1rKyzMFQUoGjzNmFGPp0oOYOrUwqkICZqHPTimur5mz+OL367VTho8G5uWlCMfZ/EicedUpyfeRqngdHV7k5EgjiVbbKQHSLsATsqy5c/nB+cqVtVi+/JDw2tKlB4U8L73nsmin7B1Yp8Gx/tD3PKUS4YD5zb5/+KEp5LlljZ2SX+bTT2/EhAkFuOKKiYrvM3NCgGbOnFL885/rQsQaQS7uBsNOSUfiSF839Zw4fv9pFXHxaackIk77+SW3/hKCQY4ScQW61uerrw5I1u3TT/cA4KNVL764CfO9HpQAUYg4sdLoxo2N2LixCaNG5YSf4Pv8c+DgQb7ZeFGRdHkaInEpKU4cc0wFbDZeJBKuuGIiHA4bhgxJw/TpxcKY1LZ5M/D73/OC9d//Fr9Io9WRiLj8/FSMGZOHtDRX2GeeYyAPL72njc+Lq6iIuIx4IvZXEcMS5MnP6oVNBkfEpaaK62NWYRN54iq5IdPLTk11mp6/RQ+a5Q0iDx3qQktLH5xOOyZNMjdilJPD34hef30bbrrpS5xzznvC3+jWAtHaq9QicWRwM316UVTfH4mLLuKLBdDWvuOO42+sxx4bHzdYPYVN6AHPiBE5FqxLZDul3x8UovGRItN0HqfZlV3lg8jUVGdUdkogfJl2My1w8uJM8j5R9fU9AMTrdPLkQqSkONHS0ocXX9wMgLc60/vXuJ1ysCNx5Bzzq+bkmm2n3LChMeTvVhY26ejw4sorP5UUjaGxKhI3YwZ/P/3++6YQC5nXGxqNNFLYxKidkkREgch2yupqaZGJRLJTis2+tV+PZJ/K2wAEA0HDkTgau90mCOOsLA927WpDs20ggmUwJ462U7a09GH79hbU1fVg1qxinHtuNcaOzUdfn186rrnxRuDyy4EffhBf0xGJy8x047jjhuLYY
4dKXh86NAtlZZnC9Sc8r3p6gHffBbd4sfR6uOceXtjdc0/Y5ZWXZ2DGjGKMHZuHo4+uwIwZ4R1RdrsNGyadhB+OPg/INK+a+GAR+ylthiXYbDasWPETPP74erz44uaYtxiYNk28uShZc4w8mOgBJt2YNivLjZoa8WezoWfPvd6ApBIkmWEvKEg1PYoxbRr/sCeNMQFejNvtNmEWl8z+R4NS/pDb7cBdd83FuHH5OPFEbb1hjHLeedUApGLx9ttnYdSoXMUKlrFA2mIg8mDk228vQmenN6TKpxmo2XpoEacnMi0Vceaew3SPwyVLzofdbpNM8BghK8uNpqZexSixVdUp3W4H3G4HfL4gvF4+OrVxI1+Gm1ynbrcD06cXYdmyGqFAx5w5pXj77R3C9xi3U4o5cYMBucf19wdUm6ubmXcYDHKSYgsEK3PiCF1dXsWZezPzK2lKSvhJDD7f0If0dPH7a2u7QiYKjYgf/XZKcbzQ2Sm2zlBaB3JujBtXgHffPRNvvLEN//nPloSwU0bT7Fs1EtfZBXeAv+/YC/WJOPp+a7PZMGtWMWbOLEZfnx87d7aifuRUTLjjDmDWLGqBQVHURRBVc+aUYOzYfJSUpAv3kEAgiIKCNBQUpOHPf14Jny+Am26aLrgaejNykAqgZUcN8kg9ER2ROJq9e9vR1tY/0A6KQ2FhGoYMkbVjyObTKfrrm9HV3Cu2azh4EGhpiRiNGzeuAOPGaY+A2u02fHbSDSgsTMOEyko9mxMXMBGXxMyZU4p9+zrw4oubB73FAC2ecnNTMGqUOEtHDzrJw8KIRYQeSNNCihaQZlem5Jcr3mj7+6UijhRTsaIfmJJA6+ryIjPTLbQXMMPCqRaJ83icuPhi9R40ZmGz2XD++dIWAqmpLlx0kfXL1oqeSBzAV/qzCnIdhGsxQKILWiLT9Llrdk4cXZTn6KPNiaqSa1xJxFnZ7NvtdqC72wevN4C1a+sFOzNdpW/OnBIsW8bPKLlcdkybVhhlJE5anXKwBsK0nZLc44qL01FXJxYCMDMSBwC7d4eKOCvtlAS1SKNVkbiMDBdcLvtAMa4+iYg7cIBvVVNZmYW9e/leW2qRwnAYtVMC4gSQWk4cfQ6fccZI7N7dNiDiErHZtxE7pSwnbkDg+JxuODPSQz4XDnJ9L1zI56VNmMCLsnXr6gEAdaOmARfJqqIGg3zT68ZGsSWACmPGiEVLlM4Jt9sOn0/a/7LOl4LhALYt34651w68qCMS19npRX+/HxkZbnzxxX7s39+BrCzPQAVxO2bPLkFzcy+GDcvCEUeUCSIupb8bDZ39ABFxZGa+vDziMvVAzmX5ZEmiwERckkMiXLFsMTBxYoFkOfTNT4zE6T8VpZE4WsS5FX82C/qhT/JhCGSAk5dnfj+wwsJ0jBiRI4nEdXR40d7ej0OHuuB02k2xOqanhx4LK1oKJDJ6qlNaDbkO5KXf6QGvlqImBNrGan4kTnmAHA2kZYLcTslxnKkDb3rQyU9qiMKG2JnnzpXamekiQ1OmFCI11RWVSCaClYi4wTr3pCKOX3ZpaYYFIk7cx0oizprCJlIxoSbirMqJs9lsyMtLQX19D5qbeyU5zQcPdgHgrWdExJFnjB702inpSV8SzVazU8r3H3kWJ0IkLrTZtxE7pXSfBtIy8NmJP4cz4MfxOmfJ1cR22OPndALnnqtrOYC47YEAh02beCsvyX+kJwRbkI7hAOa++Gdgbhlw3XXA448Dv/0toCFy9e23tfjmm4OYM6dUmKwn9+pAIIjly8VJLgBATg44mw02jkNww/fA8GP414mIKysLu7y+Pj84joPb7dDkkrHbbbAH/HC1twLt7YKITBRifxWZwOOPP47hw4cjJSUF06dPx9dffx32/UuWLMH06dORkpKCqqoqPPnkk4O0poMPuWjk1SmtzomjRdX48fmSv5GBYSDACbN8RiJx9ABTbqckWFHS3W63CTcceR4SGeBYEYkDgCOOkEba2tv7hQHk5MlDDNlS5ahF4hgi9Lln1TWkFbIu27e3SgQ+PeAlD00tkWkr7ZRqA+RooCNxvb0+rFx5CMEgh95evzBZZYYFLjQSJzoKSHl4eWVYuioa+ZkWyXoLm5D7mRgdiV0krqREGmUwRyjTkbg2ANL9ZWVOHEFtosGKFgMEcs3t3dshKWRy8CAfiaOLhpBnjB702inpSV+yP9TslHLhQ57FiSDirIjEpZYXYfyz92HUc/frXh81sUbWM9DvA3btAr77Tvd3A8CBAx3Yt68d/f1+iWD85JM9ePPN7cJEIL1NjUMqxS/461+B9ev54h9z5/J94iJAzqXUVGfY8YlwHnk82D+Db2lQedaxfE86gLdTAhEjce+/vwt//esqrF1bH3HdAKCoKB237n8R1/x+PvDQQ5o+E0/E/iqKktdeew033XQTfve732HdunU46qijsHDhQuzfv1/x/Xv27MEpp5yCo446CuvWrcNvf/tb/PKXv8Rbb701yGs+OJAIl1okzqoBKD0bfeSR0pkTevaZWEOM5cTRIk4cDNHCzYpIHKBeoVK0U5ofiQNCRVxHhxerV/PVMM3qn6Y04GUiTgp9DseLiPvnP9dh5EixmpfRSJyVIm7oUPNzAsXCJl5ceOGHmDv3ZTz++DqJYDSjPQY9wCN2SiA0EkdTVpYpDMCJwKP3Lz35pIXQ/puxi8RZIeLsdpsgFmpr+Sjf+PGiRWwwcuIG204JiOfE2Wf/F3Pm/AfvvsvnTYoiTiz+M2yY/urDRpt9A8oTBvR5JxdjJMdVrRVFfNspjeTEyVu72FFWlomKCv3HiZzzX34pHb+S9XK1NAIjRwLTpwMff8z/8a67+CqOwcjH9q23duDZZ39AU1OvcDwDAU6IRJJzmxb7q2edjecveQgr5pwPNDUBpfpSNsi5lJrqDDtZT0d0N131e/S7B+6TGzbwxU5I+f8IkTil5vThcLsdSC8dsIW2tWn6TDwR+6soSh588EFceeWVuOqqqzB27Fg89NBDqKiowBNPPKH4/ieffBJDhw7FQw89hLFjx+Kqq67CFVdcgQceeGCQ13xwECNxyjlxVvaJffzx+bjmmsm44AJpfhM9MCSl7I3lxKlF4qwXcXSiPw2xOVkVibv44nH46U/HC793dPRjzx7edjR2bJ4py1C6+cXDAzeeoM/hWHvp1Sx5xkWcdTlxjz56As4/vxpff32Bad9Jtqm9vR///e9OAMADD6wRBp/p6S5TIjh0xIG2U+7c2Ya6um5VO/PDDx+Hn/1sEs4+exQAmJITR4hFdUq1SBzd+yka5BMHdPPhWOXEcRxnapEcOfLnxb33fgtAGon79tuLcO65o/HUUyfq/n6j1SkBcX+o2SnlEwliJC4RqlNKC5voa/atEt3ctw9Ys0bMG9PBhAn8hIU8zYWsZ3dWATB7Nv/iKacAt98O/OlPwNVXA99+G/H76RYD5Lj5fEGhwiYpekUL04DDhT1VM/DpguuBp57ihZwO6JSZcEW1JEVdKsrx+nl/xoEzLuNFG7FSpqZGtDuKFTh1nF+33cYv4957
tX8mTkjonDiv14u1a9fiN7/5jeT1BQsWYPny5YqfWbFiBRYsWCB57aSTTsLTTz8Nn88Hlyv0Bt3f34/+fjHfoqOjw4S1HxzUcuKstlMCwHXXTVF8nb5RipG4aO2Uajlx5tspgdhF4jIy3Hj22YXYvbsdS5ceREeHV8ibiKbfFo1S/zuWEyeFnjXUOjCyCrVomVTEabdTSnPizB1olZVl4tVXTzf1O5UKm/h8QV3CVQtqkbglS/jeTlOmDFGstHn22aOFfo5AdCJZfvwGLyeOv7+2tvYL94fSUjE6lJHhMm0/u1x29A6kfWVluSVRKCubfRPoCqoErzcgDA6tjMQRamr4ezopbFJRkYlZs0rw+us/MvT90dgpCdLCJsqCDgifE8dxHNVwOrYOBiA0J86MSJzv8Sfh+tt9qD/vChS99rSu9Sko4M8D+YTIsGFZuOuuI3hxd9n/+LL/Tz8N/O1v/BvOP5+3N0aAbjEwZkw+/u//jkB/vx9//esqAMSF063c+N5mA37yE13bA9B2SlfYGgz0uDA11YldI2YhZ/qPUDF9BG+lvPRS3kYaIfKg1Jw+/Pr5sHpTD+x2G+aVWjP5biWxnwqJgqamJgQCARQVSWc/i4qKUFdXp/iZuro6xff7/X40qcww3HvvvcjOzhb+VSRQM0AS4RrsFgPhsNlswg1QjMRF22Jg8AqbAOFEHInEWSPiCHT0QSlvIhqUHr5mD+YTHXrgRx6MsULt2NA59fFip7QCpT5xPl9Al3DVgryYgyji+FwNrZVh6aJHRpt9K62TlZD73aFDvLhwuewSsV9RkWnas4Q+58rLMyXHz5qcuMiFTejXBiMSV1vbjUAgaNoEnX47ZSQRJ/4sj16Fy4mjlx8PE4Nkv5DnuD4RJ+0TSfDX8dUpd7Tq3z4iQOTrYbPZxOsrPZ2PiF16KX+Tv+46YNEiTd9PWw3tdv4fve1VVdmYMKFAaC9gBsQFlpbmDPv8oa/7goJUDBuWJd5jiov5iJwGxxwtVLXQ1xfAF1/sF+7jiUZCR+II8ocHx3FhHyhK71d6nXDHHXfglltuEX7v6OhIGCFHIlz9/QEEAkHhIWh1i4FIuN12eL0BtLX1SdZTD/QAiLZjDoadkrYX0VjZYoCG5P21tPQJPnrzRByLxEWCHkzGWsSpCQH6fkYEjZZCP9HY/WIBnRNH8PmCaG83NxIntVOKkThiZ5bnw6lBmoED+vdveroLNpuY0zzY1SmJiMvPT5Wcd2bdewAlEScePyueV/LrN5yIc7sdltwL5ZN+wSCH9esbUFtrjoiLxk5JoIUFfW8JLWxCInGhzxF6+fFkpySRJ312StKYXrqdtmY+GNCXkRvymUiQPrNkYlYVux14/nngySd5i6FGlKyGRMR5PA6+xL+M+fOH4fPP9xmavOA4TlLYJCXFiTPPHIW6um5kZLjw+ef7hPfS1/2ECUOE9goA+Aqcf/mLpmWKkV5tx5I4VmKdFmGUhBZxBQUFcDgcIVG3hoaGkGgbobi4WPH9TqcT+fn5ip/xeDzweKyx5VkNLW76+vxCDxqrWwxEgr9gfcJAy1h1SvVm3+LPVtkplXPiBjsSt21bC4JBDk6nHYWFaRE+pQ1lO2XsH7jxSuztlMrHhh6cGm0xwCXAc03dTkkicVbYKe0hAkxemVINurqhXhFns9mQleURbOiDnRMnirgUyXlnpoij7zXl5RmS42fF9jY29kh+VxJxVrUXICg9L0477R1wHL8/or23y+2Uy5fX4K67luOxx07A6NGhudTyHHpAfd/rsVOSEvZAfDxT5OtuRiTO1sI33u7LyNG9Ptu388U7ONmNt6fHhw8+2AWOg7SHqg4Bx1tZxUhca2sf/ve/fWho4M9/tcnAadOKMGRImqExGsfxhe16e/1IS3PB7XZgyhS+QXgwyKG4OB2pqU4UFaWbVp+BzvvTQqKLuNhfRVHgdrsxffp0LF68WPL64sWLccQRRyh+Zu7cuSHv/+yzzzBjxgzFfLhEh7Yp0haJwciJCwcZFIiROPOqU8bWTjk4kTgycN28mX9glJVlmHYsb7ppWshr8fDAjVfIQyNWqFkeaXFJBv1argc6WjdkiLWTEWZA1pdugizNibPCTumQiJisLDeGD9fWX4juA2ZkYEQfw8G2U5KxpTwSRzc4jxb6fB4+PFtmpzT/eXXccUMlvyu1GLCyvQCg/LwgPfimTSuK+t4ut1Ped98qfP75PjzyiHKp+kg5cYCYmjRlirThc7jCJnTUKh4icfJtMhKJC6lOWctXqu3L1B+JI9Wn6abcAD9e27y5GVu3toQIPK3QIsXh4G2UP/zQhObmPpx3XjUWLBgmvI9+dqSluVBdnSe5b2nFbrfhuOOG4pRTqkKeU3a7DSNH5qKsLBNOp11RdBnZ1tGj8zBp0hBkZmq7Vsk5wHFcQgq5hI7EAcAtt9yCSy65BDNmzMDcuXPx1FNPYf/+/bj2Wr61/B133IGamhq88MILAIBrr70Wjz76KG655RZcffXVWLFiBZ5++mm88sorsdwMy7DbbfB4HOjvD0huzFa3GIgEuaDJekTfJ07NTjl4hU0CgSBaWwc3ErdpEy/izJwJP/XUEdi16yr885/r8NBDawEwO2U4Ym+njCzi9Agau92Glpbr4fcHDeWqDjbkWqAjKn5/kLKQmldwg/6Zvv8MGZKm2dWQkuJEY+PPYbPZDEWWpJGpwS1sQsjLsy4SR3/vnDmlMjul+dtbWZmN/ft/hn/+cx3uv391WDvlYETirr56En72s0lCNGzaNGVXkR5EOyUHjuOElhgrVtQqvj+SnRIA2tpuQE+PX5LjCUSKxIn3pME6d8NhRiROUiymsRHOvbsBAC3lo5U+FpaxY/Nx443TQ2zvZL2I0DC67046aTgCgSBcLofwHW63HePG8VUxly2rweLFezF1ahHOOGMkAGDdunrYbDaMGZMnGWcZZffuNixbVoPRo/NU2yJ1dnrxr39tgNcbwB13zNblGDv++KGR30RB31OCQS7mLYP0kvAi7vzzz0dzczP+8Ic/oLa2FhMmTMBHH32EYcP4WYXa2lpJz7jhw4fjo48+ws0334zHHnsMpaWleOSRR3DOOefEahMsJzXVif7+gMQiMRgtBsIhn5Uxt8VAbCJxbW39gijNzbU6EsdvFymjbuZMOABUVeVI9h2LxKkTaxGnFokzaqcErD9/zYRsEykCAfD3N9Luw6pIHH3/0Rt5Lygwbo+zOjKlhHyiID8/VZa7liH/iGHoZtazZhULFk7Auu2tqMgSLIvKdkrr2gsA0vNn6NBMzJhRbOr3i3bKAHbubENTE+8Y2bChAd3dXiHNghCpsAnAn4dK1xYt4uT1CYid0um0xyyVgyY6EacQiVu2DADQMKQS3swcQ+ukdO+l18vvDxqa/HE47JK8XTE6y4W8RkdMSduWo4+u0C2QvF6+wFRqqlM4xz76aDeamnqxa1ebqohzu+3CNef3By2dRKbPaxaJixE///nP8fOf/1zxb88991zIa8cccwy+M9jxPhFJS3Ohra1fMrsWazulPI/
HuhYD1oo4eraRWCkzM92WV/WTz9SZOYgi0MKaiTh1Yp8TpyUSZ26lxniCbJPc1rR/f+fA383KiZPbKcX9TucRWk0s7ZSE/PwUyWtmRuKIjRDgjy1tb7TyeUUEWjg75WBE4ujWDWZBFzYhUTiAn+hZvboOxx4rDs6DQU4xiqZVQJMJVY7jo1T0dULEQTxYKYHQNgdG+sRJ7jszZqDpj3/HilWNpopUet/7/UGYUaKBfKfPF8CmTU0oKEhVzfMDgKVLD+C44yp0bdeBA5148cVNKC5Ox7XXTgGgrSKv2+2Aw2FHIBBET48f2dnax1NebwB2uw0Oh03TutL3FD41IrFcR0kh4hjhERt+03bK2LUYAEIvZHMLm8TGTikWNbF+QCffLjMHUQQ6T5GJOHViH4lTK2yiZKe0ZlIjlqhtE6kaadY2Oxx2oTKk3E5ptX2aJjZ2ytBIHH3em+0EAMR8THp7lYoumQXJdwtnp7QqJ46eBLBiGXTUhRZxAG+ppEWckoCjvyMS9IRqb69fcp2IkbjYR+GA0OIXRiJxPl9QjDiWlyP91hswub4H000UqjabDU6nHX5/UBI504PfH0RtbRdcLgeKi9Ml2/rGG9swa1YJhg7lr2MiTOWRKb1WTnIu0fcPLcWcbDYbUlOd6OryorfXr6mqMuHBB9egr8+PG26Ypum+bLfbcMUVEwdSjxJPErGR2WEAiXIpR+JiskohF7KRwib0BSePxKWmOuFw2ITmmWajVJ2SWFSsWiaNfGBqxSCKFtaJ0C8sVsS6sInag4d+2CeziFN7wIsizryJHDLwkRc2GUwRR9utBqs6pTwXpqAgVXIu6RlkRYKIt2uumQxAGv3imxFbA1lOLKpT0nax6urQapHRQtvkli/nRRwporFuXb3kvUr5cPw6ajvX6Ge7XBASd0C85FibYacEpK6H1FQXKiuzTZ9YVbI66qGjox9PP70Rzz77A4DQ4+nxOITrnKTeyF0mel0nYvsC8f4xcmSOps+ScatSpdRw6O0TZ7PZMHRoFsrLMxMuHw5gkbjDAqWG37FuMZCbK33oRx+JEz/vcNjx5ps/Qk+Pz9TBBY1SJK6mhrdvlZWZL6jkyAfj06dHn/wuh7a4skicOokRiUteO2VKihO5uSlCUSECKZ1tpnB1uewDFjG7ZGAyGNF3AinRDcTOTllWloGRI3Px4ounoLQ0w9TnyFdfnY9PP92LG27gq+TabDZ88sk5aG/vt8RqSBDtlN6Qv1ltpwSAL744DwcPdmLy5MLIb9YJuX+3tvYLdtXTTx+B5csPSVpzAOoirrg4XdOybDaxmJpcxIl2yvgYLJthpwT4aJxr+1bgm2+AY48FqqvNWkUBpRw2PdCNvgFeSOfnpwppIB6PA0VF/DFubOxFf78/5Nmm18pJ96AjzJlTCrvdhhEjcsJ+VmncqgW9LQYSHSbiDgNIlEupsEmsZh7kM9fRV6eUDjJOOaXK2IppRKnZ94EDvIizIj9NDi1OS0rSDZX/jYQ0Jy4+Zk7jkXjPiQsEgkIkIRkjcQB/zclFHMHMbSYDKb7Zd2wicXRxgsGag5OLOBJluPjicaYva9y4AqFaHuGkk4abvhw5JMoXi+qUQGirAzMh5y0pEjNsWJYQEZFvr1LkIyXFqescT0lxDog4adSILmwSD5gVifN6A0h75x3gzjvhPfMcrL/jn8jKcoe0CoiGG2+cBofDbnjMphShuvTS8fjoo93Ytq0FHo8DmZlu5OSkoK2tDzU1XSGuIr0CkoyP6PuH3W7DnDmlah8RIJWR9UTigkGxTYAe2+fq1bXw+YKYOrUwISoy0zARdxgQLhIXDyLO6bQbEglqhU0GA6VI3MGDRMQNbiRu9uwSSyKqrLCJNmIdiZMX9CCQ9SIVTIFkFnGZ2LixSfFvZkbjRTtl7HLiJkwQBc7WrS2Dskw1EZdMhLNTijlxiXn9yCNORxxRqipalSIf5eX6oq2pqU60t/eHsVPGx/Mkmpw4ehu83oBQmbJ53HR89NFuDBuWZaqIi3YilTwP6G3MzvYILRLIGGrChAL09PiQmuo00U6pf92Li9PQ1+fXNXEi74WnlS++2I/eXj9Gj85lIo4Rf4iFTeKnxQBtPzJSmRKQtxgY3FOZLE8q4vhZzsERceLAdOLEgjDvNA4rbKKNWOfE0SKNhjxwiV3K43EkZOK2FsLlhJppISXXQbQtBqKBHoTt29cxKMuktzUtzZmUkwHhqlNanRNnNXIBMHduqer2KrUX0PtMU2v4TQRD/ETijNsp6WIjvj4fsHw5AKB7ymxgU+wmyNUQbYbS9ZILrfnzhwl/83oDuPDCsXjllS0ABlfE0cV2tEI/i/XYKcmxivWErBGS84nOkCDaKeOnxQBdjcuIlRIIb6e0mnCROCuKjKgtH4DpPYUILBKnjcEoZBMOtRla8kAS8+GSb+BNCDfINHO7yT0nli0GAGDcuHxs3tw8aMujxX9hofbG5okEXZ1S3t9sMOyUViLPm507t1R49muLxOkVcU7F7yYiIH5EnPFIHMDvV78/CKxbB3R0ABkZ6BkxFti02/RrZOnSA2ho6MGcOaWGJopFO6V0G0kuv5KAcbsdqK7OwwUXjAHH6Xc1VFVlw+WyD1rknt4GPZE4ci0kYp+4+LiSGJaiVOUn1i0GaPuRkcqUgPTBZFQIGkUu4jiOo3LirL9h2Ww23HHHbFxwwRjL8v+khU1YTpyc998/C8cdV4Ennjgxpuvx4x+Pxumnjwh5wMojcclY1IRA56GOHp0r+ZuZIu7GG6fhRz8agcmTh0gmNgbTTgkAH3xwFo4+uhzvv3/WoCyPnjQaMsR4o/J4hgi0YJALaWVgdYsBq6muzsM554zCqFG5uPDCMZg6tVC1pUJvb2gkUm+eNylasXFjo+T1eLNTRiviyHMx9b9v8S+cdBKCdv41syfId+9uxw8/NKGtTTn3NxJqVRtPOmk4Ro3KxahR4n2zv9+PxsYeIbI1Zkw+xo7N1+14mjBhCBYurEJVVY6hddaLzQaMH1+AsWPzdY1tibBNRBHHInGHAUo361i3GKAHPeZE4mIr4lpb+4QZzLIy6wubAMBf/nKUpd9Pe8PVKiAezpx22gicdtqIWK8G3G4H3nvvLPz1r9/iN7/5WnhdjMQlb3sBAj1xsmBBJbZvbxV+z8w0b7tvvXUmbr11JgDprO9gi7jhw3OwZMkFg7Y8etBNWgAkG3SUravLK3mmkIqViRqJ4ys2nyF5jc4BpCOPxLHjcNiEc1zvxOScOSX48MPdWLmyFjfeKL4ef3ZK6XroFZdutx12BJH5wZv8C5dcYpnLKdrqlHl5KTjuuKEhExFz55ZKiiUBwEMPrUVvrx8XXzwOPT18XlqkapJms3t3G956azsKClJx+eUTNX0mNdWFc8/VXxmU2SkZcY2S9z3WLQboHBKjIi6WOXHy6pQkH66gIHXQ18UqWCQusZCfd2IkLvntlLSFWZ4jatW5S+ciJmqERiv0cyJZI3EOhx0pKU709fnR3e1DAXUaJbqdUgmyLX5/EF5vQLDMksnIvLwUND
by5ef1pggQUSBvLB5vdkp5VEq/ndKBsWiAo6MDyMsDFi5E8Hu+2JDZIo4ITKPVkPPzU3HMMRWa3pud7UFvrx/btrVg9eo6AMA554zG0KFZuiyVbW19cDrtSEtz6d4fDocN3d2+QRlPkfMgESNx8XElMSxFqepWrHPizIjE0Tfcwc+JkxY2OXCALzAwGPlwgwV9XIj9lhG/yB92oZG45LVT0r0Z7XYbKivNb7khhxZxyZgjpkayRuIA9QqVySziAOn2krQL+hmtNxI3c2YxbDa+8E5tbZfwOonEJY+d0o5NKMaaD9cDH30EuN2WuZzESJz1hbTIs6KlRbRuvvXWdmGco5Vnn/0BDzywWuhNqAel1liR4DjO0FglkXPikiNkwAiL0oMp9i0GxEicUX1gs9ngdjvg9QZiFol7771dOPbYV1FXxzcWTqbS27SIi3UvNEZk5BMZoTlxyRuJoy2TnZ1ejB2bj717ra3cqFTF8HAgWSNxAB9RbW7uDTm25PdEbTGgBN/rkH9+dnf7kJfHizYSiaOvKb05cVlZHowfX4AffmjCypW1OOusUQDiLxIXvZ1ywJHjTgNmjwbA5+Tm5Iw1/VyJVsT19PjQ2elFaqoz4oQeeVbQIo5ftt4+ccarU5LxR19fAMEgp2msWlvbjaee2oDc3BTceON0zcs67bQq+P1BzQ3t44n4uJIYlqLUDybWLQboYiZ6ZlrkVFZmweNxDPrFR9+Uliw5iG3beAvFpElDBnU9rITOOSwtHZw8P4ZxQiNx/MO+vT357ZQAkJPDD0yOO24obr6Zf4DPnGlN5VYAOOuskQD4CmyHA8Refeqp1hRSigeIcCETH4RkjMQByqkW5H5BmoEDQEGBfuFOLJUrV4qWyngXcUbslIC0V2dOTgpGj84z/ZlJ2iEYFXGbNjXhiSfW45NP9kR8L3lWtLbKRZz2ZXMcZ4qI4zgupN+gGuSZp9cZUVGRheHDcxKuRxzAInGHBUo36ljbKemLTKmksVa++OI8tLf3Izd3cEt8y29KTz99EoqK0nDCCcNUPpF42Gw27Nv3M/T1+ZGTM7j7l6Gf0Jy4w8dOCQDbt1+JAwc6MWVKIQDg228vkgxEzebkk4dj1aqLJFXdkpndu69GXV03xo+3pi9lPEBaRbS09EpeJ9dQdnZyTYRkZLjQ2tonmeAl+d1jx+Zj69YrkJLiMDROmDOnBIsWfY8VK2qF1+LNThltThzZDlrEWUW0hU3UWgwoofas0CPivN6AYG00IuIcDjs8Hif6+/3o7fVrqmJO1k9Pe4FEh4m4wwBlO2VsWwzQRBOJKyvLlOTDDBb0TcnptOOyy8brai6ZKAwdan1uEcMc1O2Uh0ckbsiQNInVb9asEkuXZ7PZMHOmtcuIJ4qK0lFUlHh2Iz2QPLDmZjEC4fUGhEhAsk2EKI0N6H6n1dV5hr+bROLWrKmDzxeAy+WI+0icUTslEacAUFfXjbq6buTnp6Ciwrzn53HHDcUxx1QYFsBqLQaUUHtW6BFxJApnt9sMH+/UVF7E9fT4NFUA1rONNNu3t6C9vR9VVTmDXmk4WuLjSmJYSvjCJjFZJQnRROJiBd38tqwsIykFHCOxkEfigkE+yVuMIiTXAJTBMBuSq93cLEbiyCQIYG67inhAdOmI9lEi4qLN766uzkNODl/lcOPGJgDJ1ydOyU65bVsL3n13BzZsaFT7mCHcbgdSUpyGxxpilCry5/PzUzF9erFqxWMt0FZKo8GC8vIMVFZma95mPdFGmpUra/Hhh7tRU9MV+c1xRnxcSQxLofvEvfvuDmzf3hLzwiY0pC9NIkFH4pKpmAkjcVEq7hMIcIdFYRMGwwyInfLgwS689NJmNDX1CNdPWpozbiJIZqHUQ/bAASLiosvpstttmD2bj1STVgPJ1idOtFMGsXt3G5qbe2Neb0ANPVGq7GwPTj99BK64YgLOPnu0MMbx+zkcOtSlyT0lijjjhr8f/7gaP/3pBM35hSQnTm8kjryffD6RYHbKwwAy23bwYCfOOuu/mD27hLrRxO5OU1aWgZqaLsybVxazdTCKVMSxoh+M2KPUZiMQCFJ2ShaJYzDCQaxU//rXBvzrXxtw5JFlePTREwAk5/Ujd+l0dPQLotWMNIVZs4rx6ad78d139QDir7CJfLBvpNk3ANTXd+OFFzbBZrPhqKPKAZg/Qb53bzu+/74RxcXphqziRKDo2feFhekoLExHfn4Kmpp6UViYhhdf3IxLLx0Pj8cRNuKVlubE7NklUYk4vRChqvf8SuQWA/FxJTEsRV5Ra+/e9riIxC1degFuv30WnntuYczWwSj0gJlF4hjxgFIkzu8Pskgcg6EReT7MsmU1wiRIMtqRSeVqUvSM2Mmysz2mWEdJBIWUqo93O6XenE9ipzx0iN9vvH2dP1/MHls1N/fiu+/qsXt3u6HP680XCwY51Nd3o6mpB2VlmZg8uRBZWW709Pjw1lvb0dTUiw0bGrBvn/L65OWlYuHCKhx//FBD60ujtTplRoYLo0bloqxM38R6Ijf7ZpG4wwC5iGtu7kNhIV8AIJYh/6qqHNx339GxW4EooGeXmIhjxAN0Xz8Cs1MyGNqh+5cS9u3j+w0m4/Ujj8TRRU3MgEQvyT0o/uyU0gGQ3sE/EaOtrX3CviTbaLaIi7ZPXFVVNhwOG4YN01Zs5auvDmDp0gNIS3Ph6qsnITc3RTiOvb1+7NzZhsWL92Ly5EIMG2ZNm5WGhm68+OJm2O023HzzjIjvHz48B8OH5+heDjlWROgmEkzEHQbIm07Ss/PxkBOXiNB2SrMeeAxGNCjZKflrndkpGQwtKFWm+/zzfQAOLxFnVooA2WfkHhRvdkp6PQoKUhXdDOEgkbjm5j6Ul2fi0kvHY/v2VgDxJ+JGj87D6NHaq42WlvJRyZ4eH1atqkVlZbYQqc3KcguThmqF6fr7/fD5gvB4HHC59LcYAPiee11dPiHCadUzLJHtlEzEHQa43Q44nXbJxd/UxFffiocWA4kILeL0zt4xGFagNADx+YLo7GSROAZDC0qRuMWLiYhLvkkQUtiE2CnFoibmTEwSCyqZNI43OyWd02Vkm8l2dHV5YbPZUFqaga1bWwDEn4jTC70/tm1rFYrTAPy1QEScWpGTtWvr8dlnezFp0hCcffZoQ+vgdjtQVJSGurpu1NR0WXYNkvMgEUVcfFxJDMuRWyrJzFs8tBhIROiHUFFRWph3MhiDg5KIa2vrF/JfmYhjMMKjFImrre0GkJzXDx2Ju+OOpbjrruUAzBNxZJ+1t/ORuHi2Uxpx1JBIXCDAoaAgFU6nHYWFaTjnnNEYNy7ftPUEohdxnZ1etLb2aW5MTju46BYUAH9c09L4501bWz8ef3wdPvtsr+Q95FiTfWQUUmDntde24osv9od97zffHMSf/7wSH320W9cyZs0qxoUXjjX9mA0G8XElMSxHLuIIzE5pjJycFIwbl4/Ro3NZQ2xGXKA0u11Xxw9AU1Oduq1CDMbhBmkxoESyi7jnntskvH7MMeWmfL9op5RG4uJHxNGROP2OGiJQ5
s4txXnnVeOvf12FDz7YhcrKbN1FUiIvi19XUrpfLx9+uBsPP7wW33+vvX/dEUeUwem045hjKiSv03bKri4vGhp6sHx5jeQ9RCxGG3UdO1a0gG7Y0BD2vX5/ED5fQHdEraQkA9XVecjLS6xG3wCzUx42qIk4Zqc0ht1uw/r1l8Jms7FG34y4QOla3ruXrxxWXp7JrnUGIwLhcneS007Ji6zW1j7U1/MTPtu2XaErdyocZJ/19vrh8wXizk4pFXH6I3F/+MORuOOO2UI1T6fTBp8P6O31md4YPlIOWiSMtBhYsKASxx1XAb8/iMWL9wqvZ2d7kJYWOqbkOE54zhARF20kbuTIXFx33RQ88cR6dHZ6JcuQY7TFQCLDRNxhAvG+y2GROOMYTdZlMAYLUlmPFd9hMKIjOzt5I3E7drSC4/gB98iRuaZ9Py1kOju9cWinFNfDSG673H6bluZCb68fa9bUY/bsEkV7rlFyc1Nw003TFasQa0FviwGCy8UXJhkyJA2NjT047rihGDEiR1IXgOD1BoTK3WbZKQFgyJA0HHlkGbKzPQgGOdVtMLqNdXXdqK/vRkFBqin9EQeT+LiSGJajHokb5BVhMBiDxt69vIhjDekZDH1kZbkl+c7JGIkj44KdO9sA8ELGzIldt9sh2Lg7OryUnTI+Bh70YJ/0tIsGIrBWrarFjh2tUX8fjcNhR05OiuHm2SQSp1fgEMhEoM8XRFqaCw6HHWefPVpyXdAl+s2yUwJ8sOHEEysxa1ZJWOcTOb/0uqM2bWrCO+/swMaNTVGtZyxgkbjDBJYTx2AcftB2SgaDoZ20NBfKyzNQX98DIDlz4uQOHSvuE1lZbvT1+QdEHD/IjxcXCz3YLymJPoeNthjG29hKjFIZE1UVFZn47rt6oQ0FAEyaNAQTJxagt9cPmw1ITRW33+s1LxKnFaOROLFP3OBU/jQTJuIOc1ixAwYjeREjcUzEMRhaOProcixdehDXXTcZ69Y1YM2aegDJKeJycqTRRSts11lZbjQ09KCjox8+Hx+diRc7JW1NrKyMvmE1/X1WiLhly2rQ3NyLCRMKkJbmQk6OR/MYrqeHz6UjBVL0MmJEDs4+e3TIOWKz2RTz40aMyEF6ukuxbYcRuru9aGnpQ1qaS9WmajTaOGZMHnJzU1BQwAqbMOKUvr7QZFiPx5GQJVUZDIYye/deja1bW3DllZ+ipqaLReIYDJ28884ZWLLkIE49tQq33vqV8Hoy2inHjy+Ay2UX8pessF2T/RaPdkq324HVqy+GzSYtqW8UUnYfsEbEbd7cjJqaTnz3HT+x8JOfjNVUhKanx4eWFr43cHGxsYhjVpYHkyYNkbx28GAnmpt7UVqagSFD0iRFR+bOLTW0HDW+/roGK1cewpFHluHEEysV31NQkIrKyuyQyYlIlJRkoKQkMVMOmIg7TFAqSzt1auGghroZDIa1DBuWjWHDsoWZSNIjjhU2YTC0kZeXirPOGgVAKmqSMRKXkuLElCmFWL26DoA1kz2kIEw82ikBYMaMYtO+a+jQLKEpthUijhaJgL5KlfPnD0N7e7/E8hgtS5cexPbtfHNzl8uBBQuGYebMEtO+n0berkKJefPKMW+eOe0xEoX4iGkzLKevL1TEzZ5tzcXGYDBii9yuxAqbMBj6qagQe4Amo4gDgDlzxHGANXZKPirS3k7bKeMjEmc2Y8fmY8SIHADWFI2TV6YkFslIpKW5MG9eOU49dYSp60OOJ/mZXp+eHh98vgA4Tl/PNjWys8XziCHCRNxhQn9/6MU+Z4654W4GgxEf0MnrHo/D1FLXDMbhAm09S0Y7JSAdB1hV2AQAOjr6hUhcvOTEWQFpNG1NJE4aRTPaM84syPEk9PT4hJ8feeQ7/PnPK9Hc3GvKsiJF4ny+AOrquiXC8nCA2SkPE5Q60c+aZZ6NgMFgxA/0THdZWQZr9M1gGIAudJCskTh6HGCkV1ok6MG32Ow7fuyUZsJxHKZPL8KwYVmWCGJ5JK6316fyTuk6bd7cjLKyDGRne0x9FoSujygqzWr2TSCTKGoNv59+eiPq6rqRlubCGWeMRHW1OQ3r4x0m4g4T/v3vBbjkko/w+9/PxWuvbYXb7cDw4dFXY2IwGPEHHYlLxIpbDEY8MHFiAc49dzTy81OTVniMGJGDSy4Zh0CAM1z0Ihx0YZNkt1PW1/fgzTe3Iz3dhWOPHWr698tz4rTYKZuaevHGG9vgcjnwm9/MMtwnTomTT65EV5cX6eku7NjRKqxPIBAUIpJm9IkDxDZZgUAQfX1+SW5fe3s/6uq6AfAN5q2YjIhXmIg7TBgzJh+rV18CADjjjJExXhsGg2EltF1JrUckg8EIj81mw+uv/yjWq2EpNpsNL7xwimXfr2SnTFZBTERWb69fMVoULeEiX2ocOMD3dSsryzDcI06NvLxU/Oxnk7FtWwt27GgV1odE4QDzInFOpx0ulwM+XwC9vVIRR3rXlZRk4JprJpuyvESBiTgGg8FIMujZVibiGAxGrFCyUyZrJI6IrGCQQ1tbP3JzzemRRhg1KheZmW50dnqRl5eKiRMLIn6GCBwr28yQ7SY5caRlhd1uM1U4HndcBRwOe0hvPCJUD8cqzEzEMRgMRpJBR+LM6H/EYDAYRpDaKYmIS87CJnSEcffuNkyfbm7dAY/HKUT3zjlnFMrKIouWwRA4GRkulJVlCv3ZzM6HIxxxRJni62QbD8d+qEzEMRgMRpLBInEMBiMeoO2URIAkq52SRqmtU7RwHIfUVCe83kDEaqkdHf144YVNaGriq0Na2WYmLy8VV189CQBfCf3RR9cBMF/EKeH3B4V8OBaJYzAYDEbCw3LiGAxGPFBSwhdL2bWrHcOG8X33ktVOCQClpRk4dKgLY8eaXx3RZrPhuuumgOM4dHR4UVvbhaKidMV2Bjt2tFICLhPp6YPjyKit7RZ+HjPG3H3Q0dGPtrZ+ZGa6kZubAo7jwHEcTj21Cg0NPUIk8HCCiTgGg8FIMqR2SibiGAxGbJg0aQjcbgeam3vR18cXvkhWOyUAXHnlRPT1+S0XTQ89tBYcx+Gqqybhgw92wesN4pe/nCb8feLEISgpyUBLS++gltsnwrGyMhunnFJl6nd//fVBrF5dh2HDsuD1BpGV5caFF47FtGlFpi4nkWAijsFgMJIMZqdkMBjxgMfjxNSphfj221p0d/OFL8wqOx+POBx2ywWczWZDaqoTPT0+rFvXINgJvd6AYGF0ux0oLc1AaengltsfOpS3NB461IVgkDO16TmpSLlvXwcAoKHBbkkV0EQiea8kBoPBOExhdkoGgxEvzJlTIvk9mSNxgwWpCLlpU5PwWmenN1arI1BQkAan0w6vN4CGhh5Tv1veJy8QCOKLL/YLgvFwhF1JDAaDkWTQkThWnZLBYMSS2bOZiDObtDR+co5YVAE+Z4ywdm0dvvxyPxoaukM+ayV2u01oJfHkk+tN/W55nzyAt1guWvS90Ej+cINdSQwGg5FksEgcg8GIF+SROHZPih4lQdPRIUbiNmxoxJIlB4QctcGkooIv
YJOfn2rq9xLhKqewMA0ez+GZHZbQIq61tRWXXHIJsrOzkZ2djUsuuQRtbW1hP/PTn/4UNptN8m/OnDmDs8IMBoMxCLCcOAaDES8MH56D++8/BmedNQp33DEbEyZEblLNCE9RUVrIa+3tYiSut5eP0CmJPas5//xqzJxZjPPPrzb1e+ltKS5Ox+TJhRg3Lh8nn1xp6nISiYSWrj/5yU9w8OBBfPLJJwCAn/3sZ7jkkkvw/vvvh/3cySefjGeffVb43e1mdiMGg5E8sOqUDAYjnrjttpmxXoWkQqknGh2J6+nhi8iQYiCDSUaGG6eeOsL076Vz4ioqMi1ZRqKRsCJuy5Yt+OSTT7By5UrMnj0bALBo0SLMnTsX27ZtQ3W1+gyAx+NBcXHxYK0qg8FgDCoOB7NTMhgMRrJSXp6JoqJ01NfzOW9DhqQJ93qO44RInLwYSCJD53dHanZ+uJCwdsoVK1YgOztbEHAAMGfOHGRnZ2P58uVhP/vVV1+hsLAQo0ePxtVXX42Ghoaw7+/v70dHR4fkH4PBYMQrdDNdJuIYDAYjuUhLc2HevDIAfE+2X/xiKo47bii6urzo6vIJ1RpjYae0CrfbgaqqHABAVhZz0AEJHImrq6tDYWFhyOuFhYWoq6tT/dzChQtx7rnnYtiwYdizZw/uvPNOHH/88Vi7di08HmVlf++99+Kee+4xbd0ZDAbDSuhIHLNTMhgMRvIxblw+KiqmIxAQy+svWvS9kBvndNrhcjlitXqWcNFFY9HV5YPHk1zbZZS4i8TdfffdIYVH5P/WrFkDAIoN/iI1/jv//PNx6qmnYsKECTj99NPx8ccfY/v27fjwww9VP3PHHXegvb1d+HfgwIHoN5TBYDAsgr4Fskgcg8FgJB8Ohx05OSmSKpB0cRO1ao6JjMNhR3a2BykpCRuDMpW42wvXX389LrjggrDvqaysxPfff4/6+vqQvzU2NqKoqEjz8kpKSjBs2DDs2LFD9T0ej0c1SsdgMBjxRn+/2DOHiTgGg8E4PFi4sAoff7wbeXmpOPfc0bFeHYbFxJ2IKygoQEFB5PKzc+fORXt7O1atWoVZs2YBAL799lu0t7fjiCOO0Ly85uZmHDhwACUlJZHfzGAwGAkALeLYjCWDwWAcHpSVZQDgm4AXF6fHeG0YVhN3dkqtjB07FieffDKuvvpqrFy5EitXrsTVV1+N0047TVKZcsyYMXjnnXcAAF1dXbjtttuwYsUK7N27F1999RVOP/10FBQU4KyzzorVpjAYDIap9PX5hZ/D2csZDAaDkTwUF6fD4bCjp8eH1ta+WK8Ow2ISVsQBwH/+8x9MnDgRCxYswIIFCzBp0iS8+OKLkvds27YN7e3tAACHw4GNGzfijDPOwOjRo3HZZZdh9OjRWLFiBTIzQ3tuMBgMRiLS1xeI/CYGg8FgJBVOpx2BQBAA8M03NTFeG4bVJLTPJi8vDy+99FLY93CcWLUnNTUVn376qdWrxWAwGDGFjsQxGAwG4/Bh1Khc7NjRCoeDuTCSnYQWcQwGg8EIhYk4BoPBODw555zR2LatBWPG5MV6VRgWw0Qcg8FgJBnMTslgMBiHJykpTkyeHNpHmZF8JHROHIPBYDBCYZE4BoPBYDCSGybiGAwGI8lgkTgGg8FgMJIbJuIYDAYjyRgxIjvWq8BgMBgMBsNCmIhjMBiMJOP55xfivPOqsXLlRbFeFQaDwWAwGBbACpswGAxGkjFsWDZee+30WK8Gg8FgMBgMi2CROAaDwWAwGAwGg8FIIJiIYzAYDAaDwWAwGIwEgok4BoPBYDAYDAaDwUggmIhjMBgMBoPBYDAYjASCiTgGg8FgMBgMBoPBSCCYiGMwGAwGg8FgMBiMBIKJOAaDwWAwGAwGg8FIIJiIYzAYDAaDwWAwGIwEgok4BoPBYDAYDAaDwUggmIhjMBgMBoPBYDAYjASCiTgGg8FgMBgMBoPBSCCcsV6BRITjOABAR0dHjNeEwWAwGAwGg8FgJAtEXxC9oQYTcQbo7OwEAFRUVMR4TRgMBoPBYDAYDEay0dnZiezsbNW/27hIMo8RQjAYxKFDh5CZmQmbzRbr1UFHRwcqKipw4MABZGVlxXp1GCbCjm3ywo5t8sKObfLCjm1ywo5r8pKIx5bjOHR2dqK0tBR2u3rmG4vEGcBut6O8vDzWqxFCVlZWwpygDH2wY5u8sGObvLBjm7ywY5ucsOOavCTasQ0XgSOwwiYMBoPBYDAYDAaDkUAwEcdgMBgMBoPBYDAYCQQTcUmAx+PBXXfdBY/HE+tVYZgMO7bJCzu2yQs7tskLO7bJCTuuyUsyH1tW2ITBYDAYDAaDwWAwEggWiWMwGAwGg8FgMBiMBIKJOAaDwWAwGAwGg8FIIJiIYzAYDAaDwWAwGIwEgok4BoPBYDAYDAaDwUggmIhLAh5//HEMHz4cKSkpmD59Or7++utYrxIjDEuXLsXpp5+O0tJS2Gw2vPvuu5K/cxyHu+++G6WlpUhNTcWxxx6LTZs2Sd7T39+PG264AQUFBUhPT8ePfvQjHDx4cBC3giHn3nvvxcyZM5GZmYnCwkKceeaZ2LZtm+Q97NgmJk888QQmTZokNIudO3cuPv74Y+Hv7LgmD/feey9sNhtuuukm4TV2fBOTu+++GzabTfKvuLhY+Ds7rolNTU0NLr74YuTn5yMtLQ1TpkzB2rVrhb8fDseXibgE57XXXsNNN92E3/3ud1i3bh2OOuooLFy4EPv374/1qjFU6O7uxuTJk/Hoo48q/v1vf/sbHnzwQTz66KNYvXo1iouLceKJJ6Kzs1N4z0033YR33nkHr776Kr755ht0dXXhtNNOQyAQGKzNYMhYsmQJfvGLX2DlypVYvHgx/H4/FixYgO7ubuE97NgmJuXl5bjvvvuwZs0arFmzBscffzzOOOMMYUDAjmtysHr1ajz11FOYNGmS5HV2fBOX8ePHo7a2Vvi3ceNG4W/suCYura2tOPLII+FyufDxxx9j8+bN+Pvf/46cnBzhPYfF8eUYCc2sWbO4a6+9VvLamDFjuN/85jcxWiOGHgBw77zzjvB7MBjkiouLufvuu094ra+vj8vOzuaefPJJjuM4rq2tjXO5XNyrr74qvKempoaz2+3cJ598MmjrzghPQ0MDB4BbsmQJx3Hs2CYbubm53L///W92XJOEzs5ObtSoUdzixYu5Y445hrvxxhs5jmPXbSJz1113cZMnT1b8Gzuuic3tt9/OzZs3T/Xvh8vxZZG4BMbr9WLt2rVYsGCB5PUFCxZg+fLlMVorRjTs2bMHdXV1kmPq8XhwzDHHCMd07dq18Pl8kveUlpZiwoQJ7LjHEe3t7QCAvLw8AOzYJguBQACvvvoquru7MXfuXHZck4Rf/OIXOPXUUzF//nzJ6+z4JjY7duxAaWkphg8fjgsuuAC7d+8GwI5rovPee+9hxowZOPfcc1FYWIipU6di0aJFwt8Pl+PLRFwC09TUhEAggKKiIsnrRUVFqKuri9FaMaKBHLdwx7Surg5utxu5ubmq72HEFo7jcMstt2DevHm
YMGECAHZsE52NGzciIyMDHo8H1157Ld555x2MGzeOHdck4NVXX8V3332He++9N+Rv7PgmLrNnz8YLL7yATz/9FIsWLUJdXR2OOOIINDc3s+Oa4OzevRtPPPEERo0ahU8//RTXXnstfvnLX+KFF14AcPhct85YrwAjemw2m+R3juNCXmMkFkaOKTvu8cP111+P77//Ht98803I39ixTUyqq6uxfv16tLW14a233sJll12GJUuWCH9nxzUxOXDgAG688UZ89tlnSElJUX0fO76Jx8KFC4WfJ06ciLlz52LEiBF4/vnnMWfOHADsuCYqwWAQM2bMwF/+8hcAwNSpU7Fp0yY88cQTuPTSS4X3JfvxZZG4BKagoAAOhyNkxqChoSFk9oGRGJDKWeGOaXFxMbxeL1pbW1Xfw4gdN9xwA9577z18+eWXKC8vF15nxzaxcbvdGDlyJGbMmIF7770XkydPxsMPP8yOa4Kzdu1aNDQ0YPr06XA6nXA6nViyZAkeeeQROJ1O4fiw45v4pKenY+LEidixYwe7bhOckpISjBs3TvLa2LFjhaJ+h8vxZSIugXG73Zg+fToWL14seX3x4sU44ogjYrRWjGgYPnw4iouLJcfU6/ViyZIlwjGdPn06XC6X5D21tbX44Ycf2HGPIRzH4frrr8fbb7+NL774AsOHD5f8nR3b5ILjOPT397PjmuCccMIJ2LhxI9avXy/8mzFjBi666CKsX78eVVVV7PgmCf39/diyZQtKSkrYdZvgHHnkkSEtfLZv345hw4YBOIyet4NfS4VhJq+++irncrm4p59+mtu8eTN30003cenp6dzevXtjvWoMFTo7O7l169Zx69at4wBwDz74ILdu3Tpu3759HMdx3H333cdlZ2dzb7/9Nrdx40buwgsv5EpKSriOjg7hO6699lquvLyc+/zzz7nvvvuOO/7447nJkydzfr8/Vpt12HPddddx2dnZ3FdffcXV1tYK/3p6eoT3sGObmNxxxx3c0qVLuT179nDff/8999vf/paz2+3cZ599xnEcO67JBl2dkuPY8U1Ubr31Vu6rr77idu/eza1cuZI77bTTuMzMTGF8xI5r4rJq1SrO6XRyf/7zn7kdO3Zw//nPf7i0tDTupZdeEt5zOBxfJuKSgMcee4wbNmwY53a7uWnTpgklzRnxyZdffskBCPl32WWXcRzHl8a96667uOLiYs7j8XBHH300t3HjRsl39Pb2ctdffz2Xl5fHpaamcqeddhq3f//+GGwNg6B0TAFwzz77rPAedmwTkyuuuEK4xw4ZMoQ74YQTBAHHcey4JhtyEceOb2Jy/vnncyUlJZzL5eJKS0u5s88+m9u0aZPwd3ZcE5v333+fmzBhAufxeLgxY8ZwTz31lOTvh8PxtXEcx8UmBshgMBgMBoPBYDAYDL2wnDgGg8FgMBgMBoPBSCCYiGMwGAwGg8FgMBiMBIKJOAaDwWAwGAwGg8FIIJiIYzAYDAaDwWAwGIwEgok4BoPBYDAYDAaDwUggmIhjMBgMBoPBYDAYjASCiTgGg8FgMBgMBoPBSCCYiGMwGAwGg8FgMBiMBIKJOAaDwWAwDHDsscfCZrPFejUYDAaDcRjijPUKMBgMBoMRa/SKMY7jLFoTBoPBYDAiw0Qcg8FgMA577rrrrpDX7rnnHmRnZ+Omm25S/MwLL7yAnp4ei9eMwWAwGIxQbBybTmQwGAwGIwSbzYZhw4Zh7969sV4VBoPBYDAksJw4BoPBYDAMoJQT99xzz8Fms+G5557D+++/j9mzZyMtLQ1lZWW48847EQwGAQD/+c9/MHXqVKSmpmLo0KF44IEHFJfBcRyeeeYZHHnkkcjKykJaWhpmzJiBZ555xvLtYzAYDEb8wuyUDAaDwWCYzDvvvIPPPvsMZ555Jo488kh8+OGH+NOf/gSO45Cbm4s//OEPOOOMM3D00Ufjrbfewq9+9SuUlJTgoosuEr6D4zhcfPHFePnllzF69Gj85Cc/gdvtxuLFi3HllVdi8+bNquKPwWAwGMkNs1MyGAwGg6FAJDvlscceiyVLlkiKnDz33HO4/PLL4XK5sGzZMsycORMA0NnZiZEjR6KrqwtZWVlYtmwZqqqqAAAHDhzAyJEjMWbMGGzYsEH4rkWLFuFnP/sZrrzySjz55JNwOvl5V6/Xix//+Md4//33sWbNGkyfPt2iPcBgMBiMeIXZKRkMBoPBMJmLLrpIEHAAkJmZidNOOw09PT247rrrBAEHABUVFZg3bx42bdoEv98vvP7oo48iPT0djz76qCDgAMDtduPPf/4zAOCVV14ZhK1hMBgMRrzB7JQMBoPBYJjM1KlTQ14rKSkBAEyZMkXxb4FAAPX19SgrK0NPTw82btyI0tJS3HfffSHv9/l8AICtW7eau+IMBoPBSAiYiGMwGAwGw2SysrJCXiPRtHB/I+KstbUVHMehpqYG99xzj+pyuru7zVhdBoPBYCQYTMQxGAwGgxFnEKE3ffp0rFmzJsZrw2AwGIx4g+XEMRgMBoMRZ2RmZmLs2LHYsmUL2traYr06DAaDwYgzmIhjMBgMBiMO+eUvf4menh5cffXVirbJPXv2sEbkDAaDcZjC7JQMBoPBYMQh11xzDVauXInnn38ey5Ytw/z581FaWor6+nps3boV3377LV5++WVUVlbGelUZDAaDMcgwEcdgMBgMRhxis9nw3HPP4ZRTTsGiRYvwwQcfoKurC4WFhRg1ahQeeOABzJ8/P9aryWAwGIwYwJp9MxgMBoPBYDAYDEYCwXLiGAwGg8FgMBgMBiOBYCKOwWAwGAwGg8FgMBIIJuIYDAaDwWAwGAwGI4FgIo7BYDAYDAaDwWAwEggm4hgMBoPBYDAYDAYjgWAijsFgMBgMBoPBYDASCCbiGAwGg8FgMBgMBiOBYCKOwWAwGAwGg8FgMBIIJuIYDAaDwWAwGAwGI4FgIo7BYDAYDAaDwWAwEggm4hgMBoPBYDAYDAYjgWAijsFgMBgMBoPBYDASiP8HSdYwsc0Hc74AAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "tme.plot(val_dataset)" + ] + }, + { + "cell_type": "markdown", + "id": "4e8e7c63", + "metadata": {}, + "source": [ + "## Finetune the Model" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9846b368", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([32, 1, 1])) that is different to the input size (torch.Size([32, 512, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([32, 1, 8, 1])) that is different to the input size (torch.Size([32, 512, 8, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([32, 1, 32, 1])) that is different to the input size (torch.Size([32, 512, 32, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([32, 1, 64, 1])) that is different to the input size (torch.Size([32, 512, 64, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([13, 1, 1])) that is different to the input size (torch.Size([13, 512, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([13, 1, 8, 1])) that is different to the input size (torch.Size([13, 512, 8, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([13, 1, 32, 1])) that is different to the input size (torch.Size([13, 512, 32, 1])). This will likely lead to incorrect results due to broadcasting. 
Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n", + "/nethome/sli999/anaconda3/envs/torch/lib/python3.11/site-packages/torch/nn/modules/loss.py:1100: UserWarning: Using a target size (torch.Size([13, 1, 64, 1])) that is different to the input size (torch.Size([13, 512, 64, 1])). This will likely lead to incorrect results due to broadcasting. Please ensure they have the same size.\n", + " return F.huber_loss(input, target, reduction=self.reduction, delta=self.delta)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 0, Loss: 0.1356\n", + "Epoch 1, Loss: 0.1170\n", + "Epoch 2, Loss: 0.1028\n", + "Epoch 3, Loss: 0.0947\n", + "Epoch 4, Loss: 0.0839\n" + ] + } + ], + "source": [ + "tme.finetune(train_dataset)" + ] + }, + { + "cell_type": "markdown", + "id": "34778f0f", + "metadata": {}, + "source": [ + "## Evaluate the Finetuned Model" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "69a7262c", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'mse': 1.3270599, 'mae': 0.86829454, 'mase': 2.9995377, 'mape': 0.10158511, 'rmse': 1.1519809, 'nrmse': 0.1210782115435668, 'smape': 1.8951176, 'msis': 0.12658486, 'nd': 38.843448613719595}\n" + ] + } + ], + "source": [ + "metrics = tme.evaluate(val_dataset)\n", + "print(metrics)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "torch", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/leaderboard/monash_moment.csv b/leaderboard/monash_moment.csv index c5a026a..c5800b2 100644 --- a/leaderboard/monash_moment.csv +++ b/leaderboard/monash_moment.csv @@ -24,4 +24,5 @@ pedestrian_counts (1h),33.02,17.14m,0.2851085662841797,0.2073741853237152,2.3993 solar_10_minutes (10min),33.4,646.16s,0.7062894701957703,0.5957131385803223,14.92678165435791,-0.1445342004299163,0.8404102921485901,0.2103408826278452,1.2018007040023804,0.1611537635326385,-21.298476220611,, traffic_hourly (1h),104.18,20.1m,1.11533522605896,0.7301473021507263,2.485908031463623,-0.3488401472568512,1.0560942888259888,0.0201896183480336,1.5637468099594116,0.1347162723541259,16.947088589413003,, temperature_rain (1D),106.72,20.63m,7.422463893890381,0.9501174688339232,1.2810786962509155,7.790842533111572,2.7244198322296143,0.0030026609462776,1.664116382598877,0.1432362198829651,3.2250580528488424,, -solar_4_seconds (4s),181.73,503.3m,0.000896792218554765,0.0017341896891593933,56.58272933959961,0.05136888101696968,0.02994648925960064,0.011800645700475143,0.07888095080852509,2783.650146484375,-1.174730302450018,, +solar_4_seconds (4s),181.73,503.3m,0.0008967922185547,0.0017341896891593,56.58272933959961,0.0513688810169696,0.0299464892596006,0.0118006457004751,0.078880950808525,2783.650146484375,-1.174730302450018,, +wind_4_seconds (4s),184.15,510.79m,0.0011861467501148582,0.0021924355532974005,27.476261138916016,0.04878073185682297,0.034440480172634125,0.009859649579947491,0.09420329332351685,2713.079345703125,-1.621489882769975,, diff --git a/src/samay/dataset.py b/src/samay/dataset.py index 247a335..e9a7d3d 100644 --- a/src/samay/dataset.py +++ b/src/samay/dataset.py @@ -1593,3 +1593,141 @@ def 
get_dataloader(self): return DataLoader(self.dataset, batch_size=self.batchsize, shuffle=False) else: return DataLoader(self.dataset, batch_size=self.batchsize, shuffle=False) + + +class TimeMoEDataset(BaseDataset): + """ + Dataset class for TimeMoE model + Data Format: + Dict with keys: + input_ts: np.ndarray, historical time series data + actual_ts: np.ndarray, actual time series data + """ + + def __init__( + self, + name=None, + datetime_col=None, + path=None, + batch_size=16, + mode="train", + boundaries=[0, 0, 0], + task_name="evaluation", + stride=10, + context_len=512, + horizon_len=96, + **kwargs, + ): + super().__init__( + name=name, + datetime_col=datetime_col, + path=path, + batchsize=batch_size, + mode=mode, + ) + self.context_len = context_len + self.horizon_len = horizon_len + self.task_name = task_name + + self.stride = stride + self.boundaries = boundaries + + self.pad = False + self._read_data() + + + def _read_data(self): + self.df = pd.read_csv(self.data_path) + + if self.boundaries[0] == 0: + self.boundaries[0] = int(len(self.df) * 0.5) + if self.boundaries[1] == 0: + self.boundaries[1] = int(len(self.df) * 0.7) + if self.boundaries[2] == 0: + self.boundaries[2] = int(len(self.df) - 1) + + if self.boundaries == [-1, -1, -1]: + # use all data for training + self.boundaries = [0, 0, len(self.df) - 1] + + self.horizon_len = min(self.horizon_len, int(0.3*len(self.df)+1)) + + self.n_channels = self.df.shape[1] - 1 + + if self.datetime_col: + self.df.drop(columns=[self.datetime_col], inplace=True) + + self.df = np.array(self.df) + + if self.mode == "train": + self.data = self.df[slice(0, self.boundaries[0]), :] + + elif self.mode == "test": + self.data = self.df[slice(self.boundaries[1], self.boundaries[2]), :] + + scaler = StandardScaler() + scaler = scaler.fit(self.df[slice(0, self.boundaries[0]), :]) + self.data = scaler.transform(self.data) + + self.length_timeseries = self.data.shape[0] + self.required_len = self.context_len + self.horizon_len + self.pad_len = 0 + if self.length_timeseries < self.required_len: + self.pad = True + self.pad_sequence() + + def pad_sequence(self): + self.pad_len = self.required_len - self.length_timeseries + # Pad data with zeros from the left + if self.pad: + self.data = np.pad( + self.data, ((self.pad_len, 0), (0, 0)) + ) + self.length_timeseries = self.data.shape[0] + self.num_windows = 1 + (self.length_timeseries - self.context_len - self.horizon_len) // self.stride + + + def __getitem__(self, index): + channel_idx = index // self.num_windows + seq_start = self.stride * (index % self.num_windows) + seq_end = seq_start + self.context_len + + if self.task_name == "evaluation": + pred_end = seq_end + self.horizon_len + + if pred_end > self.length_timeseries: + pred_end = self.length_timeseries + seq_end = pred_end - self.horizon_len + seq_start = seq_end - self.context_len + + # input_seq = self.data[seq_start:seq_end, :].T + input_seq = self.data[seq_start:seq_end, channel_idx] + forecast_seq = self.data[seq_end:pred_end, channel_idx] + return input_seq, forecast_seq + + elif self.task_name == "finetune": + pred_end = seq_end + 1 + if pred_end > self.length_timeseries: + pred_end = self.length_timeseries + seq_end = pred_end - 1 + seq_start = seq_end - self.context_len + + input_seq = self.data[seq_start:seq_end, channel_idx] # shape: (context_len, ) + forecast_seq = self.data[seq_end:pred_end, channel_idx] + loss_mask = np.ones(input_seq.shape[0]) + return input_seq, forecast_seq, loss_mask + + + + def __len__(self): + if 
self.length_timeseries < self.context_len + self.horizon_len: + return 1 * self.n_channels + return self.n_channels * self.num_windows + + + def get_data_loader(self): + if self.mode == 'train': + return DataLoader(self, shuffle=True, batch_size=self.batchsize) + else: + return DataLoader(self, shuffle=False, batch_size=self.batchsize) + # shape: (batch_size, n_channels, seq_len) \ No newline at end of file diff --git a/src/samay/model.py b/src/samay/model.py index eb8f2ac..81d8908 100644 --- a/src/samay/model.py +++ b/src/samay/model.py @@ -46,6 +46,10 @@ from .models.TinyTimeMixer.models.tinytimemixer.modeling_tinytimemixer import ( TinyTimeMixerForPrediction, ) + +from .models.Time_MoE.time_moe.models.modeling_time_moe import TimeMoeForPrediction +from .models.Time_MoE.time_moe.models.configuration_time_moe import TimeMoeConfig + from .utils import get_least_used_gpu, visualize @@ -2041,6 +2045,134 @@ def finetune(self, dataset, **kwargs): print("Fineuned model updated") +class TimeMoEModel(Basemodel): + def __init__(self, config=None, repo=None, **kwargs): + super().__init__(config=config, repo=repo) + if repo: + self.model = TimeMoeForPrediction.from_pretrained(repo) + else: + t_config = TimeMoeConfig(**self.config) + self.model = TimeMoeForPrediction(t_config) + + def finetune(self, dataset, **kwargs): + """ + Finetune the model on the given dataset. + Args: + dataset: dataset for finetuning + """ + # Implement finetuning logic here + dataloader = dataset.get_data_loader() + self.model.to(self.device) + self.model.train() + optimizer = torch.optim.Adam(self.model.parameters(), lr=1e-4) + for epoch in range(5): + total_loss = 0 + for i, data in enumerate(dataloader): + context, forecast_seq, loss_mask = data + context = context.float().to(self.device) + forecast_seq = forecast_seq.float().to(self.device) + loss_mask = loss_mask.float().to(self.device) + optimizer.zero_grad() + output = self.model(input_ids=context, labels=forecast_seq, loss_masks=loss_mask) + loss = output.loss + loss.backward() + optimizer.step() + total_loss += loss.item() + avg_loss = total_loss / len(dataloader) + print(f"Epoch {epoch}, Loss: {avg_loss:.4f}") + + self.model.eval() + + + def plot(self, dataset, **kwargs): + """ + Plot the results of the model on the given dataset. + Args: + dataset: dataset for plotting + """ + # Implement plotting logic here + dataloader = dataset.get_data_loader() + self.model.to(self.device) + self.model.eval() + trues, preds, histories = [], [], [] + with torch.no_grad(): + for data in dataloader: + context, forecast_seq = data + context = context.float().to(self.device) + forecast_seq = forecast_seq.float().to(self.device) + output = self.model.generate(inputs=context, max_new_tokens=forecast_seq.shape[1]) + pred = output[:, -forecast_seq.shape[1]:] + pred = pred.cpu().numpy() + true = forecast_seq.cpu().numpy() + history = context.cpu().numpy() + trues.append(true) + preds.append(pred) + histories.append(history) + trues = np.concatenate(trues, axis=0).reshape(-1, dataset.n_channels, dataset.horizon_len) + preds = np.concatenate(preds, axis=0).reshape(-1, dataset.n_channels, dataset.horizon_len) + histories = np.concatenate(histories, axis=0).reshape(-1, dataset.n_channels, dataset.context_len) + + visualize( + task_name="forecasting", + trues=trues, + preds=preds, + history=histories, + ) + + def evaluate(self, dataset, **kwargs): + """ + Evaluate the model on the given dataset. 
+ Args: + dataset: dataset for evaluation + """ + # Implement evaluation logic here + dataloader = dataset.get_data_loader() + self.model.to(self.device) + self.model.eval() + trues, preds, histories = [], [], [] + + with torch.no_grad(): + for data in dataloader: + context, forecast_seq = data + context = context.float().to(self.device) + forecast_seq = forecast_seq.float().to(self.device) + output = self.model.generate(inputs=context, max_new_tokens=forecast_seq.shape[1]) + pred = output[:, -forecast_seq.shape[1]:] + pred = pred.cpu().numpy() + true = forecast_seq.cpu().numpy() + history = context.cpu().numpy() + trues.append(true) + preds.append(pred) + histories.append(history) + trues = np.concatenate(trues, axis=0).reshape(-1, dataset.n_channels, dataset.horizon_len) + preds = np.concatenate(preds, axis=0).reshape(-1, dataset.n_channels, dataset.horizon_len) + histories = np.concatenate(histories, axis=0).reshape(-1, dataset.n_channels, dataset.context_len) + + # Calculate metrics + mse = MSE(trues, preds) + mae = MAE(trues, preds) + mase = MASE(trues, preds) + mape = MAPE(trues, preds) + rmse = RMSE(trues, preds) + nrmse = NRMSE(trues, preds) + smape = SMAPE(trues, preds) + msis = MSIS(trues, preds) + nd = ND(trues, preds) + + return { + "mse": mse, + "mae": mae, + "mase": mase, + "mape": mape, + "rmse": rmse, + "nrmse": nrmse, + "smape": smape, + "msis": msis, + "nd": nd, + } + + + if __name__ == "__main__": name = "timesfm" repo = "google/timesfm-1.0-200m-pytorch" diff --git a/src/samay/models/Time_MoE/time_moe/__init__.py b/src/samay/models/Time_MoE/time_moe/__init__.py new file mode 100644 index 0000000..b4e66d1 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/__init__.py @@ -0,0 +1,2 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- diff --git a/src/samay/models/Time_MoE/time_moe/datasets/__init__.py b/src/samay/models/Time_MoE/time_moe/datasets/__init__.py new file mode 100644 index 0000000..328b194 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/__init__.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +from .binary_dataset import BinaryDataset +from .general_dataset import GeneralDataset diff --git a/src/samay/models/Time_MoE/time_moe/datasets/benchmark_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/benchmark_dataset.py new file mode 100644 index 0000000..63e5ad4 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/benchmark_dataset.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +import numpy as np +import os +import pandas as pd +from torch.utils.data import Dataset +from sklearn.preprocessing import StandardScaler + +from time_moe.datasets.general_dataset import GeneralDataset +from time_moe.utils.log_util import log_in_local_rank_0 + + +class BenchmarkEvalDataset(Dataset): + + def __init__(self, csv_path, context_length: int, prediction_length: int): + super().__init__() + self.context_length = context_length + self.prediction_length = prediction_length + + df = pd.read_csv(csv_path) + + base_name = os.path.basename(csv_path).lower() + if 'etth' in base_name: + border1s = [0, 12 * 30 * 24 - context_length, 12 * 30 * 24 + 4 * 30 * 24 - context_length] + border2s = [12 * 30 * 24, 12 * 30 * 24 + 4 * 30 * 24, 12 * 30 * 24 + 8 * 30 * 24] + elif 'ettm' in base_name: + border1s = [0, 12 * 30 * 24 * 4 - context_length, 12 * 30 * 24 * 4 + 4 * 30 * 24 * 4 - context_length] + border2s = [12 * 30 * 24 * 4, 12 * 30 * 24 * 4 + 4 * 30 * 24 * 4, 12 * 30 * 24 * 4 + 8 * 30 * 24 * 4] + else: + num_train = 
int(len(df) * 0.7) + num_test = int(len(df) * 0.2) + num_vali = len(df) - num_train - num_test + border1s = [0, num_train - context_length, len(df) - num_test - context_length] + border2s = [num_train, num_train + num_vali, len(df)] + + start_dt = df.iloc[border1s[2]]['date'] + eval_start_dt = df.iloc[border1s[2] + context_length]['date'] + end_dt = df.iloc[border2s[2] - 1]['date'] + log_in_local_rank_0(f'>>> Split test data from {start_dt} to {end_dt}, ' + f'and evaluation start date is: {eval_start_dt}') + + cols = df.columns[1:] + df_values = df[cols].values + + train_data = df_values[border1s[0]:border2s[0]] + test_data = df_values[border1s[2]:border2s[2]] + + # scaling + scaler = StandardScaler() + scaler.fit(train_data) + scaled_test_data = scaler.transform(test_data) + + # assignment + self.hf_dataset = scaled_test_data.transpose(1, 0) + self.num_sequences = len(self.hf_dataset) + # 1 for the label + self.window_length = self.context_length + self.prediction_length + + self.sub_seq_indexes = [] + for seq_idx, seq in enumerate(self.hf_dataset): + n_points = len(seq) + if n_points < self.window_length: + continue + for offset_idx in range(self.window_length, n_points): + self.sub_seq_indexes.append((seq_idx, offset_idx)) + + def __len__(self): + return len(self.sub_seq_indexes) + + def __iter__(self): + for i in range(len(self)): + yield self[i] + + def __getitem__(self, idx): + seq_i, offset_i = self.sub_seq_indexes[idx] + seq = self.hf_dataset[seq_i] + + window_seq = np.array(seq[offset_i - self.window_length: offset_i], dtype=np.float32) + + return { + 'inputs': np.array(window_seq[: self.context_length], dtype=np.float32), + 'labels': np.array(window_seq[-self.prediction_length:], dtype=np.float32), + } + + +class GeneralEvalDataset(Dataset): + + def __init__(self, data_path, context_length: int, prediction_length: int, onfly_norm: bool = False): + super().__init__() + self.context_length = context_length + self.prediction_length = prediction_length + self.onfly_norm = onfly_norm + self.window_length = self.context_length + self.prediction_length + self.dataset = GeneralDataset(data_path) + + self.sub_seq_indexes = [] + for seq_idx, seq in enumerate(self.dataset): + n_points = len(seq) + if n_points < self.window_length: + continue + for offset_idx in range(self.window_length, n_points): + self.sub_seq_indexes.append((seq_idx, offset_idx)) + + def __len__(self): + return len(self.sub_seq_indexes) + + def __iter__(self): + for i in range(len(self)): + yield self[i] + + def __getitem__(self, idx): + seq_i, offset_i = self.sub_seq_indexes[idx] + seq = self.dataset[seq_i] + + window_seq = np.array(seq[offset_i - self.window_length: offset_i], dtype=np.float32) + + inputs = np.array(window_seq[: self.context_length], dtype=np.float32) + labels = np.array(window_seq[-self.prediction_length:], dtype=np.float32) + + if self.onfly_norm: + mean_ = inputs.mean() + std_ = inputs.std() + if std_ == 0: + std_ = 1 + inputs = (inputs - mean_) / std_ + labels = (labels - mean_) / std_ + + return { + 'inputs': np.array(window_seq[: self.context_length], dtype=np.float32), + 'labels': np.array(window_seq[-self.prediction_length:], dtype=np.float32), + } diff --git a/src/samay/models/Time_MoE/time_moe/datasets/binary_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/binary_dataset.py new file mode 100644 index 0000000..4074c78 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/binary_dataset.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +import json +import 
os +import numpy as np + +from .ts_dataset import TimeSeriesDataset + + +class BinaryDataset(TimeSeriesDataset): + meta_file_name = 'meta.json' + bin_file_name_template = 'data-{}-of-{}.bin' + + def __init__(self, data_path): + if not self.is_valid_path(data_path): + raise ValueError(f'Folder {data_path} is not a valid TimeMoE dataset.') + + self.data_path = data_path + + # load meta file + meta_file_path = os.path.join(data_path, self.meta_file_name) + try: + self.meta_info = load_json_file(meta_file_path) + except Exception as e: + print(f'Error when loading file {meta_file_path}: {e}') + raise e + + self.num_sequences = self.meta_info['num_sequences'] + self.dtype = self.meta_info['dtype'] + self.seq_infos = self.meta_info['scales'] + + # process the start index for each file + self.file_start_idxes = [] + s_idx = 0 + for fn, length in sorted(self.meta_info['files'].items(), key=lambda x: int(x[0].split('-')[1])): + self.file_start_idxes.append( + (os.path.join(data_path, fn), s_idx, length) + ) + s_idx += length + self.num_tokens = s_idx + + def __len__(self): + return self.num_sequences + + def __getitem__(self, seq_idx): + seq_info = self.seq_infos[seq_idx] + read_info_list = self._get_read_infos_by_offset_length(seq_info['offset'], seq_info['length']) + out = [] + for fn, offset_in_file, length in read_info_list: + out.append(self._read_sequence_in_file(fn, offset_in_file, length)) + + if len(out) == 1: + sequence = out[0] + else: + sequence = np.concatenate(out, axis=0) + + if 'mean' in seq_info and 'std' in seq_info: + return sequence * seq_info['std'] + seq_info['mean'] + else: + return sequence + + def get_num_tokens(self): + return self.num_tokens + + def get_sequence_length_by_idx(self, seq_idx): + return self.seq_infos[seq_idx]['length'] + + def _get_read_infos_by_offset_length(self, offset, length): + # just use naive search + binary_read_info_list = [] + end_offset = offset + length + for fn, start_idx, fn_length in self.file_start_idxes: + end_idx = start_idx + fn_length + if start_idx <= offset < end_idx: + if end_offset <= end_idx: + binary_read_info_list.append([fn, offset - start_idx, length]) + break + else: + binary_read_info_list.append([fn, offset - start_idx, end_idx - offset]) + length = end_offset - end_idx + offset = end_idx + return binary_read_info_list + + def _read_sequence_in_file(self, fn, offset_in_file, length): + sentence = np.empty(length, dtype=self.dtype) + with open(fn, mode='rb', buffering=0) as file_handler: + file_handler.seek(offset_in_file * sentence.itemsize) + file_handler.readinto(sentence) + return sentence + + @staticmethod + def is_valid_path(data_path): + if (os.path.exists(data_path) + and os.path.isdir(data_path) + and os.path.exists(os.path.join(data_path, 'meta.json')) + ): + for sub in os.listdir(data_path): + # TODO check if lack bin file + if os.path.isfile(os.path.join(data_path, sub)) and sub.endswith('.bin'): + return True + return False + + +def load_json_file(fn): + with open(fn, encoding='utf-8') as file: + data = json.load(file) + return data + + +def save_json_file(obj, fn): + with open(fn, 'w', encoding='utf-8') as file: + json.dump(obj, file) diff --git a/src/samay/models/Time_MoE/time_moe/datasets/general_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/general_dataset.py new file mode 100644 index 0000000..cdc4ef8 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/general_dataset.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +import json +import os +import pickle +import gzip 
+import yaml +import numpy as np + +from .ts_dataset import TimeSeriesDataset + + +class GeneralDataset(TimeSeriesDataset): + def __init__(self, data_path): + self.data = read_file_by_extension(data_path) + self.num_tokens = None + + def __len__(self): + return len(self.data) + + def __getitem__(self, seq_idx): + seq = self.data[seq_idx] + if isinstance(seq, dict): + seq = seq['sequence'] + return seq + + def get_num_tokens(self): + if self.num_tokens is None: + self.num_tokens = sum([len(seq) for seq in self]) + return self.num_tokens + + def get_sequence_length_by_idx(self, seq_idx): + seq = self[seq_idx] + return len(seq) + + @staticmethod + def is_valid_path(data_path): + if os.path.exists(data_path) and os.path.isfile(data_path): + parts = data_path.split('.') + if len(parts) == 0: + return False + suffix = parts[-1] + if suffix in ('json', 'jsonl', 'npy', 'npy.gz', 'pkl'): + return True + else: + return False + else: + return False + + +def read_file_by_extension(fn): + if fn.endswith('.json'): + with open(fn, encoding='utf-8') as file: + data = json.load(file) + elif fn.endswith('.jsonl'): + data = read_jsonl_to_list(fn) + elif fn.endswith('.yaml'): + data = load_yaml_file(fn) + elif fn.endswith('.npy'): + data = np.load(fn, allow_pickle=True) + elif fn.endswith('.npz'): + data = np.load(fn, allow_pickle=True) + elif fn.endswith('.npy.gz'): + with gzip.GzipFile(fn, 'r') as file: + data = np.load(file, allow_pickle=True) + elif fn.endswith('.pkl') or fn.endswith('.pickle'): + data = load_pkl_obj(fn) + else: + raise RuntimeError(f'Unknown file extension: {fn}') + return data + + +def read_jsonl_to_list(jsonl_fn): + with open(jsonl_fn, 'r', encoding='utf-8') as file: + return [json.loads(line) for line in file.readlines()] + + +def load_yaml_file(fn): + if isinstance(fn, str): + with open(fn, 'r', encoding="utf-8") as f: + config = yaml.safe_load(f) + return config + else: + return fn + + +def load_pkl_obj(fn): + out_list = [] + with open(fn, 'rb') as f: + while True: + try: + data = pickle.load(f) + out_list.append(data) + except EOFError: + break + if len(out_list) == 0: + return None + elif len(out_list) == 1: + return out_list[0] + else: + return out_list diff --git a/src/samay/models/Time_MoE/time_moe/datasets/time_moe_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/time_moe_dataset.py new file mode 100644 index 0000000..0fe3341 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/time_moe_dataset.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +import os +import numpy as np + +from .ts_dataset import TimeSeriesDataset +from .general_dataset import GeneralDataset +from .binary_dataset import BinaryDataset + + +class TimeMoEDataset(TimeSeriesDataset): + + def __init__(self, data_folder, normalization_method=None): + self.data_folder = data_folder + self.normalization_method = normalization_method + self.datasets = [] + self.num_tokens = None + + if normalization_method is None: + self.normalization_method = None + elif isinstance(normalization_method, str): + if normalization_method.lower() == 'max': + self.normalization_method = max_scaler + elif normalization_method.lower() == 'zero': + self.normalization_method = zero_scaler + else: + raise ValueError(f'Unknown normalization method: {normalization_method}') + else: + self.normalization_method = normalization_method + + if BinaryDataset.is_valid_path(self.data_folder): + ds = BinaryDataset(self.data_folder) + if len(ds) > 0: + self.datasets.append(ds) + elif 
GeneralDataset.is_valid_path(self.data_folder): + ds = GeneralDataset(self.data_folder) + if len(ds) > 0: + self.datasets.append(ds) + else: + # walk through the data_folder + for root, dirs, files in os.walk(self.data_folder): + for file in files: + fn_path = os.path.join(root, file) + if file != BinaryDataset.meta_file_name and GeneralDataset.is_valid_path(fn_path): + ds = GeneralDataset(fn_path) + if len(ds) > 0: + self.datasets.append(ds) + for sub_folder in dirs: + folder_path = os.path.join(root, sub_folder) + if BinaryDataset.is_valid_path(folder_path): + ds = BinaryDataset(folder_path) + if len(ds) > 0: + self.datasets.append(ds) + + self.cumsum_lengths = [0] + for ds in self.datasets: + self.cumsum_lengths.append( + self.cumsum_lengths[-1] + len(ds) + ) + self.num_sequences = self.cumsum_lengths[-1] + + def __len__(self): + return self.num_sequences + + def __getitem__(self, seq_idx): + if seq_idx >= self.cumsum_lengths[-1]: + raise ValueError(f'Index out of the dataset length: {seq_idx} >= {self.cumsum_lengths[-1]}') + elif seq_idx < 0: + raise ValueError(f'Index out of the dataset length: {seq_idx} < 0') + + dataset_idx = binary_search(self.cumsum_lengths, seq_idx) + dataset_offset = seq_idx - self.cumsum_lengths[dataset_idx] + seq = self.datasets[dataset_idx][dataset_offset] + + if self.normalization_method is not None: + seq = self.normalization_method(seq) + return seq + + def get_sequence_length_by_idx(self, seq_idx): + if seq_idx >= self.cumsum_lengths[-1]: + raise ValueError(f'Index out of the dataset length: {seq_idx} >= {self.cumsum_lengths[-1]}') + elif seq_idx < 0: + raise ValueError(f'Index out of the dataset length: {seq_idx} < 0') + + dataset_idx = binary_search(self.cumsum_lengths, seq_idx) + dataset_offset = seq_idx - self.cumsum_lengths[dataset_idx] + return self.datasets[dataset_idx].get_sequence_length_by_idx(dataset_offset) + + def get_num_tokens(self): + if self.num_tokens is None: + self.num_tokens = sum([ds.get_num_tokens() for ds in self.datasets]) + + return self.num_tokens + + +def zero_scaler(seq): + if not isinstance(seq, np.ndarray): + seq = np.array(seq) + origin_dtype = seq.dtype + # std_val = seq.std(dtype=np.float64) + std_val = seq.std() + if std_val == 0: + normed_seq = seq + else: + # mean_val = seq.mean(dtype=np.float64) + mean_val = seq.mean() + normed_seq = (seq - mean_val) / std_val + + return normed_seq.astype(origin_dtype) + + +def max_scaler(seq): + if not isinstance(seq, np.ndarray): + seq = np.array(seq) + origin_dtype = seq.dtype + # max_val = np.abs(seq).max(dtype=np.float64) + max_val = np.abs(seq).max() + if max_val == 0: + normed_seq = seq + else: + normed_seq = seq / max_val + + return normed_seq.astype(origin_dtype) + + +def binary_search(sorted_list, value): + low = 0 + high = len(sorted_list) - 1 + best_index = -1 + + while low <= high: + mid = (low + high) // 2 + if sorted_list[mid] <= value: + best_index = mid + low = mid + 1 + else: + high = mid - 1 + + return best_index diff --git a/src/samay/models/Time_MoE/time_moe/datasets/time_moe_window_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/time_moe_window_dataset.py new file mode 100644 index 0000000..6630e96 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/time_moe_window_dataset.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +import random +import numpy as np + +from time_moe.datasets.ts_dataset import TimeSeriesDataset + + +class TimeMoEWindowDataset: + """ + A dataset that generates windows of time series data. 
+ """ + def __init__(self, dataset: TimeSeriesDataset, context_length: int, prediction_length: int = 0, **kwrags): + self.dataset = dataset + self.context_length = context_length + self.prediction_length = prediction_length + self.window_size = context_length + prediction_length + self.window_size_plus_one = self.window_size + 1 + + num_seqs = len(self.dataset) + iterator = range(num_seqs) + try: + from tqdm import tqdm + iterator = tqdm(iterator, total=num_seqs) + except ImportError: + pass + self.sub_seq_indexes = [] + for seq_idx in iterator: + n_points = self.dataset.get_sequence_length_by_idx(seq_idx) + # Skip sequences with fewer than 2 points + if n_points < 2: + continue + for offset_idx in range(0, n_points, self.window_size): + self.sub_seq_indexes.append((seq_idx, offset_idx)) + + def __len__(self): + return len(self.sub_seq_indexes) + + def __iter__(self): + for i in range(len(self)): + yield self[i] + + def __getitem__(self, seq_idx): + seq_i, offset_i = self.sub_seq_indexes[seq_idx] + seq = self.dataset[seq_i][offset_i: offset_i + self.window_size_plus_one] + seq = np.array(seq, dtype=np.float32) + + loss_mask = np.ones(len(seq) - 1, dtype=np.int32) + n_pad = self.window_size_plus_one - len(seq) + if n_pad > 0: + seq = np.pad(seq, (0, n_pad), 'constant', constant_values=0) + loss_mask = np.pad(loss_mask, (0, n_pad), 'constant', constant_values=0) + + return { + 'input_ids': seq[:-1], + 'labels': seq[1:], + 'loss_masks': loss_mask + } + + +class UniversalTimeMoEWindowDataset: + """ + A dataset that generates windows of time series data with pack technique. + """ + def __init__(self, dataset: TimeSeriesDataset, context_length: int, prediction_length: int = 0, + shuffle: bool = False): + self.dataset = dataset + self.context_length = context_length + self.prediction_length = prediction_length + self.window_size = context_length + prediction_length + + self.window_info_list = [] + n_seqs = len(self.dataset) + + cur_window_info = [] + num_cur_remaining_points = self.window_size + + iterator = range(n_seqs) + if shuffle: + iterator = list(iterator) + random.shuffle(iterator) + + try: + from tqdm import tqdm + iterator = tqdm(iterator, total=n_seqs) + except ImportError: + pass + + for seq_idx in iterator: + seq_len = self.dataset.get_sequence_length_by_idx(seq_idx) + remaining_seq_len = seq_len + while remaining_seq_len > 0: + if remaining_seq_len < num_cur_remaining_points: + cur_window_info.append( + (seq_idx, seq_len - remaining_seq_len, remaining_seq_len) + ) + + # update states + num_cur_remaining_points -= remaining_seq_len + remaining_seq_len = 0 + else: + # add the part of this seq to cur_window + cur_window_info.append( + (seq_idx, seq_len - remaining_seq_len, num_cur_remaining_points) + ) + + # update states + remaining_seq_len -= num_cur_remaining_points + self.window_info_list.append(cur_window_info) + + # reset current window + num_cur_remaining_points = self.window_size + cur_window_info = [] + + if num_cur_remaining_points > 0: + # drop last batch for speed-up + pass + + def __len__(self): + return len(self.window_info_list) + + def __getitem__(self, window_idx): + window_info = self.window_info_list[window_idx] + seq = [] + for seq_idx, start_idx_in_seq, offset in window_info: + part_seq = self.dataset[seq_idx][start_idx_in_seq: start_idx_in_seq + offset] + seq.append(part_seq) + if len(seq) == 1: + seq = seq[0] + if not isinstance(seq, np.ndarray): + seq = np.array(seq, dtype=np.float32) + else: + seq = seq.astype(np.float32) + else: + seq = np.concatenate(seq, 
axis=0, dtype=np.float32) + return { + 'input_ids': seq[:-1], + 'labels': seq[1:], + } diff --git a/src/samay/models/Time_MoE/time_moe/datasets/ts_dataset.py b/src/samay/models/Time_MoE/time_moe/datasets/ts_dataset.py new file mode 100644 index 0000000..50aed5d --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/datasets/ts_dataset.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python +# -*- coding:utf-8 _*- +from abc import abstractmethod + + +class TimeSeriesDataset: + @abstractmethod + def __len__(self): + pass + + @abstractmethod + def __getitem__(self, seq_idx): + pass + + @abstractmethod + def get_num_tokens(self): + pass + + @abstractmethod + def get_sequence_length_by_idx(self, seq_idx): + pass + + @staticmethod + def is_valid_path(data_path): + return True + + def __iter__(self): + n_seqs = len(self) + for i in range(n_seqs): + yield self[i] \ No newline at end of file diff --git a/src/samay/models/Time_MoE/time_moe/models/__init__.py b/src/samay/models/Time_MoE/time_moe/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/samay/models/Time_MoE/time_moe/models/configuration_time_moe.py b/src/samay/models/Time_MoE/time_moe/models/configuration_time_moe.py new file mode 100644 index 0000000..1fa6c0b --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/models/configuration_time_moe.py @@ -0,0 +1,65 @@ +from typing import List +from transformers import PretrainedConfig + + +class TimeMoeConfig(PretrainedConfig): + model_type = "time_moe" + keys_to_ignore_at_inference = ["past_key_values"] + + def __init__( + self, + input_size: int = 1, + hidden_size: int = 4096, + intermediate_size: int = 22016, + horizon_lengths: List[int] = 1, + num_hidden_layers: int = 32, + num_attention_heads: int = 32, + num_key_value_heads: int = None, + hidden_act: str = "silu", + num_experts_per_tok: int = 2, + num_experts: int = 1, + max_position_embeddings: int = 32768, + initializer_range: float = 0.02, + rms_norm_eps: float = 1e-6, + use_cache: bool = True, + use_dense: bool = False, + rope_theta: int = 10000, + attention_dropout: float = 0.0, + apply_aux_loss: bool = True, + router_aux_loss_factor: float = 0.02, + tie_word_embeddings: bool = False, + **kwargs, + ): + self.input_size = input_size + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.max_position_embeddings = max_position_embeddings + self.num_hidden_layers = num_hidden_layers + self.num_attention_heads = num_attention_heads + + if num_key_value_heads is None: + num_key_value_heads = num_attention_heads + + self.num_key_value_heads = num_key_value_heads + self.hidden_act = hidden_act + if isinstance(horizon_lengths, int): + horizon_lengths = [horizon_lengths] + self.horizon_lengths = horizon_lengths # Predict horizon length for each prediction. + self.num_experts_per_tok = num_experts_per_tok + self.num_experts = num_experts + self.initializer_range = initializer_range + self.rms_norm_eps = rms_norm_eps + self.use_cache = use_cache + self.use_dense = use_dense + self.rope_theta = rope_theta + self.attention_dropout = attention_dropout + self.apply_aux_loss = apply_aux_loss + self.router_aux_loss_factor = router_aux_loss_factor + + assert self.use_dense ^ self.apply_aux_loss, 'Both use_dense and apply_aux_loss cannot be set to True or False at the same time.' 
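+        # use_dense swaps the sparse expert layer for a plain FFN (TimeMoeMLP), while
+        # apply_aux_loss enables the router load-balancing loss on the MoE path; the XOR
+        # assert above therefore requires exactly one of the two to be True. Illustrative
+        # configurations (values here are examples, not the defaults of any checkpoint):
+        #   TimeMoeConfig(num_experts=8, num_experts_per_tok=2, apply_aux_loss=True, use_dense=False)
+        #   TimeMoeConfig(use_dense=True, apply_aux_loss=False)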
+ + kwargs.pop('tie_word_embeddings', None) + super().__init__( + tie_word_embeddings=tie_word_embeddings, + **kwargs, + ) diff --git a/src/samay/models/Time_MoE/time_moe/models/modeling_time_moe.py b/src/samay/models/Time_MoE/time_moe/models/modeling_time_moe.py new file mode 100644 index 0000000..7de4026 --- /dev/null +++ b/src/samay/models/Time_MoE/time_moe/models/modeling_time_moe.py @@ -0,0 +1,1177 @@ +import math +from typing import Optional, Tuple, List, Union +import warnings + +import torch +from torch import nn +import torch.nn.functional as F +from transformers import PreTrainedModel, Cache, DynamicCache, StaticCache +from transformers.activations import ACT2FN +from transformers.modeling_attn_mask_utils import _prepare_4d_causal_attention_mask +from transformers.modeling_outputs import MoeModelOutputWithPast, MoeCausalLMOutputWithPast +from transformers.utils import logging, is_flash_attn_2_available, is_flash_attn_greater_or_equal_2_10 + +from .configuration_time_moe import TimeMoeConfig +from .ts_generation_mixin import TSGenerationMixin + +logger = logging.get_logger(__name__) + +# if is_flash_attn_2_available(): +# from flash_attn import flash_attn_func, flash_attn_varlen_func +# from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa +try: + from flash_attn import flash_attn_func, flash_attn_varlen_func + from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input # noqa +except: + pass + + +def _get_unpad_data(attention_mask): + seqlens_in_batch = attention_mask.sum(dim=-1, dtype=torch.int32) + indices = torch.nonzero(attention_mask.flatten(), as_tuple=False).flatten() + max_seqlen_in_batch = seqlens_in_batch.max().item() + cu_seqlens = F.pad(torch.cumsum(seqlens_in_batch, dim=0, dtype=torch.int32), (1, 0)) + return ( + indices, + cu_seqlens, + max_seqlen_in_batch, + ) + + +def load_balancing_loss_func( + gate_logits: Union[torch.Tensor, Tuple[torch.Tensor], List[torch.Tensor]], + top_k: int, + num_experts: int = None, + attention_mask: Optional[torch.Tensor] = None +) -> torch.Tensor: + r""" + Computes auxiliary load balancing loss as in Switch Transformer - implemented in Pytorch. + + See Switch Transformer (https://arxiv.org/abs/2101.03961) for more details. This function implements the loss + function presented in equations (4) - (6) of the paper. It aims at penalizing cases where the routing between + experts is too unbalanced. + + Args: + gate_logits (Union[`torch.Tensor`, Tuple[torch.Tensor], List[torch.Tensor]): + Logits from the `gate`, should be a tuple of model.config.num_hidden_layers tensors of + shape [batch_size X sequence_length, num_experts]. + top_k (`int`) + Selected Top k over the experts. + attention_mask (`torch.Tensor`, None): + The attention_mask used in forward function + shape [batch_size X sequence_length] if not None. + num_experts (`int`, *optional*): + Number of experts + + Returns: + The auxiliary loss. 
+ """ + if gate_logits is None or not isinstance(gate_logits, (tuple, list)) or gate_logits[0] is None: + return 0.0 + + compute_device = gate_logits[0].device + concatenated_gate_logits = torch.cat([layer_gate.to(compute_device) for layer_gate in gate_logits], dim=0) + + routing_weights = torch.nn.functional.softmax(concatenated_gate_logits, dim=-1) + + _, selected_experts = torch.topk(routing_weights, top_k, dim=-1) + + expert_mask = torch.nn.functional.one_hot(selected_experts, num_experts) + + if attention_mask is None: + # Compute the percentage of tokens routed to each expert + tokens_per_expert = torch.mean(expert_mask.float(), dim=0) + + # Compute the average probability of routing to these experts + router_prob_per_expert = torch.mean(routing_weights, dim=0) + else: + batch_size, sequence_length = attention_mask.shape + num_hidden_layers = concatenated_gate_logits.shape[0] // (batch_size * sequence_length) + + # Compute the mask that masks all padding tokens as 0 with the same shape of expert_mask + expert_attention_mask = ( + attention_mask[None, :, :, None, None] + .expand((num_hidden_layers, batch_size, sequence_length, 2, num_experts)) + .reshape(-1, 2, num_experts) + .to(compute_device) + ) + + # Compute the percentage of tokens routed to each experts + tokens_per_expert = torch.sum(expert_mask.float() * expert_attention_mask, dim=0) / torch.sum( + expert_attention_mask, dim=0 + ) + + # Compute the mask that masks all padding tokens as 0 with the same shape of tokens_per_expert + router_per_expert_attention_mask = ( + attention_mask[None, :, :, None] + .expand((num_hidden_layers, batch_size, sequence_length, num_experts)) + .reshape(-1, num_experts) + .to(compute_device) + ) + + # Compute the average probability of routing to these experts + router_prob_per_expert = torch.sum(routing_weights * router_per_expert_attention_mask, dim=0) / torch.sum( + router_per_expert_attention_mask, dim=0 + ) + + overall_loss = torch.sum(tokens_per_expert * router_prob_per_expert.unsqueeze(dim=0)) + + return overall_loss * num_experts + + +# Copied from transformers.models.llama.modeling_llama.repeat_kv +def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor: + """ + This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch, + num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim) + """ + batch, num_key_value_heads, slen, head_dim = hidden_states.shape + if n_rep == 1: + return hidden_states + hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim) + return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim) + + +# Copied from transformers.models.llama.modeling_llama.rotate_half +def rotate_half(x): + """Rotates half the hidden dims of the input.""" + x1 = x[..., : x.shape[-1] // 2] + x2 = x[..., x.shape[-1] // 2:] + return torch.cat((-x2, x1), dim=-1) + + +# Copied from transformers.models.mistral.modeling_mistral.apply_rotary_pos_emb +def apply_rotary_pos_emb(q, k, cos, sin, position_ids, unsqueeze_dim=1): + """Applies Rotary Position Embedding to the query and key tensors. + + Args: + q (`torch.Tensor`): The query tensor. + k (`torch.Tensor`): The key tensor. + cos (`torch.Tensor`): The cosine part of the rotary embedding. + sin (`torch.Tensor`): The sine part of the rotary embedding. + position_ids (`torch.Tensor`): + The position indices of the tokens corresponding to the query and key tensors. 
For example, this can be + used to pass offsetted position ids when working with a KV-cache. + unsqueeze_dim (`int`, *optional*, defaults to 1): + The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and + sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note + that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and + k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes + cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have + the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2. + Returns: + `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding. + """ + cos = cos[position_ids].unsqueeze(unsqueeze_dim) + sin = sin[position_ids].unsqueeze(unsqueeze_dim) + q_embed = (q * cos) + (rotate_half(q) * sin) + k_embed = (k * cos) + (rotate_half(k) * sin) + return q_embed, k_embed + + +class TimeMoeInputEmbedding(nn.Module): + """ + Use a mlp layer to embedding the time-series. + """ + + def __init__(self, config: TimeMoeConfig): + super().__init__() + self.config = config + self.input_size = config.input_size # default 1 + self.hidden_size = config.hidden_size + self.emb_layer = nn.Linear(self.input_size, self.hidden_size, bias=False) + self.gate_layer = nn.Linear(self.input_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[config.hidden_act] + + def forward(self, x): + emb = self.act_fn(self.gate_layer(x)) * self.emb_layer(x) + return emb + + +# Copied from transformers.models.mistral.modeling_mistral.MistralRotaryEmbedding with Mistral->TimeMOE +class TimeMoeRotaryEmbedding(torch.nn.Module): + def __init__(self, dim, max_position_embeddings=2048, base=10000, device=None): + super().__init__() + + self.dim = dim + self.max_position_embeddings = max_position_embeddings + self.base = base + inv_freq = 1.0 / (self.base ** (torch.arange(0, self.dim, 2, dtype=torch.int64).float().to(device) / self.dim)) + self.register_buffer("inv_freq", inv_freq, persistent=False) + + # Build here to make `torch.jit.trace` work. 
+ self._set_cos_sin_cache( + seq_len=max_position_embeddings, device=self.inv_freq.device, dtype=torch.get_default_dtype() + ) + + def _set_cos_sin_cache(self, seq_len, device, dtype): + self.max_seq_len_cached = seq_len + t = torch.arange(self.max_seq_len_cached, device=device, dtype=torch.int64).type_as(self.inv_freq) + + freqs = torch.outer(t, self.inv_freq) + # Different from paper, but it uses a different permutation in order to obtain the same calculation + emb = torch.cat((freqs, freqs), dim=-1) + self.register_buffer("cos_cached", emb.cos().to(dtype), persistent=False) + self.register_buffer("sin_cached", emb.sin().to(dtype), persistent=False) + + def forward(self, x, seq_len=None): + # x: [bs, num_attention_heads, seq_len, head_size] + if seq_len > self.max_seq_len_cached: + self._set_cos_sin_cache(seq_len=seq_len, device=x.device, dtype=x.dtype) + + return ( + self.cos_cached[:seq_len].to(dtype=x.dtype), + self.sin_cached[:seq_len].to(dtype=x.dtype), + ) + + +# Copied from transformers.models.llama.modeling_llama.LlamaRMSNorm with Llama->TimeMOE +class TimeMoeRMSNorm(torch.nn.Module): + def __init__(self, hidden_size, eps=1e-6): + super().__init__() + self.weight = nn.Parameter(torch.ones(hidden_size)) + self.variance_epsilon = eps + + def forward(self, hidden_states): + input_dtype = hidden_states.dtype + hidden_states = hidden_states.to(torch.float32) + variance = hidden_states.pow(2).mean(-1, keepdim=True) + hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon) + return self.weight * hidden_states.to(input_dtype) + + +class TimeMoeTemporalBlock(nn.Module): + def __init__(self, hidden_size: int, intermediate_size: int, hidden_act: str): + super().__init__() + self.hidden_size = hidden_size + self.intermediate_size = intermediate_size + self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) + self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) + self.act_fn = ACT2FN[hidden_act] + + def forward(self, hidden_state): + return self.down_proj(self.act_fn(self.gate_proj(hidden_state)) * self.up_proj(hidden_state)) + + +class TimeMoeMLP(TimeMoeTemporalBlock): + def __init__(self, hidden_size: int, intermediate_size: int, hidden_act: str): + super().__init__(hidden_size, intermediate_size, hidden_act) + + def forward(self, hidden_state): + return super().forward(hidden_state), None + + +class TimeMoeSparseExpertsLayer(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + self.top_k = config.num_experts_per_tok + self.hidden_size = config.hidden_size + self.num_experts = config.num_experts + self.norm_topk_prob = False + + moe_intermediate_size = self.config.intermediate_size // self.top_k + + # gating + self.gate = nn.Linear(config.hidden_size, config.num_experts, bias=False) + self.experts = nn.ModuleList( + [TimeMoeTemporalBlock( + hidden_size=self.config.hidden_size, + intermediate_size=moe_intermediate_size, + hidden_act=self.config.hidden_act, + ) for _ in range(self.num_experts)] + ) + + self.shared_expert = TimeMoeTemporalBlock( + hidden_size=self.config.hidden_size, + intermediate_size=self.config.intermediate_size, + hidden_act=self.config.hidden_act, + ) + self.shared_expert_gate = torch.nn.Linear(config.hidden_size, 1, bias=False) + + def forward(self, hidden_states: torch.Tensor): + """ """ + batch_size, sequence_length, hidden_dim = hidden_states.shape + hidden_states = 
hidden_states.view(-1, hidden_dim) + # router_logits -> (batch * sequence_length, n_experts) + router_logits = self.gate(hidden_states) + + routing_weights = F.softmax(router_logits, dim=1, dtype=torch.float) + routing_weights, selected_experts = torch.topk(routing_weights, self.top_k, dim=-1) + if self.norm_topk_prob: + routing_weights /= routing_weights.sum(dim=-1, keepdim=True) + # we cast back to the input dtype + routing_weights = routing_weights.to(hidden_states.dtype) + + final_hidden_states = torch.zeros( + (batch_size * sequence_length, hidden_dim), dtype=hidden_states.dtype, device=hidden_states.device + ) + + # One hot encode the selected experts to create an expert mask + # this will be used to easily index which expert is going to be sollicitated + expert_mask = torch.nn.functional.one_hot(selected_experts, num_classes=self.num_experts).permute(2, 1, 0) + + # Loop over all available experts in the model and perform the computation on each expert + for expert_idx in range(self.num_experts): + expert_layer = self.experts[expert_idx] + idx, top_x = torch.where(expert_mask[expert_idx]) + + # Index the correct hidden states and compute the expert hidden state for + # the current expert. We need to make sure to multiply the output hidden + # states by `routing_weights` on the corresponding tokens (top-1 and top-2) + current_state = hidden_states[None, top_x].reshape(-1, hidden_dim) + current_hidden_states = expert_layer(current_state) * routing_weights[top_x, idx, None] + + # However `index_add_` only support torch tensors for indexing so we'll use + # the `top_x` tensor here. + final_hidden_states.index_add_(0, top_x, current_hidden_states.to(hidden_states.dtype)) + + shared_expert_output = self.shared_expert(hidden_states) + shared_expert_output = F.sigmoid(self.shared_expert_gate(hidden_states)) * shared_expert_output + + final_hidden_states = final_hidden_states + shared_expert_output + + final_hidden_states = final_hidden_states.reshape(batch_size, sequence_length, hidden_dim) + return final_hidden_states, router_logits + + +# Copied from transformers.models.qwen2.modeling_qwen2.Qwen2Attention with Qwen2->TimeMoe +class TimeMoeAttention(nn.Module): + """ + Multi-headed attention from 'Attention Is All You Need' paper. Modified to use sliding window attention: Longformer + and "Generating Long Sequences with Sparse Transformers". + """ + + def __init__(self, config: TimeMoeConfig, layer_idx: Optional[int] = None): + super().__init__() + self.config = config + self.layer_idx = layer_idx + if layer_idx is None: + logger.warning_once( + f"Instantiating {self.__class__.__name__} without passing `layer_idx` is not recommended and will " + "to errors during the forward call, if caching is used. Please make sure to provide a `layer_idx` " + "when creating this class." + ) + + self.hidden_size = config.hidden_size + self.num_heads = config.num_attention_heads + self.head_dim = self.hidden_size // self.num_heads + self.num_key_value_heads = config.num_key_value_heads + self.num_key_value_groups = self.num_heads // self.num_key_value_heads + self.max_position_embeddings = config.max_position_embeddings + self.rope_theta = config.rope_theta + self.is_causal = True + self.attention_dropout = config.attention_dropout + + if (self.head_dim * self.num_heads) != self.hidden_size: + raise ValueError( + f"hidden_size must be divisible by num_heads (got `hidden_size`: {self.hidden_size}" + f" and `num_heads`: {self.num_heads})." 
+ ) + self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) + self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=True) + self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=True) + self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) + + self.rotary_emb = TimeMoeRotaryEmbedding( + self.head_dim, + max_position_embeddings=self.max_position_embeddings, + base=self.rope_theta, + ) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + **kwargs, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. Please make sure use `attention_mask` instead.`" + ) + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} " + "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class " + "with a layer index." 
+ ) + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + cos, sin = self.rotary_emb(value_states, seq_len=kv_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + cache_kwargs = {"sin": sin, "cos": cos} # Specific to RoPE models + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + # repeat k/v heads if n_kv_heads < n_heads + key_states = repeat_kv(key_states, self.num_key_value_groups) + value_states = repeat_kv(value_states, self.num_key_value_groups) + + attn_weights = torch.matmul(query_states, key_states.transpose(2, 3)) / math.sqrt(self.head_dim) + + if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): + raise ValueError( + f"Attention weights should be of size {(bsz, self.num_heads, q_len, kv_seq_len)}, but is" + f" {attn_weights.size()}" + ) + + if attention_mask is not None: + if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): + raise ValueError( + f"Attention mask should be of size {(bsz, 1, q_len, kv_seq_len)}, but is {attention_mask.size()}" + ) + + attn_weights = attn_weights + attention_mask + + # upcast attention to fp32 + attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query_states.dtype) + attn_weights = nn.functional.dropout(attn_weights, p=self.attention_dropout, training=self.training) + attn_output = torch.matmul(attn_weights, value_states) + + if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): + raise ValueError( + f"`attn_output` should be of size {(bsz, self.num_heads, q_len, self.head_dim)}, but is" + f" {attn_output.size()}" + ) + + attn_output = attn_output.transpose(1, 2).contiguous() + attn_output = attn_output.reshape(bsz, q_len, self.hidden_size) + + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + +class TimeMoeFlashAttention2(TimeMoeAttention): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._flash_attn_uses_top_left_mask = not is_flash_attn_greater_or_equal_2_10() + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.LongTensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Cache] = None, + output_attentions: bool = False, + use_cache: bool = False, + cache_position: Optional[torch.LongTensor] = None, + ) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[Tuple[torch.Tensor]]]: + if isinstance(past_key_value, StaticCache): + raise ValueError( + "`static` cache implementation is not compatible with `attn_implementation==flash_attention_2` " + "make sure to use `sdpa` in the mean time, and open an issue at https://github.com/huggingface/transformers" + ) + + output_attentions = False + + bsz, q_len, _ = hidden_states.size() + + query_states = self.q_proj(hidden_states) + key_states = self.k_proj(hidden_states) + value_states = self.v_proj(hidden_states) + + # Flash attention requires the input to have the shape + # batch_size x seq_length x head_dim x hidden_dim + # therefore we just need to keep the original shape + query_states = query_states.view(bsz, q_len, self.num_heads, self.head_dim).transpose(1, 2) + key_states = key_states.view(bsz, q_len, self.num_key_value_heads, self.head_dim).transpose(1, 2) + value_states = value_states.view(bsz, q_len, self.num_key_value_heads, 
self.head_dim).transpose(1, 2) + + kv_seq_len = key_states.shape[-2] + if past_key_value is not None: + if self.layer_idx is None: + raise ValueError( + f"The cache structure has changed since version v4.36. If you are using {self.__class__.__name__} " + "for auto-regressive decoding with k/v caching, please make sure to initialize the attention class " + "with a layer index." + ) + kv_seq_len += past_key_value.get_usable_length(kv_seq_len, self.layer_idx) + rotary_seq_len = max(kv_seq_len, position_ids[:, -1].max().item()) + 1 + cos, sin = self.rotary_emb(value_states, seq_len=rotary_seq_len) + query_states, key_states = apply_rotary_pos_emb(query_states, key_states, cos, sin, position_ids) + + if past_key_value is not None: + # sin and cos are specific to RoPE models; cache_position needed for the static cache + cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position} + key_states, value_states = past_key_value.update(key_states, value_states, self.layer_idx, cache_kwargs) + + # TODO: These transpose are quite inefficient but Flash Attention requires the layout [batch_size, sequence_length, num_heads, head_dim]. We would need to refactor the KV cache + # to be able to avoid many of these transpose/reshape/view. + query_states = query_states.transpose(1, 2) + key_states = key_states.transpose(1, 2) + value_states = value_states.transpose(1, 2) + + dropout_rate = self.attention_dropout if self.training else 0.0 + + # In PEFT, usually we cast the layer norms in float32 for training stability reasons + # therefore the input hidden states gets silently casted in float32. Hence, we need + # cast them back in the correct dtype just to be sure everything works as expected. + # This might slowdown training & inference so it is recommended to not cast the LayerNorms + # in fp32. (LlamaRMSNorm handles it correctly) + + input_dtype = query_states.dtype + if input_dtype == torch.float32: + + if torch.is_autocast_enabled(): + target_dtype = torch.get_autocast_gpu_dtype() + # Handle the case where the model is quantized + elif hasattr(self.config, "_pre_quantization_dtype"): + target_dtype = self.config._pre_quantization_dtype + else: + target_dtype = self.q_proj.weight.dtype + + logger.warning_once( + f"The input hidden states seems to be silently casted in float32, this might be related to" + f" the fact you have upcasted embedding or layer norm layers in float32. We will cast back the input in" + f" {target_dtype}." + ) + + query_states = query_states.to(target_dtype) + key_states = key_states.to(target_dtype) + value_states = value_states.to(target_dtype) + + attn_output = self._flash_attention_forward( + query_states, key_states, value_states, attention_mask, q_len, dropout=dropout_rate + ) + + attn_output = attn_output.reshape(bsz, q_len, -1).contiguous() + attn_output = self.o_proj(attn_output) + + if not output_attentions: + attn_weights = None + + return attn_output, attn_weights, past_key_value + + def _flash_attention_forward( + self, query_states, key_states, value_states, attention_mask, query_length, dropout=0.0, softmax_scale=None + ): + """ + Calls the forward method of Flash Attention - if the input hidden states contain at least one padding token + first unpad the input, then computes the attention scores and pad the final attention scores. 
+ + Args: + query_states (`torch.Tensor`): + Input query states to be passed to Flash Attention API + key_states (`torch.Tensor`): + Input key states to be passed to Flash Attention API + value_states (`torch.Tensor`): + Input value states to be passed to Flash Attention API + attention_mask (`torch.Tensor`): + The padding mask - corresponds to a tensor of size `(batch_size, seq_len)` where 0 stands for the + position of padding tokens and 1 for the position of non-padding tokens. + dropout (`float`): + Attention dropout + softmax_scale (`float`, *optional*): + The scaling of QK^T before applying softmax. Default to 1 / sqrt(head_dim) + """ + if not self._flash_attn_uses_top_left_mask: + causal = self.is_causal + else: + # TODO: Remove the `query_length != 1` check once Flash Attention for RoCm is bumped to 2.1. For details, please see the comment in LlamaFlashAttention2 __init__. + causal = self.is_causal and query_length != 1 + + origin_dtype = query_states.dtype + if origin_dtype not in [torch.bfloat16, torch.float16]: + query_states = query_states.to(dtype=torch.bfloat16) + key_states = key_states.to(dtype=torch.bfloat16) + value_states = value_states.to(dtype=torch.bfloat16) + + # without attention mask to faster speed + attn_output = flash_attn_func( + query_states, + key_states, + value_states, + dropout, + softmax_scale=softmax_scale, + causal=causal + ) + if origin_dtype not in [torch.bfloat16, torch.float16]: + return attn_output.to(origin_dtype) + else: + return attn_output + + def _upad_input(self, query_layer, key_layer, value_layer, attention_mask, query_length): + indices_k, cu_seqlens_k, max_seqlen_in_batch_k = _get_unpad_data(attention_mask) + batch_size, kv_seq_len, num_key_value_heads, head_dim = key_layer.shape + + key_layer = index_first_axis( + key_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + value_layer = index_first_axis( + value_layer.reshape(batch_size * kv_seq_len, num_key_value_heads, head_dim), indices_k + ) + if query_length == kv_seq_len: + query_layer = index_first_axis( + query_layer.reshape(batch_size * kv_seq_len, self.num_heads, head_dim), indices_k + ) + cu_seqlens_q = cu_seqlens_k + max_seqlen_in_batch_q = max_seqlen_in_batch_k + indices_q = indices_k + elif query_length == 1: + max_seqlen_in_batch_q = 1 + cu_seqlens_q = torch.arange( + batch_size + 1, dtype=torch.int32, device=query_layer.device + ) # There is a memcpy here, that is very bad. + indices_q = cu_seqlens_q[:-1] + query_layer = query_layer.squeeze(1) + else: + # The -q_len: slice assumes left padding. 
+ attention_mask = attention_mask[:, -query_length:] + query_layer, indices_q, cu_seqlens_q, max_seqlen_in_batch_q = unpad_input(query_layer, attention_mask) + + return ( + query_layer, + key_layer, + value_layer, + indices_q, + (cu_seqlens_q, cu_seqlens_k), + (max_seqlen_in_batch_q, max_seqlen_in_batch_k), + ) + + +TIME_MOE_ATTENTION_CLASSES = { + "eager": TimeMoeAttention, + 'flash_attention_2': TimeMoeFlashAttention2, +} + + +class TimeMoeDecoderLayer(nn.Module): + def __init__(self, config: TimeMoeConfig, layer_idx: int): + super().__init__() + self.config = config + self.hidden_size = config.hidden_size + + self.self_attn = TIME_MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) + + if self.config.use_dense: + self.ffn_layer = TimeMoeMLP( + hidden_size=self.config.hidden_size, + intermediate_size=self.config.intermediate_size, + hidden_act=self.config.hidden_act, + ) + else: + self.ffn_layer = TimeMoeSparseExpertsLayer(config) + self.input_layernorm = TimeMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + self.post_attention_layernorm = TimeMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + def forward( + self, + hidden_states: torch.Tensor, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_value: Optional[Tuple[torch.Tensor]] = None, + output_attentions: Optional[bool] = False, + use_cache: Optional[bool] = False, + **kwargs, + ) -> Tuple[torch.FloatTensor, torch.FloatTensor, Optional[torch.FloatTensor], Optional[torch.FloatTensor]]: + if "padding_mask" in kwargs: + warnings.warn( + "Passing `padding_mask` is deprecated and will be removed in v4.37. " + "Please make sure use `attention_mask` instead.`" + ) + """ + Args: + hidden_states (`torch.FloatTensor`): input to the layer of shape `(batch, seq_len, embed_dim)` + attention_mask (`torch.FloatTensor`, *optional*): attention mask of size + `(batch, sequence_length)` where padding elements are indicated by 0. + output_attentions (`bool`, *optional*): + Whether or not to return the attentions tensors of all attention layers. See `attentions` under + returned tensors for more detail. + use_cache (`bool`, *optional*): + If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding + (see `past_key_values`). 
+ past_key_value (`Tuple(torch.FloatTensor)`, *optional*): cached past key and value projection states + """ + + residual = hidden_states + + hidden_states = self.input_layernorm(hidden_states) + + # Self Attention + hidden_states, self_attn_weights, present_key_value = self.self_attn( + hidden_states=hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_value, + output_attentions=output_attentions, + use_cache=use_cache, + ) + hidden_states = residual + hidden_states + + # Fully Connected + residual = hidden_states + hidden_states = self.post_attention_layernorm(hidden_states) + hidden_states, router_logits = self.ffn_layer(hidden_states) + hidden_states = residual + hidden_states + + if not output_attentions: + self_attn_weights = None + + if not use_cache: + present_key_value = None + return hidden_states, self_attn_weights, present_key_value, router_logits + + +class TimeMoePreTrainedModel(PreTrainedModel): + config_class = TimeMoeConfig + base_model_prefix = "model" + supports_gradient_checkpointing = True + _no_split_modules = ["TimeMoeDecoderLayer"] + _skip_keys_device_placement = "past_key_values" + _supports_flash_attn_2 = True + _supports_sdpa = False + _supports_cache_class = True + + def _init_weights(self, module): + std = self.config.initializer_range + if isinstance(module, torch.nn.Linear): + module.weight.data.normal_(mean=0.0, std=std) + if module.bias is not None: + module.bias.data.zero_() + elif isinstance(module, torch.nn.Embedding): + module.weight.data.normal_(mean=0.0, std=std) + if module.padding_idx is not None: + module.weight.data[module.padding_idx].zero_() + + +class TimeMoeModel(TimeMoePreTrainedModel): + """ + Transformer decoder consisting of *config.num_hidden_layers* layers. 
Each layer is a [`TimeMoeDecoderLayer`] + + Args: + config: TimeMoeConfig + """ + + def __init__(self, config: TimeMoeConfig): + super().__init__(config) + self.embed_layer = TimeMoeInputEmbedding(config) + self.layers = nn.ModuleList( + [TimeMoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] + ) + self._attn_implementation = config._attn_implementation + self.norm = TimeMoeRMSNorm(config.hidden_size, eps=config.rms_norm_eps) + + self.gradient_checkpointing = False + # Initialize weights and apply final processing + self.post_init() + + def forward( + self, + input_ids: torch.FloatTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + ) -> Union[Tuple, MoeModelOutputWithPast]: + # input_ids is the input of time series, its shape is [batch_size, seq_len, input_size] + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + use_cache = use_cache if use_cache is not None else self.config.use_cache + + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # retrieve input_ids and inputs_embeds + if input_ids is not None and inputs_embeds is not None: + raise ValueError("You cannot specify both decoder_input_ids and decoder_inputs_embeds at the same time") + elif input_ids is not None: + if len(input_ids.shape) == 2: + input_ids.unsqueeze_(dim=-1) + batch_size, seq_length, _ = input_ids.shape + elif inputs_embeds is not None: + batch_size, seq_length, _ = inputs_embeds.shape + else: + raise ValueError("You have to specify either decoder_input_ids or decoder_inputs_embeds") + + if self.gradient_checkpointing and self.training: + if use_cache: + logger.warning_once( + "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..." 
+ ) + use_cache = False + + past_key_values_length = 0 + + if use_cache: + use_legacy_cache = not isinstance(past_key_values, Cache) + if use_legacy_cache: + past_key_values = DynamicCache.from_legacy_cache(past_key_values) + past_key_values_length = past_key_values.get_usable_length(seq_length) + + if position_ids is None: + device = input_ids.device if input_ids is not None else inputs_embeds.device + position_ids = torch.arange( + past_key_values_length, seq_length + past_key_values_length, dtype=torch.long, device=device + ) + # position_ids = position_ids.unsqueeze(0).view(-1, seq_length) + position_ids = position_ids.view(-1, seq_length) + else: + position_ids = position_ids.view(-1, seq_length).long() + + if inputs_embeds is None: + inputs_embeds = self.embed_layer(input_ids) + + # 4d mask is passed through the layers + attention_mask = _prepare_4d_causal_attention_mask( + attention_mask, + (batch_size, seq_length), + inputs_embeds, + past_key_values_length, + sliding_window=None, + ) + + hidden_states = inputs_embeds + + # decoder layers + all_hidden_states = () if output_hidden_states else None + all_self_attns = () if output_attentions else None + all_router_logits = () + next_decoder_cache = None + + for decoder_layer in self.layers: + if output_hidden_states: + all_hidden_states += (hidden_states,) + + if self.gradient_checkpointing and self.training: + layer_outputs = self._gradient_checkpointing_func( + decoder_layer.__call__, + hidden_states, + attention_mask, + position_ids, + past_key_values, + output_attentions, + use_cache, + ) + else: + layer_outputs = decoder_layer( + hidden_states, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_value=past_key_values, + output_attentions=output_attentions, + use_cache=use_cache, + ) + + hidden_states = layer_outputs[0] + + all_router_logits += (layer_outputs[-1],) + + if output_attentions: + all_self_attns += (layer_outputs[1],) + + if use_cache: + next_decoder_cache = layer_outputs[2] + + hidden_states = self.norm(hidden_states) + + # add hidden states from the last decoder layer + if output_hidden_states: + all_hidden_states += (hidden_states,) + + next_cache = None + if use_cache: + next_cache = next_decoder_cache.to_legacy_cache() if use_legacy_cache else next_decoder_cache + + if not return_dict: + return tuple( + v + for v in [hidden_states, next_cache, all_hidden_states, all_self_attns, all_router_logits] + if v is not None + ) + return MoeModelOutputWithPast( + last_hidden_state=hidden_states, + past_key_values=next_cache, + hidden_states=all_hidden_states, + attentions=all_self_attns, + router_logits=all_router_logits + ) + + +class TimeMoeOutputLayer(nn.Module): + + def __init__(self, hidden_size: int, horizon_length: int, input_size: int = 1): + super().__init__() + + self.out_layer = nn.Linear( + hidden_size, + input_size * horizon_length, + bias=False, + ) + + def forward(self, x): + """ + + Args: + x (torch.FloatTensor): with shape [B, seq_len, hidden_size] + + Returns: + ` torch.FloatTensor: final prediction with shape [B, seq_len, input_size] + """ + return self.out_layer(x) + + +class TimeMoeForPrediction(TimeMoePreTrainedModel, TSGenerationMixin): + + def __init__(self, config: TimeMoeConfig): + super().__init__(config) + self.config = config + self.apply_aux_loss = config.apply_aux_loss + self.num_experts_per_tok = config.num_experts_per_tok + self.router_aux_loss_factor = config.router_aux_loss_factor + + self.model = TimeMoeModel(config) + # output layer + lm_head_list = [] + 
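+        # One output head is created per configured horizon length, and horizon_length_map
+        # records horizon -> index into lm_heads. At inference time forward() walks
+        # config.horizon_lengths and uses the largest configured horizon that does not exceed
+        # max_horizon_length (falling back to the first one), truncating the projection if it
+        # still overshoots (see the else-branch in forward below). For example, with
+        # horizon_lengths=[1, 32] a request for max_horizon_length=16 falls back to the
+        # horizon-1 head.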
self.horizon_length_map = {} + for i, horizon_length in enumerate(config.horizon_lengths): + lm_head_list.append( + TimeMoeOutputLayer( + hidden_size=self.config.hidden_size, + input_size=self.config.input_size, + horizon_length=horizon_length, + ) + ) + self.horizon_length_map[horizon_length] = i + self.lm_heads = nn.ModuleList(lm_head_list) + + self.loss_function = torch.nn.HuberLoss(reduction='none', delta=2.0) + + # Initialize weights and apply final processing + self.post_init() + + def set_decoder(self, decoder): + self.model = decoder + + def get_decoder(self): + return self.model + + def forward( + self, + input_ids: torch.FloatTensor = None, + attention_mask: Optional[torch.Tensor] = None, + position_ids: Optional[torch.LongTensor] = None, + past_key_values: Optional[List[torch.FloatTensor]] = None, + inputs_embeds: Optional[torch.FloatTensor] = None, + labels: Optional[torch.FloatTensor] = None, + loss_masks: Optional[torch.FloatTensor] = None, + use_cache: Optional[bool] = None, + output_attentions: Optional[bool] = None, + output_hidden_states: Optional[bool] = None, + return_dict: Optional[bool] = None, + max_horizon_length: Optional[int] = None, + ) -> Union[Tuple, MoeCausalLMOutputWithPast]: + + output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions + output_hidden_states = ( + output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states + ) + return_dict = return_dict if return_dict is not None else self.config.use_return_dict + + # decoder outputs consists of (dec_features, layer_state, dec_hidden, dec_attn) + outputs = self.model( + input_ids=input_ids, + attention_mask=attention_mask, + position_ids=position_ids, + past_key_values=past_key_values, + inputs_embeds=inputs_embeds, + use_cache=use_cache, + output_attentions=output_attentions, + output_hidden_states=output_hidden_states, + return_dict=return_dict, + ) + + hidden_states = outputs[0] + predictions = None + + loss = None + aux_loss = None + if labels is not None: + # AutoRegressive loss + ar_loss = 0.0 + for lm_head, horizon_length in zip(self.lm_heads, self.config.horizon_lengths): + one_predictions = lm_head(hidden_states) + one_loss = self.calc_ar_loss(one_predictions, labels, loss_masks, horizon_length) + ar_loss += one_loss + if predictions is None: + predictions = one_predictions + loss = ar_loss / len(self.config.horizon_lengths) + + if self.apply_aux_loss: + router_logits = outputs.router_logits if return_dict else outputs[-1] + + temporal_aux_loss = load_balancing_loss_func( + router_logits, + top_k=self.num_experts_per_tok, + num_experts=self.config.num_experts, + attention_mask=attention_mask + ) + loss += self.router_aux_loss_factor * temporal_aux_loss.to(loss.device) + else: + if max_horizon_length is None: + horizon_length = self.config.horizon_lengths[0] + max_horizon_length = horizon_length + else: + horizon_length = self.config.horizon_lengths[0] + for h in self.config.horizon_lengths[1:]: + if h > max_horizon_length: + break + else: + horizon_length = h + lm_head = self.lm_heads[self.horizon_length_map[horizon_length]] + predictions = lm_head(hidden_states) + if horizon_length > max_horizon_length: + predictions = predictions[:, :, : self.config.input_size * max_horizon_length] + + if not return_dict: + output = (predictions,) + outputs[1:] + return (loss, aux_loss) + output if loss is not None else output + + return MoeCausalLMOutputWithPast( + loss=loss, + aux_loss=aux_loss, + logits=predictions, + 
past_key_values=outputs.past_key_values, + hidden_states=outputs.hidden_states, + attentions=outputs.attentions, + ) + + def calc_ar_loss(self, predictions, labels, loss_masks, horizon_length): + if len(labels.shape) == 2: + labels.unsqueeze_(dim=-1) + # enable model parallelism + labels = labels.to(predictions.device) + if loss_masks is not None and len(loss_masks.shape) == 2: + loss_masks.unsqueeze_(dim=-1) + # enable model parallelism + loss_masks = loss_masks.to(predictions.device) + + if horizon_length > 1: + batch_size, seq_len, output_size = predictions.shape + shift_predictions = predictions.view(batch_size, seq_len, horizon_length, -1) + + # pad to the same length with predictions + # shape -> [B, input_size, seq_len + horizon_length -1] + labels = F.pad(labels.transpose(-1, -2), (0, horizon_length - 1), mode='constant', value=0) + + # shape -> [B, input_size, seq_len, horizon_length] + shift_labels = labels.unfold(dimension=-1, size=horizon_length, step=1) + shift_labels = shift_labels.permute(0, 2, 3, 1) + + if loss_masks is not None: + # pad to the same length with predictions + loss_masks = F.pad(loss_masks.transpose(-1, -2), (0, horizon_length - 1), mode='constant', value=0) + + loss_masks = loss_masks.unfold(dimension=-1, size=horizon_length, step=1) + loss_masks = loss_masks.permute(0, 2, 3, 1) + + else: + shift_predictions = predictions + shift_labels = labels + + # Calculate loss with mask + losses = self.loss_function(shift_predictions, shift_labels) + + if loss_masks is not None: + losses = losses * loss_masks + loss = losses.sum() / loss_masks.sum() + else: + loss = torch.mean(losses) + + return loss + + def prepare_inputs_for_generation( + self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs + ): + # Omit tokens covered by past_key_values + if past_key_values is not None: + if isinstance(past_key_values, Cache): + cache_length = past_key_values.get_seq_length() + if isinstance(past_key_values, DynamicCache): + past_length = past_key_values.seen_tokens + else: + past_length = cache_length + + max_cache_length = past_key_values.get_max_length() + else: + cache_length = past_length = past_key_values[0][0].shape[2] + max_cache_length = None + + # Keep only the unprocessed tokens: + # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where + # some of the inputs are exclusively passed as part of the cache (e.g. when passing input_embeds as + # input) + if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]: + input_ids = input_ids[:, -(attention_mask.shape[1] - past_length):] + # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard + # input_ids based on the past_length. + elif past_length < input_ids.shape[1]: + input_ids = input_ids[:, past_length:] + # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens. + + # If we are about to go beyond the maximum cache length, we need to crop the input attention mask. 
+    def prepare_inputs_for_generation(
+        self, input_ids, past_key_values=None, attention_mask=None, inputs_embeds=None, **kwargs
+    ):
+        # Omit tokens covered by past_key_values
+        if past_key_values is not None:
+            if isinstance(past_key_values, Cache):
+                cache_length = past_key_values.get_seq_length()
+                if isinstance(past_key_values, DynamicCache):
+                    past_length = past_key_values.seen_tokens
+                else:
+                    past_length = cache_length
+
+                max_cache_length = past_key_values.get_max_length()
+            else:
+                cache_length = past_length = past_key_values[0][0].shape[2]
+                max_cache_length = None
+
+            # Keep only the unprocessed tokens:
+            # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where
+            # some of the inputs are exclusively passed as part of the cache (e.g. when passing input_embeds as
+            # input)
+            if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]:
+                input_ids = input_ids[:, -(attention_mask.shape[1] - past_length):]
+            # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard
+            # input_ids based on the past_length.
+            elif past_length < input_ids.shape[1]:
+                input_ids = input_ids[:, past_length:]
+            # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens.
+
+            # If we are about to go beyond the maximum cache length, we need to crop the input attention mask.
+            if (
+                max_cache_length is not None
+                and attention_mask is not None
+                and cache_length + input_ids.shape[1] > max_cache_length
+            ):
+                attention_mask = attention_mask[:, -max_cache_length:]
+
+        position_ids = kwargs.get("position_ids", None)
+        if attention_mask is not None and position_ids is None:
+            # create position_ids on the fly for batch generation
+            position_ids = attention_mask.long().cumsum(-1) - 1
+            position_ids.masked_fill_(attention_mask == 0, 1)
+            if past_key_values:
+                position_ids = position_ids[:, -input_ids.shape[1]:]
+
+        # if `inputs_embeds` are passed, we only want to use them in the 1st generation step
+        if inputs_embeds is not None and past_key_values is None:
+            logger.info('Use input_embedding')
+            model_inputs = {"inputs_embeds": inputs_embeds}
+        else:
+            model_inputs = {"input_ids": input_ids}
+
+        model_inputs.update(
+            {
+                "position_ids": position_ids,
+                "past_key_values": past_key_values,
+                "use_cache": kwargs.get("use_cache"),
+                "attention_mask": attention_mask,
+            }
+        )
+        return model_inputs
+
+    @staticmethod
+    def _reorder_cache(past_key_values, beam_idx):
+        reordered_past = ()
+        for layer_past in past_key_values:
+            reordered_past += (
+                tuple(past_state.index_select(0, beam_idx.to(past_state.device)) for past_state in layer_past),
+            )
+        return reordered_past
diff --git a/src/samay/models/Time_MoE/time_moe/models/ts_generation_mixin.py b/src/samay/models/Time_MoE/time_moe/models/ts_generation_mixin.py
new file mode 100644
index 0000000..63e32b7
--- /dev/null
+++ b/src/samay/models/Time_MoE/time_moe/models/ts_generation_mixin.py
@@ -0,0 +1,241 @@
+import warnings
+from typing import Any, Dict, List, Optional, Union
+
+import torch
+
+from transformers import GenerationMixin, LogitsProcessorList, StoppingCriteriaList
+from transformers.generation import validate_stopping_criteria, EosTokenCriteria
+from transformers.generation.utils import GenerateNonBeamOutput, GenerateEncoderDecoderOutput, GenerateDecoderOnlyOutput
+from transformers.utils import ModelOutput
+
+
+class TSGenerationMixin(GenerationMixin):
+
+    def _greedy_search(
+        self,
+        input_ids: torch.Tensor,
+        logits_processor: Optional[LogitsProcessorList] = None,
+        stopping_criteria: Optional[StoppingCriteriaList] = None,
+        max_length: Optional[int] = None,
+        pad_token_id: Optional[int] = None,
+        eos_token_id: Optional[Union[int, List[int]]] = None,
+        output_attentions: Optional[bool] = None,
+        output_hidden_states: Optional[bool] = None,
+        output_scores: Optional[bool] = None,
+        output_logits: Optional[bool] = None,
+        return_dict_in_generate: Optional[bool] = None,
+        synced_gpus: bool = False,
+        streamer: Optional["BaseStreamer"] = None,
+        **model_kwargs,
+    ) -> Union[GenerateNonBeamOutput, torch.Tensor]:
+        input_ids_origin_device = input_ids.device
+        input_ids = input_ids.to(self.device)
+        if len(input_ids.shape) == 2:
+            batch_size, cur_len = input_ids.shape
+        else:
+            raise ValueError('Input shape must be: [batch_size, seq_len]')
+        # init values
+        logits_processor = logits_processor if logits_processor is not None else LogitsProcessorList()
+        stopping_criteria = stopping_criteria if stopping_criteria is not None else StoppingCriteriaList()
+        if max_length is not None:
+            warnings.warn(
+                "`max_length` is deprecated in this function, use"
+                " `stopping_criteria=StoppingCriteriaList([MaxLengthCriteria(max_length=max_length)])` instead.",
+                UserWarning,
+            )
+            stopping_criteria = validate_stopping_criteria(stopping_criteria, max_length)
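+        # Resolve pad/eos ids from the generation config when they are not passed explicitly.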
+        pad_token_id = pad_token_id if pad_token_id is not None else self.generation_config.pad_token_id
+        if eos_token_id is not None:
+            stopping_criteria.append(EosTokenCriteria(eos_token_id=eos_token_id))
+        else:
+            # remove when the method is totally private
+            # need to get `eos_token_id` and add stopping criteria, so that generation does not go forever
+            eos_token_id = [
+                criteria.eos_token_id.tolist() for criteria in stopping_criteria if hasattr(criteria, "eos_token_id")
+            ]
+            eos_token_id = eos_token_id[0] if eos_token_id else None
+            if eos_token_id is None and self.generation_config.eos_token_id is not None:
+                eos_token_id = self.generation_config.eos_token_id
+                stopping_criteria.append(EosTokenCriteria(eos_token_id=eos_token_id))
+
+        if isinstance(eos_token_id, int):
+            eos_token_id = [eos_token_id]
+        output_scores = output_scores if output_scores is not None else self.generation_config.output_scores
+        output_attentions = (
+            output_attentions if output_attentions is not None else self.generation_config.output_attentions
+        )
+        output_hidden_states = (
+            output_hidden_states if output_hidden_states is not None else self.generation_config.output_hidden_states
+        )
+        return_dict_in_generate = (
+            return_dict_in_generate
+            if return_dict_in_generate is not None
+            else self.generation_config.return_dict_in_generate
+        )
+
+        # init attention / hidden states / scores tuples
+        raw_logits = () if (return_dict_in_generate and output_logits) else None
+        scores = () if (return_dict_in_generate and output_scores) else None
+        decoder_attentions = () if (return_dict_in_generate and output_attentions) else None
+        cross_attentions = () if (return_dict_in_generate and output_attentions) else None
+        decoder_hidden_states = () if (return_dict_in_generate and output_hidden_states) else None
+
+        # if model is an encoder-decoder, retrieve encoder attention weights and hidden states
+        if return_dict_in_generate and self.config.is_encoder_decoder:
+            encoder_attentions = model_kwargs["encoder_outputs"].get("attentions") if output_attentions else None
+            encoder_hidden_states = (
+                model_kwargs["encoder_outputs"].get("hidden_states") if output_hidden_states else None
+            )
+
+        # keep track of which sequences are already finished
+        if "inputs_embeds" in model_kwargs:
+            cur_len = model_kwargs["inputs_embeds"].shape[1]
+        this_peer_finished = False
+        unfinished_sequences = torch.ones(batch_size, dtype=torch.long, device=input_ids.device)
+        model_kwargs["cache_position"] = torch.arange(cur_len, device=input_ids.device)
+
+        max_length = stopping_criteria.max_length
+        while self._has_unfinished_sequences(this_peer_finished, synced_gpus, device=input_ids.device):
+            # prepare model inputs
+            model_inputs = self.prepare_inputs_for_generation(input_ids, **model_kwargs)
+
+            input_length = input_ids.shape[1]
+
+            # forward pass to get next token
+            outputs = self(
+                **model_inputs,
+                return_dict=True,
+                output_attentions=output_attentions,
+                output_hidden_states=output_hidden_states,
+                max_horizon_length=max_length - input_length,
+            )
+
+            if synced_gpus and this_peer_finished:
+                continue  # don't waste resources running the code we don't need
+
+            next_token_logits = outputs.logits[:, -1, :]
+
+            # pre-process distribution
+            next_tokens_scores = logits_processor(input_ids, next_token_logits)
+
+            # Store scores, attentions and hidden_states when required
+            if return_dict_in_generate:
+                if output_scores:
+                    scores += (next_tokens_scores,)
+                if output_logits:
+                    raw_logits += (next_token_logits,)
+                if output_attentions:
+                    decoder_attentions += (
+                        (outputs.decoder_attentions,) if self.config.is_encoder_decoder else (outputs.attentions,)
+                    )
+                    if self.config.is_encoder_decoder:
+                        cross_attentions += (outputs.cross_attentions,)
+
+                if output_hidden_states:
+                    decoder_hidden_states += (
+                        (outputs.decoder_hidden_states,)
+                        if self.config.is_encoder_decoder
+                        else (outputs.hidden_states,)
+                    )
+
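+            # For time series there is no argmax over a vocabulary: the processed scores
+            # are the real-valued forecasts and are appended to the sequence directly.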
+            # argmax
+            # next_tokens = torch.argmax(next_tokens_scores, dim=-1)
+            next_tokens = next_tokens_scores
+
+            # finished sentences should have their next token be a padding token
+            if eos_token_id is not None:
+                if pad_token_id is None:
+                    raise ValueError("If `eos_token_id` is defined, make sure that `pad_token_id` is defined.")
+                next_tokens = next_tokens * unfinished_sequences + pad_token_id * (1 - unfinished_sequences)
+
+            # update generated ids, model inputs, and length for next step
+            next_tokens = next_tokens.reshape(batch_size, -1, self.config.input_size)
+            horizon_length = next_tokens.shape[1]
+
+            input_ids = torch.cat([input_ids, next_tokens], dim=-2)
+            if streamer is not None:
+                streamer.put(next_tokens.cpu())
+            model_kwargs = self._update_model_kwargs_for_generation(
+                outputs,
+                model_kwargs,
+                horizon_length=horizon_length,
+                is_encoder_decoder=self.config.is_encoder_decoder,
+            )
+
+            unfinished_sequences = unfinished_sequences & ~stopping_criteria(input_ids[..., 0], scores)
+            this_peer_finished = unfinished_sequences.max() == 0
+
+        if input_ids.shape[1] > max_length:
+            input_ids = input_ids[:, :max_length]
+
+        if streamer is not None:
+            streamer.end()
+
+        input_ids.squeeze_(dim=-1).to(input_ids_origin_device)
+        if return_dict_in_generate:
+            if self.config.is_encoder_decoder:
+                return GenerateEncoderDecoderOutput(
+                    sequences=input_ids,
+                    scores=scores,
+                    logits=raw_logits,
+                    encoder_attentions=encoder_attentions,
+                    encoder_hidden_states=encoder_hidden_states,
+                    decoder_attentions=decoder_attentions,
+                    cross_attentions=cross_attentions,
+                    decoder_hidden_states=decoder_hidden_states,
+                    past_key_values=model_kwargs.get("past_key_values"),
+                )
+            else:
+                return GenerateDecoderOnlyOutput(
+                    sequences=input_ids,
+                    scores=scores,
+                    logits=raw_logits,
+                    attentions=decoder_attentions,
+                    hidden_states=decoder_hidden_states,
+                    past_key_values=model_kwargs.get("past_key_values"),
+                )
+        else:
+            return input_ids
+
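+    # Each step emits `horizon_length` new time points, so the cached keys/values, the
+    # attention mask and cache_position must all advance by horizon_length.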
+    def _update_model_kwargs_for_generation(
+        self,
+        outputs: ModelOutput,
+        model_kwargs: Dict[str, Any],
+        horizon_length: int = 1,
+        is_encoder_decoder: bool = False,
+        standardize_cache_format: bool = False,
+    ) -> Dict[str, Any]:
+        # update past_key_values
+        model_kwargs["past_key_values"] = self._extract_past_from_model_output(
+            outputs,
+            standardize_cache_format=standardize_cache_format
+        )
+        if getattr(outputs, "state", None) is not None:
+            model_kwargs["state"] = outputs.state
+
+        # update token_type_ids with last value
+        if "token_type_ids" in model_kwargs:
+            token_type_ids = model_kwargs["token_type_ids"]
+            model_kwargs["token_type_ids"] = torch.cat([token_type_ids, token_type_ids[:, -1].unsqueeze(-1)], dim=-1)
+
+        if not is_encoder_decoder:
+            # update attention mask
+            if "attention_mask" in model_kwargs:
+                attention_mask = model_kwargs["attention_mask"]
+                model_kwargs["attention_mask"] = torch.cat(
+                    [attention_mask, attention_mask.new_ones((attention_mask.shape[0], horizon_length))], dim=-1
+                )
+        else:
+            # update decoder attention mask
+            if "decoder_attention_mask" in model_kwargs:
+                decoder_attention_mask = model_kwargs["decoder_attention_mask"]
+                model_kwargs["decoder_attention_mask"] = torch.cat(
+                    [decoder_attention_mask, decoder_attention_mask.new_ones((decoder_attention_mask.shape[0], horizon_length))],
+                    dim=-1,
+                )
+
+        if "cache_position" in model_kwargs and model_kwargs["cache_position"] is not None:
+            model_kwargs["cache_position"] = model_kwargs["cache_position"][-1:] + horizon_length
+            # model_kwargs["cache_position"] = model_kwargs["cache_position"][-1:] + 1
+
+        return model_kwargs
diff --git a/src/samay/models/Time_MoE/time_moe/runner.py b/src/samay/models/Time_MoE/time_moe/runner.py
new file mode 100644
index 0000000..efa8f71
--- /dev/null
+++ b/src/samay/models/Time_MoE/time_moe/runner.py
@@ -0,0 +1,248 @@
+import os
+import math
+import random
+from functools import reduce
+from operator import mul
+
+import numpy as np
+import torch
+import torch.distributed as dist
+
+from time_moe.datasets.time_moe_dataset import TimeMoEDataset
+from time_moe.datasets.time_moe_window_dataset import TimeMoEWindowDataset
+from time_moe.models.modeling_time_moe import TimeMoeForPrediction, TimeMoeConfig
+from time_moe.trainer.hf_trainer import TimeMoETrainingArguments, TimeMoeTrainer
+from time_moe.utils.dist_util import get_world_size
+from time_moe.utils.log_util import logger, log_in_local_rank_0
+
+
+class TimeMoeRunner:
+    def __init__(
+        self,
+        model_path: str = None,
+        output_path: str = 'logs/time_moe',
+        seed: int = 9899
+    ):
+        self.model_path = model_path
+        self.output_path = output_path
+        self.seed = seed
+
+    def load_model(self, model_path: str = None, from_scatch: bool = False, **kwargs):
+        if model_path is None:
+            model_path = self.model_path
+        attn = kwargs.pop('attn_implementation', None)
+        if attn is None:
+            attn = 'eager'
+        elif attn == 'auto':
+            # try to use flash-attention
+            try:
+                from flash_attn import flash_attn_func, flash_attn_varlen_func
+                from flash_attn.bert_padding import index_first_axis, pad_input, unpad_input  # noqa
+                attn = 'flash_attention_2'
+            except:
+                log_in_local_rank_0('Flash attention import failed, switching to eager attention.', type='warn')
+                attn = 'eager'
+
+        if attn == 'eager':
+            log_in_local_rank_0('Use Eager Attention')
+        elif attn == 'flash_attention_2':
+            log_in_local_rank_0('Use Flash Attention 2')
+        else:
+            raise ValueError(f'Unknown attention method: {attn}')
+        kwargs['attn_implementation'] = attn
+
+        if from_scatch:
+            config = TimeMoeConfig.from_pretrained(model_path, _attn_implementation=attn)
+            model = TimeMoeForPrediction(config)
+        else:
+            model = TimeMoeForPrediction.from_pretrained(model_path, **kwargs)
+        return model
+
+    def train_model(self, from_scratch: bool = False, **kwargs):
+        setup_seed(self.seed)
+
+        train_config = kwargs
+
+        num_devices = get_world_size()
+
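+        # Reconcile global batch size, per-device micro batch size and gradient
+        # accumulation so that global = micro * devices * accumulation.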
+        global_batch_size = train_config.get('global_batch_size', None)
+        micro_batch_size = train_config.get('micro_batch_size', None)
+
+        if global_batch_size is None and micro_batch_size is None:
+            raise ValueError('Must set at least one argument: "global_batch_size" or "micro_batch_size"')
+        elif global_batch_size is None:
+            gradient_accumulation_steps = 1
+            global_batch_size = micro_batch_size * num_devices
+        elif micro_batch_size is None:
+            micro_batch_size = math.ceil(global_batch_size / num_devices)
+            gradient_accumulation_steps = 1
+        else:
+            if micro_batch_size * num_devices > global_batch_size:
+                if num_devices > global_batch_size:
+                    micro_batch_size = 1
+                    global_batch_size = num_devices
+                else:
+                    micro_batch_size = math.ceil(global_batch_size / num_devices)
+            gradient_accumulation_steps = math.ceil(global_batch_size / num_devices / micro_batch_size)
+            global_batch_size = int(gradient_accumulation_steps * num_devices * micro_batch_size)
+
+        if ('train_steps' in train_config
+                and train_config['train_steps'] is not None
+                and train_config['train_steps'] > 0):
+            train_steps = int(train_config["train_steps"])
+            num_train_epochs = -1
+        else:
+            train_steps = -1
+            num_train_epochs = _safe_float(train_config.get("num_train_epochs", 1))
+
+        precision = train_config.get('precision', 'bf16')
+        if precision not in ['bf16', 'fp16', 'fp32']:
+            log_in_local_rank_0(f'Precision {precision} is not supported, using fp32 by default!', type='warn')
+            precision = 'fp32'
+
+        if precision == 'bf16':
+            torch_dtype = torch.bfloat16
+        elif precision == 'fp16':
+            # use fp32 to load the model but fp16 mixed precision to train it
+            torch_dtype = torch.float32
+        elif precision == 'fp32':
+            torch_dtype = torch.float32
+        else:
+            raise ValueError(f'Unsupported precision {precision}')
+
+        log_in_local_rank_0(f'Set global_batch_size to {global_batch_size}')
+        log_in_local_rank_0(f'Set micro_batch_size to {micro_batch_size}')
+        log_in_local_rank_0(f'Set gradient_accumulation_steps to {gradient_accumulation_steps}')
+        log_in_local_rank_0(f'Set precision to {precision}')
+        log_in_local_rank_0(f'Set normalization to {train_config["normalization_method"]}')
+
+        training_args = TimeMoETrainingArguments(
+            output_dir=self.output_path,
+            num_train_epochs=num_train_epochs,
+            # use_cpu=True,
+            max_steps=train_steps,
+            evaluation_strategy=train_config.get("evaluation_strategy", 'no'),
+            eval_steps=_safe_float(train_config.get("eval_steps", None)),
+            save_strategy=train_config.get("save_strategy", "no"),
+            save_steps=_safe_float(train_config.get("save_steps", None)),
+            learning_rate=float(train_config.get("learning_rate", 1e-5)),
+            min_learning_rate=float(train_config.get("min_learning_rate", 0)),
+            adam_beta1=float(train_config.get("adam_beta1", 0.9)),
+            adam_beta2=float(train_config.get("adam_beta2", 0.95)),
+            adam_epsilon=float(train_config.get("adam_epsilon", 1e-8)),
+            lr_scheduler_type=train_config.get("lr_scheduler_type", 'constant'),
+            warmup_ratio=float(train_config.get("warmup_ratio") or 0.0),
+            warmup_steps=int(train_config.get("warmup_steps", 0)),
+            weight_decay=float(train_config.get("weight_decay", 0.1)),
+            per_device_train_batch_size=int(micro_batch_size),
+            per_device_eval_batch_size=int(micro_batch_size * 2),
+            gradient_accumulation_steps=int(gradient_accumulation_steps),
+            gradient_checkpointing=train_config.get("gradient_checkpointing", False),
+            bf16=True if precision == 'bf16' else False,
+            fp16=True if precision == 'fp16' else False,
+            deepspeed=train_config.get("deepspeed"),
+            push_to_hub=False,
+            logging_first_step=True,
+            log_on_each_node=False,
+            logging_steps=int(train_config.get('logging_steps', 1)),
+            seed=self.seed,
+            data_seed=self.seed,
+            max_grad_norm=train_config.get('max_grad_norm', 1.0),
+            optim=train_config.get('optim', 'adamw_torch'),
+            torch_compile=train_config.get('torch_compile', False),
+            dataloader_num_workers=train_config.get('dataloader_num_workers') or 2,
+            ddp_find_unused_parameters=False,
+
+            logging_dir=os.path.join(self.output_path, 'tb_logs'),
+            save_only_model=train_config.get('save_only_model', True),
+            save_total_limit=train_config.get('save_total_limit'),
+        )
+
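+        # Load the backbone from a pretrained checkpoint, or from the config alone when
+        # training from scratch.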
+        model_path = train_config.pop('model_path', None) or self.model_path
+        if model_path is not None:
+            model = self.load_model(
+                model_path=model_path,
+                from_scatch=from_scratch,
+                torch_dtype=torch_dtype,
+                attn_implementation=train_config.get('attn_implementation', 'eager'),
+            )
+            log_in_local_rank_0(f'Load model parameters from: {model_path}')
+        else:
+            raise ValueError('Model path is None')
+
+        num_total_params = 0
+        for p in model.parameters():
+            num_total_params += reduce(mul, p.shape)
+
+        # print statistics info
+        log_in_local_rank_0(train_config)
+        log_in_local_rank_0(training_args)
+        log_in_local_rank_0(model.config)
+        log_in_local_rank_0(f'Number of the model parameters: {length_to_str(num_total_params)}')
+
+        if train_steps > 0:
+            total_train_tokens = train_steps * global_batch_size * train_config['max_length']
+            log_in_local_rank_0(f'Tokens to consume: {length_to_str(total_train_tokens)}')
+
+        # Training
+        train_ds = self.get_train_dataset(train_config['data_path'], max_length=train_config['max_length'], normalization_method=train_config['normalization_method'])
+        trainer = TimeMoeTrainer(
+            model=model,
+            args=training_args,
+            train_dataset=train_ds,
+        )
+        trainer.train()
+        trainer.save_model(self.output_path)
+        log_in_local_rank_0(f'Saving model to {self.output_path}')
+
+        return trainer.model
+
+    def get_train_dataset(self, data_path, max_length, normalization_method):
+        log_in_local_rank_0('Loading dataset...')
+        dataset = TimeMoEDataset(data_path, normalization_method=normalization_method)
+        log_in_local_rank_0('Processing dataset to fixed-size sub-sequences...')
+        window_dataset = TimeMoEWindowDataset(dataset, context_length=max_length, prediction_length=0, shuffle=False)
+        return window_dataset
+
+
+def setup_seed(seed: int = 9899):
+    """
+    Setup seed for all known operations.
+
+    Args:
+        seed (int): seed number.
+
+    Returns:
+
+    """
+    random.seed(seed)
+    try:
+        import numpy as np
+        np.random.seed(seed)
+    except ImportError:
+        pass
+    try:
+        import torch
+        torch.manual_seed(seed)
+        torch.cuda.manual_seed_all(seed)
+    except ImportError:
+        pass
+
+
+def length_to_str(length):
+    if length >= 1e12:
+        return f'{length / 1e12:.3f}T'
+    if length >= 1e9:
+        return f'{length / 1e9:.3f}B'
+    elif length >= 1e6:
+        return f'{length / 1e6:.3f}M'
+    else:
+        return f'{length / 1e3:.3f}K'
+
+
+def _safe_float(number):
+    if number is None:
+        return None
+    else:
+        return float(number)
diff --git a/src/samay/models/Time_MoE/time_moe/trainer/__init__.py b/src/samay/models/Time_MoE/time_moe/trainer/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/samay/models/Time_MoE/time_moe/trainer/hf_trainer.py b/src/samay/models/Time_MoE/time_moe/trainer/hf_trainer.py
new file mode 100755
index 0000000..291e8c9
--- /dev/null
+++ b/src/samay/models/Time_MoE/time_moe/trainer/hf_trainer.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 _*-
+import math
+from dataclasses import field, dataclass
+from functools import partial
+
+import inspect
+
+import transformers
+import torch
+from torch.optim.lr_scheduler import LambdaLR
+from transformers import get_scheduler
+
+
+class TimeMoeTrainer(transformers.Trainer):
+    epsilon = 1e-8
+
+    def __init__(self, label_column: str = 'labels', loss_mask_column: str = 'loss_mask', *positional_args, **kwargs):
+        super().__init__(*positional_args, **kwargs)
+        self.tokenizer = kwargs.get("tokenizer", None)
+        self.label_column = label_column
+        self.loss_mask_column = loss_mask_column
+
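+    # Override create_scheduler so a cosine schedule can decay to a configurable
+    # minimum learning rate instead of zero.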
+    def create_scheduler(self, num_training_steps: int, optimizer: torch.optim.Optimizer = None):
+        optimizer = self.optimizer if optimizer is None else optimizer
+        min_lr_ratio = self.args.min_learning_rate / self.args.learning_rate
+        if self.lr_scheduler is None:
+            if self.args.lr_scheduler_type == 'cosine':
+                self.lr_scheduler = get_cosine_schedule_with_warmup_min_lr(
+                    optimizer=optimizer,
+                    num_warmup_steps=self.args.get_warmup_steps(num_training_steps),
+                    num_training_steps=num_training_steps,
+                    min_lr_ratio=min_lr_ratio,
+                )
+            else:
+                self.lr_scheduler = get_scheduler(
+                    self.args.lr_scheduler_type,
+                    optimizer=optimizer,
+                    num_warmup_steps=self.args.get_warmup_steps(num_training_steps),
+                    num_training_steps=num_training_steps,
+                )
+            self._created_lr_scheduler = True
+        return self.lr_scheduler
+
+    def _set_signature_columns_if_needed(self):
+        if self._signature_columns is None:
+            # Inspect model forward signature to keep only the arguments it accepts.
+            signature = inspect.signature(self.model.forward)
+            params = list(signature.parameters.keys())
+            # Labels may be named label or label_ids, the default data collator handles that.
+            self._signature_columns = list(set(
+                params + self.label_names + [
+                    "label",
+                    "label_ids",
+                    self.label_column,
+                    self.loss_mask_column
+                ]
+            ))
+
+
+@dataclass
+class TimeMoETrainingArguments(transformers.TrainingArguments):
+    min_learning_rate: float = field(
+        default=0, metadata={"help": "Minimum learning rate for cosine_schedule"}
+    )
+
+
+def _get_cosine_schedule_with_warmup_and_min_lr_lambda(
+    current_step: int, *, num_warmup_steps: int, num_training_steps: int, num_cycles: float, min_lr_ratio: float,
+):
+    if current_step < num_warmup_steps:
+        return float(current_step) / float(max(1, num_warmup_steps))
+    progress = float(current_step - num_warmup_steps) / float(max(1, num_training_steps - num_warmup_steps))
+    cosine_ratio = 0.5 * (1.0 + math.cos(math.pi * float(num_cycles) * 2.0 * progress))
+
+    return max(min_lr_ratio, min_lr_ratio + (1 - min_lr_ratio) * cosine_ratio)
+
+
+def get_cosine_schedule_with_warmup_min_lr(
+    optimizer: torch.optim.Optimizer,
+    num_warmup_steps: int,
+    num_training_steps: int,
+    num_cycles: float = 0.5,
+    min_lr_ratio: float = 0,
+    last_epoch: int = -1
+):
+    lr_lambda = partial(
+        _get_cosine_schedule_with_warmup_and_min_lr_lambda,
+        num_warmup_steps=num_warmup_steps,
+        num_training_steps=num_training_steps,
+        num_cycles=num_cycles,
+        min_lr_ratio=min_lr_ratio,
+    )
+    return LambdaLR(optimizer, lr_lambda, last_epoch)
diff --git a/src/samay/models/Time_MoE/time_moe/utils/__init__.py b/src/samay/models/Time_MoE/time_moe/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/samay/models/Time_MoE/time_moe/utils/dist_util.py b/src/samay/models/Time_MoE/time_moe/utils/dist_util.py
new file mode 100644
index 0000000..39946a4
--- /dev/null
+++ b/src/samay/models/Time_MoE/time_moe/utils/dist_util.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 _*-
+import os
+import torch.distributed as dist
+
+
+def is_master_process():
+    rank = os.getenv('RANK')
+    if (rank is None or rank == '0') and is_local_rank_0():
+        return True
+    else:
+        return False
+
+
+def is_local_rank_0():
+    local_rank = os.getenv('LOCAL_RANK')
+    if local_rank is None or local_rank == '0':
+        return True
+    else:
+        return False
+
+
+def get_local_world_size():
+    import torch
+    local_world_size = os.getenv('LOCAL_WORLD_SIZE')
+    if local_world_size is None:
+        num_gpus = torch.cuda.device_count()
+        local_world_size = num_gpus or 1
+    else:
+        local_world_size = int(local_world_size)
+    return local_world_size
+
+
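+# World size comes from torch.distributed when it is initialised, otherwise from the
+# WORLD_SIZE environment variable (default 1).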
+def get_world_size():
+    try:
+        world_size = dist.get_world_size()
+        return world_size
+    except Exception:
+        pass
+    world_size = os.getenv('WORLD_SIZE')
+    if world_size is None:
+        world_size = 1
+    else:
+        world_size = int(world_size)
+    return world_size
diff --git a/src/samay/models/Time_MoE/time_moe/utils/log_util.py b/src/samay/models/Time_MoE/time_moe/utils/log_util.py
new file mode 100644
index 0000000..6c02b08
--- /dev/null
+++ b/src/samay/models/Time_MoE/time_moe/utils/log_util.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# -*- coding:utf-8 _*-
+import os
+import logging
+import sys
+import typing
+
+# -------- log setting ---------
+DEFAULT_LOGGER = "time_moe_logger"
+
+DEFAULT_FORMATTER = logging.Formatter(
+    '%(asctime)s - %(filename)s[pid:%(process)d;line:%(lineno)d:%(funcName)s] - %(levelname)s: %(message)s'
+)
+
+_ch = logging.StreamHandler(stream=sys.stdout)
+_ch.setFormatter(DEFAULT_FORMATTER)
+
+_DEFAULT_HANDLERS = [_ch]
+
+_LOGGER_CACHE = {}  # type: typing.Dict[str, logging.Logger]
+
+
+def is_local_rank_0():
+    local_rank = os.getenv('LOCAL_RANK')
+    if local_rank is None or local_rank == '0':
+        return True
+    else:
+        return False
+
+
+def get_logger(name, level="INFO", handlers=None, update=False):
+    if name in _LOGGER_CACHE and not update:
+        return _LOGGER_CACHE[name]
+    logger = logging.getLogger(name)
+    logger.setLevel(level)
+    logger.handlers = handlers or _DEFAULT_HANDLERS
+    logger.propagate = False
+    return logger
+
+
+def log_in_local_rank_0(*msg, type='info', used_logger=None):
+    msg = ' '.join([str(s) for s in msg])
+    if used_logger is None:
+        used_logger = logger
+
+    if is_local_rank_0():
+        if type == 'warn' or type == 'warning':
+            used_logger.warning(msg)
+        elif type == 'error':
+            used_logger.error(msg)
+        else:
+            used_logger.info(msg)
+
+
+# -------------------------- Singleton Object --------------------------
+logger = get_logger(DEFAULT_LOGGER)
diff --git a/src/samay/utils.py b/src/samay/utils.py
index 2867fd3..33c45fa 100644
--- a/src/samay/utils.py
+++ b/src/samay/utils.py
@@ -178,7 +178,7 @@ def visualize(task_name="forecasting", trues=None, preds=None, history=None, mas
         pred = preds[time_idx, channel_idx, :]
 
         # Set figure size proportional to the number of forecasts
-        plt.figure(figsize=(0.2 * len(history), 4))
+        plt.figure(figsize=(0.02 * len(history), 4))
 
         # Plotting the first time series from history
         plt.plot(